file_name
stringlengths
3
137
prefix
stringlengths
0
918k
suffix
stringlengths
0
962k
middle
stringlengths
0
812k
PaginatedServiceModel.ts
import { Paginated, Params } from '@feathersjs/feathers'; import ServiceModel from './ServiceModel'; import { NotFound } from '@feathersjs/errors'; export default class PaginatedServiceModel<T extends typeof ServiceModel> { /** * Model to be paginated */ private readonly model: T; /** * Service query. */ private readonly query: Params['query']; /** * Cached pagination result. */ private _result: Paginated<InstanceType<T>> = { total: -1, limit: -1, skip: -1, data: [] }; /** * Paginated model constructor. * * @param model * @param query */ constructor(model: T, query: Params['query']) { this.model = model; this.query = query; } /** * Format a raw paginated response into a model"-ified" response. * * @param result */ private formatResult(result: Paginated<any>): Paginated<InstanceType<T>> { return { ...result, // @ts-ignore data: result.data.map((document: any) => { return new this.model(document); }), } } /** * Fetch the pagination result. */ public async fetch(query: Params['query'] = {}) { if (this._result.total !== -1) { return this._result; } const result = await this.fetchRaw(query); this._result = this.formatResult(result); return this._result; } /** * Fire a raw FeathersJS fetch request to the current model. */ public async fetchRaw(query: Params['query'] = {}): Promise<Paginated<InstanceType<T>['entry']>> { return await this.model._find({ ...query, ...this.query }); } /** * Fetch a single entry from the paginated model. * * @param query */ public async fetchOne(query: Params['query'] = {}): Promise<InstanceType<T>> { const result = await this.fetch(query); if (result.data.length) { return result.data[0]; } throw new NotFound('[PaginatedServiceModel] fetchOne() could not locate any entries!', { query, }) } /** * Further dig through the pagination with additional search query. * * @param query */ public find(query?: Params['query']) { return new PaginatedServiceModel<T>(this.model, { ...query, ...this.query, }); } /** * Patch all matched models. */ public patch(data: any, params?: Params) { return this.model._patch(null, data, { ...params, query: { ...(params && params.query), ...this.query,
}); } /** * Fetch result count. */ public async count(): Promise<number> { const result = await this.fetch({ $limit: 0 }); return result.total; } /** * Create a new model entry using the current query (likely relationship data) as the document base. * * @param data * @param query */ public create(data: any, query?: Params['query']) { return this.model.create({ ...this.query, ...data, }, query) } }
}
permalinks.go
// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package page import ( "fmt" "os" "path" "path/filepath" "regexp" "strconv" "strings" "time" "github.com/pkg/errors" "github.com/neohugo/neohugo/helpers" ) // PermalinkExpander holds permalin mappings per section. type PermalinkExpander struct { // knownPermalinkAttributes maps :tags in a permalink specification to a // function which, given a page and the tag, returns the resulting string // to be used to replace that tag. knownPermalinkAttributes map[string]pageToPermaAttribute expanders map[string]func(Page) (string, error) ps *helpers.PathSpec } // Time for checking date formats. Every field is different than the // Go reference time for date formatting. This ensures that formatting this date // with a Go time format always has a different output than the format itself. var referenceTime = time.Date(2019, time.November, 9, 23, 1, 42, 1, time.UTC) // Return the callback for the given permalink attribute and a boolean indicating if the attribute is valid or not. func (p PermalinkExpander) callback(attr string) (pageToPermaAttribute, bool) { if callback, ok := p.knownPermalinkAttributes[attr]; ok { return callback, true } if strings.HasPrefix(attr, "sections[") { fn := p.toSliceFunc(strings.TrimPrefix(attr, "sections")) return func(p Page, s string) (string, error) { return path.Join(fn(p.CurrentSection().SectionsEntries())...), nil }, true } // Make sure this comes after all the other checks. if referenceTime.Format(attr) != attr { return p.pageToPermalinkDate, true } return nil, false } // NewPermalinkExpander creates a new PermalinkExpander configured by the given // PathSpec. func
(ps *helpers.PathSpec) (PermalinkExpander, error) { p := PermalinkExpander{ps: ps} p.knownPermalinkAttributes = map[string]pageToPermaAttribute{ "year": p.pageToPermalinkDate, "month": p.pageToPermalinkDate, "monthname": p.pageToPermalinkDate, "day": p.pageToPermalinkDate, "weekday": p.pageToPermalinkDate, "weekdayname": p.pageToPermalinkDate, "yearday": p.pageToPermalinkDate, "section": p.pageToPermalinkSection, "sections": p.pageToPermalinkSections, "title": p.pageToPermalinkTitle, "slug": p.pageToPermalinkSlugElseTitle, "filename": p.pageToPermalinkFilename, } patterns := ps.Cfg.GetStringMapString("permalinks") if patterns == nil { return p, nil } e, err := p.parse(patterns) if err != nil { return p, err } p.expanders = e return p, nil } // Expand expands the path in p according to the rules defined for the given key. // If no rules are found for the given key, an empty string is returned. func (l PermalinkExpander) Expand(key string, p Page) (string, error) { expand, found := l.expanders[key] if !found { return "", nil } return expand(p) } func (l PermalinkExpander) parse(patterns map[string]string) (map[string]func(Page) (string, error), error) { expanders := make(map[string]func(Page) (string, error)) // Allow " " and / to represent the root section. const sectionCutSet = " /" + string(os.PathSeparator) for k, pattern := range patterns { // TODO check if we need os.PathSeparator //nolint k = strings.Trim(k, sectionCutSet) if !l.validate(pattern) { return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkIllFormed} } pattern := pattern matches := attributeRegexp.FindAllStringSubmatch(pattern, -1) callbacks := make([]pageToPermaAttribute, len(matches)) replacements := make([]string, len(matches)) for i, m := range matches { replacement := m[0] attr := replacement[1:] replacements[i] = replacement callback, ok := l.callback(attr) if !ok { return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkAttributeUnknown} } callbacks[i] = callback } expanders[k] = func(p Page) (string, error) { if matches == nil { return pattern, nil } newField := pattern for i, replacement := range replacements { attr := replacement[1:] callback := callbacks[i] newAttr, err := callback(p, attr) if err != nil { return "", &permalinkExpandError{pattern: pattern, err: err} } newField = strings.Replace(newField, replacement, newAttr, 1) } return newField, nil } } return expanders, nil } // pageToPermaAttribute is the type of a function which, given a page and a tag // can return a string to go in that position in the page (or an error) type pageToPermaAttribute func(Page, string) (string, error) var attributeRegexp = regexp.MustCompile(`:\w+(\[.+\])?`) // validate determines if a PathPattern is well-formed func (l PermalinkExpander) validate(pp string) bool { fragments := strings.Split(pp[1:], "/") bail := false for i := range fragments { if bail { return false } if len(fragments[i]) == 0 { bail = true continue } matches := attributeRegexp.FindAllStringSubmatch(fragments[i], -1) if matches == nil { continue } for _, match := range matches { k := match[0][1:] if _, ok := l.callback(k); !ok { return false } } } return true } type permalinkExpandError struct { pattern string err error } func (pee *permalinkExpandError) Error() string { return fmt.Sprintf("error expanding %q: %s", pee.pattern, pee.err) } var ( errPermalinkIllFormed = errors.New("permalink ill-formed") errPermalinkAttributeUnknown = errors.New("permalink attribute not recognised") ) func (l PermalinkExpander) pageToPermalinkDate(p Page, dateField string) (string, error) { // a Page contains a Node which provides a field Date, time.Time switch dateField { case "year": return strconv.Itoa(p.Date().Year()), nil case "month": return fmt.Sprintf("%02d", int(p.Date().Month())), nil case "monthname": return p.Date().Month().String(), nil case "day": return fmt.Sprintf("%02d", p.Date().Day()), nil case "weekday": return strconv.Itoa(int(p.Date().Weekday())), nil case "weekdayname": return p.Date().Weekday().String(), nil case "yearday": return strconv.Itoa(p.Date().YearDay()), nil } return p.Date().Format(dateField), nil } // pageToPermalinkTitle returns the URL-safe form of the title func (l PermalinkExpander) pageToPermalinkTitle(p Page, _ string) (string, error) { return l.ps.URLize(p.Title()), nil } // pageToPermalinkFilename returns the URL-safe form of the filename func (l PermalinkExpander) pageToPermalinkFilename(p Page, _ string) (string, error) { name := p.File().TranslationBaseName() if name == "index" { // Page bundles; the directory name will hopefully have a better name. dir := strings.TrimSuffix(p.File().Dir(), helpers.FilePathSeparator) _, name = filepath.Split(dir) } return l.ps.URLize(name), nil } // if the page has a slug, return the slug, else return the title func (l PermalinkExpander) pageToPermalinkSlugElseTitle(p Page, a string) (string, error) { if p.Slug() != "" { return l.ps.URLize(p.Slug()), nil } return l.pageToPermalinkTitle(p, a) } func (l PermalinkExpander) pageToPermalinkSection(p Page, _ string) (string, error) { return p.Section(), nil } func (l PermalinkExpander) pageToPermalinkSections(p Page, _ string) (string, error) { return p.CurrentSection().SectionsPath(), nil } var ( nilSliceFunc = func(s []string) []string { return nil } allSliceFunc = func(s []string) []string { return s } ) // toSliceFunc returns a slice func that slices s according to the cut spec. // The cut spec must be on form [low:high] (one or both can be omitted), // also allowing single slice indices (e.g. [2]) and the special [last] keyword // giving the last element of the slice. // The returned function will be lenient and not panic in out of bounds situation. // // The current use case for this is to use parts of the sections path in permalinks. func (l PermalinkExpander) toSliceFunc(cut string) func(s []string) []string { cut = strings.ToLower(strings.TrimSpace(cut)) if cut == "" { return allSliceFunc } if len(cut) < 3 || (cut[0] != '[' || cut[len(cut)-1] != ']') { return nilSliceFunc } toNFunc := func(s string, low bool) func(ss []string) int { if s == "" { if low { return func(ss []string) int { return 0 } } else { return func(ss []string) int { return len(ss) } } } if s == "last" { return func(ss []string) int { return len(ss) - 1 } } n, _ := strconv.Atoi(s) if n < 0 { n = 0 } return func(ss []string) int { // Prevent out of bound situations. It would not make // much sense to panic here. if n > len(ss) { return len(ss) } return n } } opsStr := cut[1 : len(cut)-1] opts := strings.Split(opsStr, ":") if !strings.Contains(opsStr, ":") { toN := toNFunc(opts[0], true) return func(s []string) []string { if len(s) == 0 { return nil } v := s[toN(s)] if v == "" { return nil } return []string{v} } } toN1, toN2 := toNFunc(opts[0], true), toNFunc(opts[1], false) return func(s []string) []string { if len(s) == 0 { return nil } return s[toN1(s):toN2(s)] } }
NewPermalinkExpander
delaunay.py
from __future__ import absolute_import import numpy as np from scipy.spatial import Delaunay from spektral.utils import label_to_one_hot, numpy_to_nx RETURN_TYPES = {'numpy', 'networkx'} MAX_K = 7 # Maximum number of nodes in a graph def generate_data(return_type='networkx', classes=0, n_samples_in_class=1000, n_nodes=7, support_low=0., support_high=10., drift_amount=1.0, one_hot_labels=True, support=None, seed=None): """ Generates a dataset of Delaunay triangulations as described by [Zambon et al. (2017)](https://arxiv.org/abs/1706.06941). Note that this function is basically deprecated and will change soon. :param return_type: `'networkx'` or `'numpy'`, data format to return; :param classes: indices of the classes to load (integer, or list of integers between 0 and 20); :param n_samples_in_class: number of generated samples per class; :param n_nodes: number of nodes in a graph; :param support_low: lower bound of the uniform distribution from which the support is generated; :param support_high: upper bound of the uniform distribution from which the support is generated; :param drift_amount: coefficient to control the amount of change between classes; :param one_hot_labels: one-hot encode dataset labels; :param support: custom support to use instead of generating it randomly; :param seed: random numpy seed; :return: if `return_type='networkx'`, a list of graphs in Networkx format, and an array containing labels; if `return_type='numpy'`, the adjacency matrix, node features, and an array containing labels. """ if return_type not in RETURN_TYPES: raise ValueError('Possible return_type: {}'.format(RETURN_TYPES)) if isinstance(classes, int): classes = [classes]
r_classes = list(reversed(classes)) if r_classes[-1] == 0: r_classes.insert(0, r_classes.pop(-1)) # Support points np.random.seed(seed) if support is None: support = np.random.uniform(support_low, support_high, (1, n_nodes, 2)) else: try: assert support.shape == (1, n_nodes, 2) except AssertionError: print('The given support doesn\'t have shape (1, n_nodes, 2) as' 'expected. Attempting to reshape.') support = support.reshape(1, n_nodes, 2) # Compute node features node_features = [] # Other node features for idx, i in enumerate(r_classes): if i == 0: concept_0 = np.repeat(support, n_samples_in_class, 0) noise_0 = np.random.normal(0, 1, (n_samples_in_class, n_nodes, 2)) class_0 = concept_0 + noise_0 node_features.append(class_0) else: radius = 10. * ((2./3.) ** (drift_amount * (i - 1))) phase = np.random.uniform(0, 2 * np.pi, (n_nodes, 1)) perturb_i_x = radius * np.cos(phase) perturb_i_y = radius * np.sin(phase) perturb_i = np.concatenate((perturb_i_x, perturb_i_y), axis=-1) support_i = support + perturb_i concept_i = np.repeat(support_i, n_samples_in_class, 0) noise_i = np.random.normal(0, 1, (n_samples_in_class, n_nodes, 2)) class_i = concept_i + noise_i node_features.append(class_i) node_features = np.array(node_features).reshape((-1, n_nodes, 2)) # Compute adjacency matrices adjacency = [] for nf in node_features: adj = compute_adj(nf) adjacency.append(adj) adjacency = np.array(adjacency) # Compute labels labels = np.repeat(classes, n_samples_in_class) if one_hot_labels: labels = label_to_one_hot(labels, labels=classes) if return_type is 'numpy': return adjacency, node_features, labels elif return_type is 'networkx': graphs = numpy_to_nx(adjacency, node_features=node_features, nf_name='coords') return graphs, labels else: raise NotImplementedError def compute_adj(x): """ Computes the Delaunay triangulation of the given points :param x: array of shape (num_nodes, 2) :return: the computed adjacency matrix """ tri = Delaunay(x) edges_explicit = np.concatenate((tri.vertices[:, :2], tri.vertices[:, 1:], tri.vertices[:, ::2]), axis=0) adj = np.zeros((x.shape[0], x.shape[0])) adj[edges_explicit[:, 0], edges_explicit[:, 1]] = 1. return np.clip(adj + adj.T, 0, 1)
if max(classes) > 20 or min(classes) < 0: raise ValueError('Class indices must be between 0 and 20')
verify_timestamp_compaction.rs
// Copyright Materialize, Inc. and contributors. All rights reserved. // // Use of this software is governed by the Business Source License // included in the LICENSE file. // // As of the Change Date specified in that file, in accordance with // the Business Source License, use of this software will be governed // by the Apache License, Version 2.0. use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::Arc; use std::time::Duration; use anyhow::bail; use async_trait::async_trait; use coord::catalog::Catalog; use coord::session::Session; use ore::now::NOW_ZERO; use ore::retry::Retry; use sql::catalog::SessionCatalog; use sql::names::PartialName; use crate::action::{Action, State}; use crate::parser::BuiltinCommand; pub struct VerifyTimestampCompactionAction { source: String, max_size: usize, permit_progress: bool, } pub fn build_verify_timestamp_compaction_action( mut cmd: BuiltinCommand, ) -> Result<VerifyTimestampCompactionAction, anyhow::Error> { let source = cmd.args.string("source")?; let max_size = cmd.args.opt_parse("max-size")?.unwrap_or(3); let permit_progress = cmd.args.opt_bool("permit-progress")?.unwrap_or(false); cmd.args.done()?; Ok(VerifyTimestampCompactionAction { source, max_size, permit_progress, }) } #[async_trait] impl Action for VerifyTimestampCompactionAction { async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> { // Can't undo a verification. Ok(()) } async fn redo(&self, state: &mut State) -> Result<(), anyhow::Error> { if let Some(path) = &state.materialized_catalog_path { let initial_highest_base = Arc::new(AtomicU64::new(u64::MAX)); Retry::default() .initial_backoff(Duration::from_secs(1)) .max_duration(Duration::from_secs(10)) .retry_async(|retry_state| { let initial_highest = initial_highest_base.clone(); async move { let mut catalog = Catalog::open_debug(path, NOW_ZERO.clone()) .await?; let item_id = catalog .for_session(&Session::dummy()) .resolve_item(&PartialName { database: None, schema: None, item: self.source.clone(), })? .id(); let bindings = catalog.load_timestamp_bindings(item_id)?; // We consider progress to be eventually compacting at least up to the original highest // timestamp binding. let progress = if retry_state.i == 0
else { self.permit_progress && (bindings.iter().map(|(_, ts, _)| ts).fold(u64::MAX, |a, &b| a.min(b)) >= initial_highest.load(Ordering::SeqCst)) }; println!( "Verifying timestamp binding compaction for {:?}. Found {:?} vs expected {:?}. Progress: {:?}", self.source, bindings.len(), self.max_size, progress, ); if bindings.len() <= self.max_size || progress { Ok(()) } else { bail!( "There are {:?} bindings compared to max size {:?}", bindings.len(), self.max_size, ); } } }).await } else { println!( "Skipping timestamp binding compaction verification for {:?}.", self.source ); Ok(()) } } }
{ initial_highest.store( bindings.iter().map(|(_, ts, _)| ts).fold(u64::MIN, |a, &b| a.max(b)), Ordering::SeqCst, ); false }
test_equipment_category_api.py
# -*- coding: utf-8 -*- import pytest from resources.models import Equipment, ResourceEquipment, EquipmentCategory from django.urls import reverse from .utils import check_disallowed_methods, UNSAFE_METHODS
return reverse('equipmentcategory-list') @pytest.mark.django_db @pytest.fixture def detail_url(equipment_category): return reverse('equipmentcategory-detail', kwargs={'pk': equipment_category.pk}) def _check_keys_and_values(result): """ Check that given dict represents equipment data in correct form. """ assert len(result) == 3 # id, name, equipments assert result['id'] != '' assert result['name'] == {'fi': 'test equipment category'} equipments = result['equipment'] assert len(equipments) == 1 equipment = equipments[0] assert len(equipment) == 2 assert equipment['name'] == {'fi': 'test equipment'} assert equipment['id'] != '' @pytest.mark.django_db def test_disallowed_methods(all_user_types_api_client, list_url, detail_url): """ Tests that only safe methods are allowed to equipment list and detail endpoints. """ check_disallowed_methods(all_user_types_api_client, (list_url, detail_url), UNSAFE_METHODS) @pytest.mark.django_db def test_get_equipment_category_list(api_client, list_url, equipment): """ Tests that equipment category list endpoint returns equipment category data in correct form. """ response = api_client.get(list_url) results = response.data['results'] assert len(results) == 1 _check_keys_and_values(results[0]) @pytest.mark.django_db def test_get_equipment_category_list(api_client, detail_url, equipment): """ Tests that equipment category detail endpoint returns equipment category data in correct form. """ response = api_client.get(detail_url) _check_keys_and_values(response.data)
@pytest.fixture def list_url():
mediaviewer.ts
// The @atlassian/mediaviewer type definitions // These types are required by TS consumers to consume @atlaskit/media-viewer // This file is intentionally NOT a TS declaration file (mediaviewer.d.ts) because the Atlaskit build process does NOT publish declaration files to the /dist directory // TODO MSW-216 move type definitions into the @atlassian/mediaviewer package and remove this file import 'jquery'; import { BackBoneModel } from './domain/preview'; export interface MediaFileAttributes { readonly src: string; readonly srcDownload: string; readonly id?: string; readonly type?: string; readonly title?: string; readonly src_hd?: string; readonly poster?: string; readonly thumbnail?: string; readonly downlodable?: boolean; } export interface MediaFile { readonly attributes: MediaFileAttributes; } export type MediaViewerAssets = { readonly basePath: string; }; export type MediaViewerType = 'image' | 'document' | 'video' | '3d'; // The AnalyticsData type is hard to specify, because its structure in Media Viewer Classic is very complex and dynamic. // With Media Viewer Next Gen this will change however, which is when we should revise specifying the type of this object. // https://product-fabric.atlassian.net/browse/MSW-452 export type AnalyticsData = Object; export interface MediaViewerConfig { readonly assets?: MediaViewerAssets; readonly fetchToken?: (file: MediaFile) => JQueryPromise<MediaFileAttributes>; readonly appendTo?: (node: Node) => void; readonly files?: Array<MediaFileAttributes>; readonly commentService?: Object; readonly templateBackend?: Function; readonly moduleBackend?: Function; readonly pdfTransportFactory?: Function; readonly enableListLoop?: boolean; readonly enablePresentationMode?: boolean; readonly enableMiniMode?: boolean; readonly preloadImagesAfterCurrent?: number; readonly preloadImagesBeforeCurrent?: number; readonly videoDefaultQualityHd?: boolean; readonly customStorage?: Object; readonly viewers?: Array<MediaViewerType>; readonly embedded?: boolean; readonly contained?: boolean; readonly i18n?: Object; readonly analyticsBackend?: (key: string, data: AnalyticsData) => void; readonly isPreviewGenerated?: (file: BackBoneModel) => JQueryPromise<boolean>; readonly generatePreview?: ( file: BackBoneModel, ) => JQueryPromise<BackBoneModel>; } export type MediaViewerMode = 'BASE' | 'PRESENTATION' | 'CONTAINED'; export interface MediaViewerInterface { open(fileQuery?: Object): JQueryPromise<void>; close(): void; setFiles(files: Array<MediaFileAttributes>, nextFileQuery?: Object): void; showFileNext(): JQueryPromise<MediaFileAttributes>; showFilePrev(): JQueryPromise<MediaFileAttributes>; showFileWithQuery(query: any): JQueryPromise<MediaFileAttributes>; getCurrent(): MediaFileAttributes; getCurrentFiles(): Array<MediaFileAttributes>; isOpen(): boolean; isShowingFirstFile(): boolean; isShowingLastFile(): boolean; on(eventName: 'fv.open', callback: () => void, context?: any): void; on(eventName: 'fv.close', callback: () => void, context?: any): void; on(eventName: 'fv.setFiles', callback: () => void, context?: any): void; on( eventName: 'fv.changeMode', callback: (mode: MediaViewerMode) => void, context?: any, ): void; on(eventName: 'fv.updateFiles', callback: () => void, context?: any): void; on( eventName: 'fv.changeFile', callback: (file: MediaFileAttributes) => void, context?: any, ): void; on( eventName: 'fv.showFile', callback: (file: MediaFileAttributes) => void, context?: any, ): void; on( eventName: 'fv.showFileError', callback: (file: MediaFileAttributes) => void, context?: any, ): void; on(eventName: 'reset', callback: (files: any) => void, context?: any): void;
off(eventName: string, callback: Function, context?: any): any; } export interface MediaViewerConstructor { new (config: MediaViewerConfig): MediaViewerInterface; }
mod.rs
// Copyright (c) 2016 Chef Software Inc. and/or applicable contributors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use jobsrv::data_store::DataStore; use protocol::jobsrv; #[test] fn migration() { let ds = datastore_test!(DataStore); ds.setup().expect("Failed to migrate data"); } #[test] fn create_job() { let mut job = jobsrv::Job::new(); job.mut_project().set_vcs_type(String::from("git")); job.mut_project().set_vcs_data(String::from( "http://github.com/habitat-sh/habitat", )); job.mut_project().set_name("core/habitat".to_string()); let ds = datastore_test!(DataStore); ds.setup().expect("Failed to migrate data"); ds.create_job(&mut job).expect("Failed to create a job"); } fn test_job() -> jobsrv::Job { let mut job = jobsrv::Job::new(); job.set_id(0); job.mut_project().set_vcs_type(String::from("git"));
"http://github.com/habitat-sh/habitat", )); job.mut_project().set_name("core/habitat".to_string()); job } #[test] fn get_job() { let mut job = test_job(); let mut job2 = test_job(); let mut job3 = test_job(); let ds = datastore_test!(DataStore); ds.setup().expect("Failed to migrate data"); let rjob1 = ds.create_job(&mut job).expect("Failed to create a job"); let rjob2 = ds.create_job(&mut job2).expect("Failed to create a job"); let rjob3 = ds.create_job(&mut job3).expect("Failed to create a job"); let mut get_job = jobsrv::JobGet::new(); get_job.set_id(rjob1.get_id()); let j1 = ds.get_job(&get_job).expect("Failed to get job 0").expect( "Job should exist", ); get_job.set_id(rjob2.get_id()); let j2 = ds.get_job(&get_job).expect("Failed to get job 2").expect( "Job should exist", ); get_job.set_id(rjob3.get_id()); let j3 = ds.get_job(&get_job).expect("Failed to get job 3").expect( "Job should exist", ); assert!(j1.get_id() != 0); assert!(j2.get_id() != 0); assert!(j3.get_id() != 0); } #[test] fn get_job_does_not_exist() { let ds = datastore_test!(DataStore); ds.setup().expect("Failed to migrate data"); let mut get_job = jobsrv::JobGet::new(); get_job.set_id(0); let result = ds.get_job(&get_job).expect("Failed to get job"); assert!(result.is_none()); } #[test] fn pending_jobs() { let mut job1 = test_job(); let mut job2 = test_job(); let mut job3 = test_job(); let mut job4 = test_job(); let ds = datastore_test!(DataStore); ds.setup().expect("Failed to migrate data"); let rjob1 = ds.create_job(&mut job1).expect("Failed to create job"); let _rjob2 = ds.create_job(&mut job2).expect("Failed to create job"); let _rjob3 = ds.create_job(&mut job3).expect("Failed to create job"); let _rjob4 = ds.create_job(&mut job4).expect("Failed to create job"); // Get one job, it should be FIFO, and it should have its status set to Dispatched let pending_jobs = ds.pending_jobs(1).expect("Failed to get pendings job"); assert_eq!(pending_jobs.len(), 1, "Failed to find a pending job"); assert_eq!( pending_jobs[0].get_id(), rjob1.get_id(), "First in is not first out" ); let mut get_job = jobsrv::JobGet::new(); get_job.set_id(rjob1.get_id()); let job1_dispatched = ds.get_job(&get_job) .expect("Failed to get job entry") .expect("Failed to find the job entry"); assert_eq!(job1_dispatched.get_state(), jobsrv::JobState::Dispatched); // Get the remaining jobs; a larger number results in the total set let remaining_jobs = ds.pending_jobs(5).expect( "Failed to get remaining pending jobs", ); assert_eq!( remaining_jobs.len(), 3, "Failed to get all the remaining jobs" ); // No jobs returns an empty array let no_jobs = ds.pending_jobs(100).expect( "Failed to get empty pending jobs", ); assert_eq!(no_jobs.len(), 0); } #[test] fn update_job() { let mut job1 = test_job(); let ds = datastore_test!(DataStore); ds.setup().expect("Failed to migrate data"); let mut rjob1 = ds.create_job(&mut job1).expect("Failed to create job"); let mut get_job = jobsrv::JobGet::new(); get_job.set_id(rjob1.get_id()); let pending_job = ds.get_job(&get_job) .expect("Failed to get job from database") .expect("No job found"); assert_eq!(pending_job.get_state(), jobsrv::JobState::Pending); rjob1.set_state(jobsrv::JobState::Failed); ds.update_job(&rjob1).expect("Failed to update job state"); let failed_job = ds.get_job(&get_job) .expect("Failed to get job from database") .expect("No job found"); assert_eq!(failed_job.get_state(), jobsrv::JobState::Failed); }
job.mut_project().set_vcs_data(String::from(
check_const.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Verifies that the types and values of const and static items // are safe. The rules enforced by this module are: // // - For each *mutable* static item, it checks that its **type**: // - doesn't have a destructor // - doesn't own an owned pointer // // - For each *immutable* static item, it checks that its **value**: // - doesn't own owned, managed pointers // - doesn't contain a struct literal or a call to an enum variant / struct constructor where // - the type of the struct/enum has a dtor // // Rules Enforced Elsewhere: // - It's not possible to take the address of a static item with unsafe interior. This is enforced // by borrowck::gather_loans use middle::cast::{CastKind}; use middle::const_eval; use middle::def; use middle::expr_use_visitor as euv; use middle::infer; use middle::mem_categorization as mc; use middle::traits; use middle::ty::{self, Ty}; use util::nodemap::NodeMap; use util::ppaux::Repr; use syntax::ast; use syntax::codemap::Span; use syntax::visit::{self, Visitor}; use std::collections::hash_map::Entry; // Const qualification, from partial to completely promotable. bitflags! { #[derive(RustcEncodable, RustcDecodable)] flags ConstQualif: u8 { // Inner mutability (can not be placed behind a reference) or behind // &mut in a non-global expression. Can be copied from static memory. const MUTABLE_MEM = 1 << 0, // Constant value with a type that implements Drop. Can be copied // from static memory, similar to MUTABLE_MEM. const NEEDS_DROP = 1 << 1, // Even if the value can be placed in static memory, copying it from // there is more expensive than in-place instantiation, and/or it may // be too large. This applies to [T; N] and everything containing it. // N.B.: references need to clear this flag to not end up on the stack. const PREFER_IN_PLACE = 1 << 2, // May use more than 0 bytes of memory, doesn't impact the constness // directly, but is not allowed to be borrowed mutably in a constant. const NON_ZERO_SIZED = 1 << 3, // Actually borrowed, has to always be in static memory. Does not // propagate, and requires the expression to behave like a 'static // lvalue. The set of expressions with this flag is the minimum // that have to be promoted. const HAS_STATIC_BORROWS = 1 << 4, // Invalid const for miscellaneous reasons (e.g. not implemented). const NOT_CONST = 1 << 5, // Borrowing the expression won't produce &'static T if any of these // bits are set, though the value could be copied from static memory // if `NOT_CONST` isn't set. const NON_STATIC_BORROWS = ConstQualif::MUTABLE_MEM.bits | ConstQualif::NEEDS_DROP.bits | ConstQualif::NOT_CONST.bits } } #[derive(Copy, Clone, Eq, PartialEq)] enum Mode { Const, Static, StaticMut, // An expression that occurs outside of any constant context // (i.e. `const`, `static`, array lengths, etc.). The value // can be variable at runtime, but will be promotable to // static memory if we can prove it is actually constant. Var, } struct CheckCrateVisitor<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, mode: Mode, qualif: ConstQualif, rvalue_borrows: NodeMap<ast::Mutability> } impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { fn with_mode<F, R>(&mut self, mode: Mode, f: F) -> R where F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>) -> R, { let (old_mode, old_qualif) = (self.mode, self.qualif); self.mode = mode; self.qualif = ConstQualif::empty(); let r = f(self); self.mode = old_mode; self.qualif = old_qualif; r } fn with_euv<'b, F, R>(&'b mut self, item_id: Option<ast::NodeId>, f: F) -> R where F: for<'t> FnOnce(&mut euv::ExprUseVisitor<'b, 't, 'tcx, ty::ParameterEnvironment<'a, 'tcx>>) -> R, { let param_env = match item_id { Some(item_id) => ty::ParameterEnvironment::for_item(self.tcx, item_id), None => ty::empty_parameter_environment(self.tcx) }; f(&mut euv::ExprUseVisitor::new(self, &param_env)) } fn global_expr(&mut self, mode: Mode, expr: &ast::Expr) -> ConstQualif
fn add_qualif(&mut self, qualif: ConstQualif) { self.qualif = self.qualif | qualif; } fn record_borrow(&mut self, id: ast::NodeId, mutbl: ast::Mutability) { match self.rvalue_borrows.entry(id) { Entry::Occupied(mut entry) => { // Merge the two borrows, taking the most demanding // one, mutability-wise. if mutbl == ast::MutMutable { entry.insert(mutbl); } } Entry::Vacant(entry) => { entry.insert(mutbl); } } } fn msg(&self) -> &'static str { match self.mode { Mode::Const => "constant", Mode::StaticMut | Mode::Static => "static", Mode::Var => unreachable!(), } } fn check_static_mut_type(&self, e: &ast::Expr) { let node_ty = ty::node_id_to_type(self.tcx, e.id); let tcontents = ty::type_contents(self.tcx, node_ty); let suffix = if tcontents.has_dtor() { "destructors" } else if tcontents.owns_owned() { "owned pointers" } else { return }; self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \ to have {}", suffix)); } fn check_static_type(&self, e: &ast::Expr) { let ty = ty::node_id_to_type(self.tcx, e.id); let infcx = infer::new_infer_ctxt(self.tcx); let mut fulfill_cx = traits::FulfillmentContext::new(); let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic); fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause); let env = ty::empty_parameter_environment(self.tcx); match fulfill_cx.select_all_or_error(&infcx, &env) { Ok(()) => { }, Err(ref errors) => { traits::report_fulfillment_errors(&infcx, errors); } } } } impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { fn visit_item(&mut self, i: &ast::Item) { debug!("visit_item(item={})", i.repr(self.tcx)); match i.node { ast::ItemStatic(_, ast::MutImmutable, ref expr) => { self.check_static_type(&**expr); self.global_expr(Mode::Static, &**expr); } ast::ItemStatic(_, ast::MutMutable, ref expr) => { self.check_static_mut_type(&**expr); self.global_expr(Mode::StaticMut, &**expr); } ast::ItemConst(_, ref expr) => { self.global_expr(Mode::Const, &**expr); } ast::ItemEnum(ref enum_definition, _) => { for var in &enum_definition.variants { if let Some(ref ex) = var.node.disr_expr { self.global_expr(Mode::Const, &**ex); } } } _ => { self.with_mode(Mode::Var, |v| visit::walk_item(v, i)); } } } fn visit_trait_item(&mut self, t: &'v ast::TraitItem) { match t.node { ast::ConstTraitItem(_, ref default) => { if let Some(ref expr) = *default { self.global_expr(Mode::Const, &*expr); } else { visit::walk_trait_item(self, t); } } _ => self.with_mode(Mode::Var, |v| visit::walk_trait_item(v, t)), } } fn visit_impl_item(&mut self, i: &'v ast::ImplItem) { match i.node { ast::ConstImplItem(_, ref expr) => { self.global_expr(Mode::Const, &*expr); } _ => self.with_mode(Mode::Var, |v| visit::walk_impl_item(v, i)), } } fn visit_fn(&mut self, fk: visit::FnKind<'v>, fd: &'v ast::FnDecl, b: &'v ast::Block, s: Span, fn_id: ast::NodeId) { assert!(self.mode == Mode::Var); self.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b)); visit::walk_fn(self, fk, fd, b, s); } fn visit_pat(&mut self, p: &ast::Pat) { match p.node { ast::PatLit(ref lit) => { self.global_expr(Mode::Const, &**lit); } ast::PatRange(ref start, ref end) => { self.global_expr(Mode::Const, &**start); self.global_expr(Mode::Const, &**end); } _ => visit::walk_pat(self, p) } } fn visit_expr(&mut self, ex: &ast::Expr) { let mut outer = self.qualif; self.qualif = ConstQualif::empty(); let node_ty = ty::node_id_to_type(self.tcx, ex.id); check_expr(self, ex, node_ty); // Special-case some expressions to avoid certain flags bubbling up. match ex.node { ast::ExprCall(ref callee, ref args) => { for arg in args.iter() { self.visit_expr(&**arg) } let inner = self.qualif; self.visit_expr(&**callee); // The callee's size doesn't count in the call. let added = self.qualif - inner; self.qualif = inner | (added - ConstQualif::NON_ZERO_SIZED); } ast::ExprRepeat(ref element, _) => { self.visit_expr(&**element); // The count is checked elsewhere (typeck). let count = match node_ty.sty { ty::ty_vec(_, Some(n)) => n, _ => unreachable!() }; // [element; 0] is always zero-sized. if count == 0 { self.qualif.remove(ConstQualif::NON_ZERO_SIZED | ConstQualif::PREFER_IN_PLACE); } } ast::ExprMatch(ref discr, ref arms, _) => { // Compute the most demanding borrow from all the arms' // patterns and set that on the discriminator. let mut borrow = None; for pat in arms.iter().flat_map(|arm| arm.pats.iter()) { let pat_borrow = self.rvalue_borrows.remove(&pat.id); match (borrow, pat_borrow) { (None, _) | (_, Some(ast::MutMutable)) => { borrow = pat_borrow; } _ => {} } } if let Some(mutbl) = borrow { self.record_borrow(discr.id, mutbl); } visit::walk_expr(self, ex); } // Division by zero and overflow checking. ast::ExprBinary(op, _, _) => { visit::walk_expr(self, ex); let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem; match node_ty.sty { ty::ty_uint(_) | ty::ty_int(_) if div_or_rem => { if !self.qualif.intersects(ConstQualif::NOT_CONST) { match const_eval::eval_const_expr_partial(self.tcx, ex, None) { Ok(_) => {} Err(msg) => { span_err!(self.tcx.sess, msg.span, E0020, "{} in a constant expression", msg.description()) } } } } _ => {} } } _ => visit::walk_expr(self, ex) } // Handle borrows on (or inside the autorefs of) this expression. match self.rvalue_borrows.remove(&ex.id) { Some(ast::MutImmutable) => { // Constants cannot be borrowed if they contain interior mutability as // it means that our "silent insertion of statics" could change // initializer values (very bad). // If the type doesn't have interior mutability, then `ConstQualif::MUTABLE_MEM` has // propagated from another error, so erroring again would be just noise. let tc = ty::type_contents(self.tcx, node_ty); if self.qualif.intersects(ConstQualif::MUTABLE_MEM) && tc.interior_unsafe() { outer = outer | ConstQualif::NOT_CONST; if self.mode != Mode::Var { self.tcx.sess.span_err(ex.span, "cannot borrow a constant which contains \ interior mutability, create a static instead"); } } // If the reference has to be 'static, avoid in-place initialization // as that will end up pointing to the stack instead. if !self.qualif.intersects(ConstQualif::NON_STATIC_BORROWS) { self.qualif = self.qualif - ConstQualif::PREFER_IN_PLACE; self.add_qualif(ConstQualif::HAS_STATIC_BORROWS); } } Some(ast::MutMutable) => { // `&mut expr` means expr could be mutated, unless it's zero-sized. if self.qualif.intersects(ConstQualif::NON_ZERO_SIZED) { if self.mode == Mode::Var { outer = outer | ConstQualif::NOT_CONST; self.add_qualif(ConstQualif::MUTABLE_MEM); } else { span_err!(self.tcx.sess, ex.span, E0017, "references in {}s may only refer \ to immutable values", self.msg()) } } if !self.qualif.intersects(ConstQualif::NON_STATIC_BORROWS) { self.add_qualif(ConstQualif::HAS_STATIC_BORROWS); } } None => {} } self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif); // Don't propagate certain flags. self.qualif = outer | (self.qualif - ConstQualif::HAS_STATIC_BORROWS); } } /// This function is used to enforce the constraints on /// const/static items. It walks through the *value* /// of the item walking down the expression and evaluating /// every nested expression. If the expression is not part /// of a const/static item, it is qualified for promotion /// instead of producing errors. fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &ast::Expr, node_ty: Ty<'tcx>) { match node_ty.sty { ty::ty_struct(did, _) | ty::ty_enum(did, _) if ty::has_dtor(v.tcx, did) => { v.add_qualif(ConstQualif::NEEDS_DROP); if v.mode != Mode::Var { v.tcx.sess.span_err(e.span, &format!("{}s are not allowed to have destructors", v.msg())); } } _ => {} } let method_call = ty::MethodCall::expr(e.id); match e.node { ast::ExprUnary(..) | ast::ExprBinary(..) | ast::ExprIndex(..) if v.tcx.method_map.borrow().contains_key(&method_call) => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0011, "user-defined operators are not allowed in {}s", v.msg()); } } ast::ExprBox(..) | ast::ExprUnary(ast::UnUniq, _) => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0010, "allocations are not allowed in {}s", v.msg()); } } ast::ExprUnary(ast::UnDeref, ref ptr) => { match ty::node_id_to_type(v.tcx, ptr.id).sty { ty::ty_ptr(_) => { // This shouldn't be allowed in constants at all. v.add_qualif(ConstQualif::NOT_CONST); } _ => {} } } ast::ExprCast(ref from, _) => { debug!("Checking const cast(id={})", from.id); match v.tcx.cast_kinds.borrow().get(&from.id) { None => v.tcx.sess.span_bug(e.span, "no kind for cast"), Some(&CastKind::PtrAddrCast) | Some(&CastKind::FnPtrAddrCast) => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0018, "can't cast a pointer to an integer in {}s", v.msg()); } } _ => {} } } ast::ExprPath(..) => { let def = v.tcx.def_map.borrow().get(&e.id).map(|d| d.full_def()); match def { Some(def::DefVariant(_, _, _)) => { // Count the discriminator or function pointer. v.add_qualif(ConstQualif::NON_ZERO_SIZED); } Some(def::DefStruct(_)) => { if let ty::ty_bare_fn(..) = node_ty.sty { // Count the function pointer. v.add_qualif(ConstQualif::NON_ZERO_SIZED); } } Some(def::DefFn(..)) | Some(def::DefMethod(..)) => { // Count the function pointer. v.add_qualif(ConstQualif::NON_ZERO_SIZED); } Some(def::DefStatic(..)) => { match v.mode { Mode::Static | Mode::StaticMut => {} Mode::Const => { span_err!(v.tcx.sess, e.span, E0013, "constants cannot refer to other statics, \ insert an intermediate constant instead"); } Mode::Var => v.add_qualif(ConstQualif::NOT_CONST) } } Some(def::DefConst(did)) | Some(def::DefAssociatedConst(did, _)) => { if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did, Some(e.id)) { let inner = v.global_expr(Mode::Const, expr); v.add_qualif(inner); } else { v.tcx.sess.span_bug(e.span, "DefConst or DefAssociatedConst \ doesn't point to a constant"); } } def => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { debug!("(checking const) found bad def: {:?}", def); span_err!(v.tcx.sess, e.span, E0014, "paths in {}s may only refer to constants \ or functions", v.msg()); } } } } ast::ExprCall(ref callee, _) => { let mut callee = &**callee; loop { callee = match callee.node { ast::ExprParen(ref inner) => &**inner, ast::ExprBlock(ref block) => match block.expr { Some(ref tail) => &**tail, None => break }, _ => break }; } let def = v.tcx.def_map.borrow().get(&callee.id).map(|d| d.full_def()); match def { Some(def::DefStruct(..)) => {} Some(def::DefVariant(..)) => { // Count the discriminator. v.add_qualif(ConstQualif::NON_ZERO_SIZED); } _ => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0015, "function calls in {}s are limited to \ struct and enum constructors", v.msg()); } } } } ast::ExprBlock(ref block) => { // Check all statements in the block let mut block_span_err = |span| { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, span, E0016, "blocks in {}s are limited to items and \ tail expressions", v.msg()); } }; for stmt in &block.stmts { match stmt.node { ast::StmtDecl(ref decl, _) => { match decl.node { ast::DeclLocal(_) => block_span_err(decl.span), // Item statements are allowed ast::DeclItem(_) => {} } } ast::StmtExpr(ref expr, _) => block_span_err(expr.span), ast::StmtSemi(ref semi, _) => block_span_err(semi.span), ast::StmtMac(..) => { v.tcx.sess.span_bug(e.span, "unexpanded statement \ macro in const?!") } } } } ast::ExprStruct(..) => { let did = v.tcx.def_map.borrow().get(&e.id).map(|def| def.def_id()); if did == v.tcx.lang_items.unsafe_cell_type() { v.add_qualif(ConstQualif::MUTABLE_MEM); } } ast::ExprLit(_) | ast::ExprAddrOf(..) => { v.add_qualif(ConstQualif::NON_ZERO_SIZED); } ast::ExprRepeat(..) => { v.add_qualif(ConstQualif::PREFER_IN_PLACE); } ast::ExprClosure(..) => { // Paths in constant constexts cannot refer to local variables, // as there are none, and thus closures can't have upvars there. if ty::with_freevars(v.tcx, e.id, |fv| !fv.is_empty()) { assert!(v.mode == Mode::Var, "global closures can't capture anything"); v.add_qualif(ConstQualif::NOT_CONST); } } ast::ExprUnary(..) | ast::ExprBinary(..) | ast::ExprIndex(..) | ast::ExprField(..) | ast::ExprTupField(..) | ast::ExprVec(_) | ast::ExprParen(..) | ast::ExprTup(..) => {} // Conditional control flow (possible to implement). ast::ExprMatch(..) | ast::ExprIf(..) | ast::ExprIfLet(..) | // Loops (not very meaningful in constants). ast::ExprWhile(..) | ast::ExprWhileLet(..) | ast::ExprForLoop(..) | ast::ExprLoop(..) | // More control flow (also not very meaningful). ast::ExprBreak(_) | ast::ExprAgain(_) | ast::ExprRet(_) | // Miscellaneous expressions that could be implemented. ast::ExprRange(..) | // Various other expressions. ast::ExprMethodCall(..) | ast::ExprAssign(..) | ast::ExprAssignOp(..) | ast::ExprInlineAsm(_) | ast::ExprMac(_) => { v.add_qualif(ConstQualif::NOT_CONST); if v.mode != Mode::Var { span_err!(v.tcx.sess, e.span, E0019, "{} contains unimplemented expression type", v.msg()); } } } } pub fn check_crate(tcx: &ty::ctxt) { visit::walk_crate(&mut CheckCrateVisitor { tcx: tcx, mode: Mode::Var, qualif: ConstQualif::NOT_CONST, rvalue_borrows: NodeMap() }, tcx.map.krate()); tcx.sess.abort_if_errors(); } impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> { fn consume(&mut self, _consume_id: ast::NodeId, consume_span: Span, cmt: mc::cmt, _mode: euv::ConsumeMode) { let mut cur = &cmt; loop { match cur.cat { mc::cat_static_item => { if self.mode != Mode::Var { // statics cannot be consumed by value at any time, that would imply // that they're an initializer (what a const is for) or kept in sync // over time (not feasible), so deny it outright. self.tcx.sess.span_err(consume_span, "cannot refer to other statics by value, use the \ address-of operator or a constant instead"); } break; } mc::cat_deref(ref cmt, _, _) | mc::cat_downcast(ref cmt, _) | mc::cat_interior(ref cmt, _) => cur = cmt, mc::cat_rvalue(..) | mc::cat_upvar(..) | mc::cat_local(..) => break } } } fn borrow(&mut self, borrow_id: ast::NodeId, borrow_span: Span, cmt: mc::cmt<'tcx>, _loan_region: ty::Region, bk: ty::BorrowKind, loan_cause: euv::LoanCause) { // Kind of hacky, but we allow Unsafe coercions in constants. // These occur when we convert a &T or *T to a *U, as well as // when making a thin pointer (e.g., `*T`) into a fat pointer // (e.g., `*Trait`). match loan_cause { euv::LoanCause::AutoUnsafe => { return; } _ => { } } let mut cur = &cmt; let mut is_interior = false; loop { match cur.cat { mc::cat_rvalue(..) => { if loan_cause == euv::MatchDiscriminant { // Ignore the dummy immutable borrow created by EUV. break; } let mutbl = bk.to_mutbl_lossy(); if mutbl == ast::MutMutable && self.mode == Mode::StaticMut { // Mutable slices are the only `&mut` allowed in globals, // but only in `static mut`, nowhere else. match cmt.ty.sty { ty::ty_vec(_, _) => break, _ => {} } } self.record_borrow(borrow_id, mutbl); break; } mc::cat_static_item => { if is_interior && self.mode != Mode::Var { // Borrowed statics can specifically *only* have their address taken, // not any number of other borrows such as borrowing fields, reading // elements of an array, etc. self.tcx.sess.span_err(borrow_span, "cannot refer to the interior of another \ static, use a constant instead"); } break; } mc::cat_deref(ref cmt, _, _) | mc::cat_downcast(ref cmt, _) | mc::cat_interior(ref cmt, _) => { is_interior = true; cur = cmt; } mc::cat_upvar(..) | mc::cat_local(..) => break } } } fn decl_without_init(&mut self, _id: ast::NodeId, _span: Span) {} fn mutate(&mut self, _assignment_id: ast::NodeId, _assignment_span: Span, _assignee_cmt: mc::cmt, _mode: euv::MutateMode) {} fn matched_pat(&mut self, _: &ast::Pat, _: mc::cmt, _: euv::MatchMode) {} fn consume_pat(&mut self, _consume_pat: &ast::Pat, _cmt: mc::cmt, _mode: euv::ConsumeMode) {} }
{ assert!(mode != Mode::Var); match self.tcx.const_qualif_map.borrow_mut().entry(expr.id) { Entry::Occupied(entry) => return *entry.get(), Entry::Vacant(entry) => { // Prevent infinite recursion on re-entry. entry.insert(ConstQualif::empty()); } } self.with_mode(mode, |this| { this.with_euv(None, |euv| euv.consume_expr(expr)); this.visit_expr(expr); this.qualif }) }
tools_test.go
package gotils import ( "testing" ) func
(t *testing.T) { data := []float64{3.1, 3.5, 2.1, 22.2, 32, 1, -2.3} if Minf64(data) != -2.3 { t.Fail() } } func TestMaxf64(t *testing.T) { data := []float64{3.1, 3.5, 2.1, 22.2, 32, 1, -2.3} if Maxf64(data) != 32 { t.Fail() } }
TestMinf64
main.rs
use std::env; use guarakapa::{scanpw, crypto, fman, fs}; const MSG_ENTER_PW: &str = "Enter your master password: "; const MSG_SAVE_ERR: &str = "Failed to save file"; const MSG_LOAD_ERR: &str = "Failed to load file"; const MSG_WRONG_PW: &str = "Password does not match!"; const MSG_ENCODE_ERR: &str = "Failed to encode file."; const MSG_DECODE_ERR: &str = "Failed to decode file."; macro_rules! msg_enter_field { () => { "Enter {} for this entry (or just press ENTER to leave it blank):" } } fn get_input() -> String { let mut s = String::new(); std::io::stdin().read_line(&mut s).unwrap(); s.trim_end().to_owned() } fn copy_to_clipboard_and_block(text: String) { let clipboard = x11_clipboard::Clipboard::new().unwrap(); clipboard.store(clipboard.setter.atoms.clipboard, clipboard.setter.atoms.utf8_string, text).unwrap(); // TODO: find out why this has to be here, calling it after this fn does // not work. get_input(); } fn create_new_file() { let pw = scanpw!("Enter a new master password: "); println!(); let confirm = scanpw!("Please repeat: "); println!(); if pw != confirm { println!("Password confirmation incorrect!"); } else { let file = fman::File::try_new(pw).expect("Error creating new file."); fs::save(fman::encode(&file).expect(MSG_ENCODE_ERR)).expect(MSG_SAVE_ERR); println!("Your password file was created (at {}). \ Run the program again to add new entries.", fs::file_path()); } } fn print_usage(exec_name: &str) { println!("First time usage:\n\n\t{exec}\n\n\ General usage:\n\n\t{exec} [COMMAND] [PARAMS]\n\n\ Commands:\n\n\ \tentry_name\tretrieves the entry with name `entry_name`\n\ \tget entry_name\tretrieves the entry with name `entry_name`\n\ \tadd entry_name\tadds a new entry with name `entry_name`\n\ \trm entry_name\tremoves the entry with name `entry_name`\n\ \tls\t\tlists all entries\n\ \tpath\t\tshow path to data file", exec = exec_name); } fn add_entry(entry_name: &str)
fn get_entry(entry_name: &str) { let contents = fs::load().expect(MSG_LOAD_ERR); let mut file = fman::decode(contents.as_slice()).expect(MSG_DECODE_ERR); let pw = scanpw!(MSG_ENTER_PW); println!(); match file.get_entry(pw, entry_name) { Err(e) => println!("Error retrieving entry. Reason: {}", e), Ok(Some(entry)) => { println!("\nEntry `{}` recovered.\n{}\ Password: ****** [copied to clipboard, paste to use].\n\n\ Press ENTER to close the program (clipboard may be erased).", entry_name, entry); copy_to_clipboard_and_block(entry.pw); } _ => println!("Entry `{}` not found.", entry_name), } } fn remove_entry(entry_name: &str) { let contents = fs::load().expect(MSG_LOAD_ERR); let mut file = fman::decode(contents.as_slice()).expect(MSG_DECODE_ERR); let pw = scanpw!(MSG_ENTER_PW); println!(); let pw_hash = crypto::hash(vec![pw.as_bytes(), &file.head.salt[..]]); if pw_hash != file.head.pw_hash { println!("{}", MSG_WRONG_PW); return; } if let Err(e) = file.remove_entry(pw, entry_name) { println!("Could not remove entry. Reason: {}", e); return; } fs::save(fman::encode(&file).expect(MSG_ENCODE_ERR)).expect(MSG_SAVE_ERR); println!("Entry '{}' removed successfully.", entry_name); } fn list_entries() { let contents = fs::load().expect(MSG_LOAD_ERR); let mut file = fman::decode(contents.as_slice()).expect(MSG_DECODE_ERR); let pw = scanpw!(MSG_ENTER_PW); println!(); match file.list(pw) { Err(e) => println!("Error retrieving entries: {}", e), Ok(entries) => println!("Total entries: {:?}", entries) } } fn data_file_path() { println!("data file path: {}", fs::file_path()); } fn show_version() { println!("{}", env!("CARGO_PKG_VERSION")); } fn main() { let args: Vec<String> = env::args().collect(); if args.len() == 2 && vec!["version", "--version", "-v"].contains(&args[1].as_str()) { show_version(); } else if fs::file_exists() { match args.len() - 1 { 1 if args[1] == "ls" => list_entries(), 1 if args[1] == "path" => data_file_path(), 1 => get_entry(&args[1]), 2 if args[1] == "add" => add_entry(&args[2]), 2 if args[1] == "get" => get_entry(&args[2]), 2 if args[1] == "rm" => remove_entry(&args[2]), _ => print_usage(&args[0]) } } else if args.len() > 1 { println!("Password file not found!\nIs this your first time usage?\n"); print_usage(&args[0]); } else { create_new_file(); } }
{ let contents = fs::load().expect(MSG_LOAD_ERR); let mut file = fman::decode(contents.as_slice()).expect(MSG_DECODE_ERR); let pw = scanpw!(MSG_ENTER_PW); println!(); let pw_hash = crypto::hash(vec![pw.as_bytes(), &file.head.salt[..]]); if pw_hash != file.head.pw_hash { println!("{}", MSG_WRONG_PW); return; } println!(msg_enter_field!(), "a description"); let entry_desc = get_input(); println!(msg_enter_field!(), "a user name"); let entry_user = get_input(); println!(msg_enter_field!(), "an email"); let entry_email = get_input(); println!(msg_enter_field!(), "other notes/observations"); let entry_notes = get_input(); let entry_pw = scanpw!("Enter a new password for this entry: "); println!(); let entry = fman::OpenEntry { desc: entry_desc, user: entry_user, email: entry_email, notes: entry_notes, pw: entry_pw }; if let Err(e) = file.add_entry(pw, entry_name.to_string(), entry) { println!("Could not add entry. Reason: {}", e); return; } fs::save(fman::encode(&file).expect(MSG_ENCODE_ERR)).expect(MSG_SAVE_ERR); println!("Entry '{}' added successfully.", entry_name); }
precise.rs
#![deny(disjoint_capture_drop_reorder)] //~^ NOTE: the lint level is defined here #[derive(Debug)] struct Foo(i32); impl Drop for Foo { fn drop(&mut self) { println!("{:?} dropped", self.0); } } struct ConstainsDropField(Foo, Foo); #[derive(Debug)] struct ContainsAndImplsDrop(Foo); impl Drop for ContainsAndImplsDrop { fn drop(&mut self) { println!("{:?} dropped", self.0); } } // Test that even if all paths starting at root variable that implement Drop are captured, // the lint is triggered if the root variable implements drop and isn't captured. fn test_precise_analysis_parent_root_impl_drop_not_captured() { let t = ContainsAndImplsDrop(Foo(10)); let c = || { //~^ERROR: drop order affected for closure because of `capture_disjoint_fields` //~| NOTE: drop(&(t)); let _t = t.0; }; c(); } // Test that lint is triggered if a path that implements Drop is not captured by move fn test_precise_analysis_drop_paths_not_captured_by_move() { let t = ConstainsDropField(Foo(10), Foo(20)); let c = || { //~^ERROR: drop order affected for closure because of `capture_disjoint_fields` //~| NOTE: drop(&(t)); let _t = t.0; let _t = &t.1; }; c(); } struct S; impl Drop for S { fn drop(&mut self) { } } struct
(S, S); struct U(T, T); // Test precise analysis for the lint works with paths longer than one. fn test_precise_analysis_long_path_missing() { let u = U(T(S, S), T(S, S)); let c = || { //~^ERROR: drop order affected for closure because of `capture_disjoint_fields` //~| NOTE: drop(&(u)); let _x = u.0.0; let _x = u.0.1; let _x = u.1.0; }; c(); } fn main() { test_precise_analysis_parent_root_impl_drop_not_captured(); test_precise_analysis_drop_paths_not_captured_by_move(); test_precise_analysis_long_path_missing(); }
T
button.spec.tsx
import React from 'react';
describe('Button component', () => { it('should correctly render text inside the button', () => { const MOCK_TEXT = 'mock text'; const { getByText } = render(<Button>{MOCK_TEXT}</Button>); const button = getByText(MOCK_TEXT); expect(button).toBeInTheDocument(); }); });
import { render } from '@testing-library/react'; import { Button } from './button';
mod.rs
#![allow(warnings)] use std::{ collections::{BTreeMap, BTreeSet, HashMap}, vec::Drain, }; use markup5ever::{namespace_url, ns, LocalName}; use syn::parse_str; use yarte_hir::{Each as HEach, IfElse as HIfElse, HIR}; use yarte_html::{ interface::{QualName, YName}, tree_builder::{get_marquee, is_marquee}, utils::{get_mark_id, parse_id, HASH_LEN, MARK}, }; use crate::sink::{ parse_document, parse_fragment, ParseAttribute, ParseElement, ParseNodeId, ParseResult, Sink, }; mod resolve; use self::resolve::{resolve_each, resolve_expr, resolve_if_block, resolve_local}; pub type Document = Vec<Node>; pub type ExprId = usize; pub type VarId = u64; #[derive(Debug, PartialEq)] pub struct VarInner { pub base: VarId, pub ident: String, } #[derive(Debug, PartialEq)] pub enum Var { This(VarInner), Local(ExprId, VarInner), } #[derive(Debug, PartialEq)] pub enum Node { Elem(Element), Expr(Expression), } #[derive(Debug, PartialEq)] pub enum Expression { Unsafe(ExprId, Box<syn::Expr>), Safe(ExprId, Box<syn::Expr>), Each(ExprId, Box<Each>), IfElse(ExprId, Box<IfElse>), Local(ExprId, VarId, Box<syn::Local>),
pub struct IfBlock { pub vars: Vec<VarId>, pub expr: syn::Expr, pub block: Document, } #[derive(Debug, PartialEq)] pub struct IfElse { pub ifs: IfBlock, pub if_else: Vec<IfBlock>, pub els: Option<Document>, } /// `for expr in args ` /// #[derive(Debug, PartialEq)] pub struct Each { pub var: (VarId, Option<VarId>), pub args: syn::Expr, pub body: Document, pub expr: syn::Expr, } #[derive(Debug, PartialEq)] pub enum Ns { Html, Svg, } #[derive(Debug, PartialEq)] pub enum Element { Node { name: (Ns, ExprOrText), attrs: Vec<Attribute>, children: Document, }, Text(String), } #[derive(Debug, PartialEq)] pub struct Attribute { pub name: ExprOrText, pub value: Vec<ExprOrText>, } #[derive(Debug, PartialEq)] pub enum ExprOrText { Text(String), Expr(Expression), } pub type TreeMap = BTreeMap<ExprId, BTreeSet<VarId>>; pub type VarMap = HashMap<VarId, Var>; #[derive(Debug)] pub struct DOM { pub doc: Document, pub tree_map: TreeMap, pub var_map: VarMap, } impl From<Vec<HIR>> for DOM { fn from(ir: Vec<HIR>) -> Self { DOMBuilder::default().build(ir) } } #[derive(Default)] pub struct DOMBuilder { inner: bool, count: usize, tree_map: TreeMap, var_map: VarMap, } impl DOMBuilder { fn build(mut self, ir: Vec<HIR>) -> DOM { DOM { doc: self.init(ir).expect("Dom builder"), tree_map: self.tree_map, var_map: self.var_map.into_iter().collect(), } } fn generate_html(&mut self, ir: Vec<HIR>) -> (Vec<HIR>, String) { let mut html = String::new(); let ir: Vec<HIR> = ir .into_iter() .filter(|x| match x { HIR::Lit(x) => { html.push_str(x); false } _ => { html.push_str(MARK); let id = self.count; self.count += 1; html.push_str(&format!("{:#010x?}", id)); true } }) .collect(); (ir, html) } fn init(&mut self, ir: Vec<HIR>) -> ParseResult<Document> { let (ir, html) = self.generate_html(ir); self.serialize(parse_document(&html)?, ir) } fn step(&mut self, ir: Vec<HIR>) -> ParseResult<Document> { self.inner = false; let (ir, html) = self.generate_html(ir); self.serialize(parse_fragment(&html)?, ir) } fn serialize(&mut self, sink: Sink, mut ir: Vec<HIR>) -> ParseResult<Document> { let mut ir = ir.drain(..); let nodes = match sink.nodes.values().next() { Some(ParseElement::Document(children)) => { self.inner = true; self.get_children(children, &sink, &mut ir)? } Some(ParseElement::Node { name, attrs, children, .. }) => { if is_marquee(name) { if self.inner { panic!("not use <{}> tag", &*get_marquee().local); } self.inner = true; self.get_children(children, &sink, &mut ir)? } else { vec![self.resolve_node(name, attrs, children, &sink, &mut ir)?] } } Some(ParseElement::Text(s)) => vec![self.resolve_text(s)], None => vec![], }; assert!(ir.next().is_none()); Ok(nodes) } fn resolve_node( &mut self, name: &QualName, attrs: &[ParseAttribute], children: &[ParseNodeId], sink: &Sink, ir: &mut Drain<HIR>, ) -> ParseResult<Node> { let ns = match name.ns { ns!(html) => Ns::Html, ns!(svg) => Ns::Svg, _ => panic!("Name space"), }; Ok(Node::Elem(Element::Node { name: (ns, self.resolve_y_name(&name.local, ir)?), attrs: self.resolve_attrs(attrs, ir)?, children: self.get_children(children, sink, ir)?, })) } fn resolve_y_name(&mut self, name: &YName, ir: &mut Drain<HIR>) -> ParseResult<ExprOrText> { Ok(match name { YName::Expr(s) => { let id = get_mark_id(&*s).expect("Valid mark") as usize; ExprOrText::Expr(self.resolve_expr(id, ir)?) } YName::Local(s) => ExprOrText::Text((&*s).to_string()), }) } fn resolve_attrs( &mut self, attrs: &[ParseAttribute], ir: &mut Drain<HIR>, ) -> ParseResult<Vec<Attribute>> { let mut buff = vec![]; for attr in attrs { buff.push(self.resolve_attr(attr, ir)?); } Ok(buff) } fn resolve_attr( &mut self, attr: &ParseAttribute, ir: &mut Drain<HIR>, ) -> ParseResult<Attribute> { let name = self.resolve_y_name(&attr.name.local, ir)?; // Event if let ExprOrText::Text(s) = &name { if s.starts_with("on") { let msg: syn::Expr = parse_str(&attr.value).expect("expression in on attribute"); let var = resolve_expr(&msg, self); let id = self.count; self.count += 1; self.tree_map.insert(id, var.into_iter().collect()); return Ok(Attribute { name, value: vec![ExprOrText::Expr(Expression::Safe(id, Box::new(msg)))], }); } } // Attribute let mut chunks = attr.value.split(MARK).peekable(); if let Some(first) = chunks.peek() { if first.is_empty() { chunks.next(); } } let mut value = vec![]; for chunk in chunks { if HASH_LEN < chunk.len() && &chunk[..2] == "0x" { if let Ok(id) = u32::from_str_radix(&chunk[2..HASH_LEN], 16).map(|x| x as usize) { if self.tree_map.contains_key(&id) { value.push(ExprOrText::Expr(self.resolve_expr(id, ir)?)); if !&chunk[HASH_LEN..].is_empty() { value.push(ExprOrText::Text(chunk[HASH_LEN..].into())) } continue; } } } value.push(ExprOrText::Text(chunk.into())) } Ok(Attribute { name, value }) } #[inline] fn resolve_mark(&mut self, id: usize, ir: &mut Drain<HIR>) -> ParseResult<Node> { Ok(Node::Expr(self.resolve_expr(id, ir)?)) } fn resolve_expr(&mut self, id: ExprId, ir: &mut Drain<HIR>) -> ParseResult<Expression> { let ir = ir.next().expect("Some HIR"); match ir { HIR::Expr(e) => { let var = resolve_expr(&e, self); self.tree_map.insert(id, var.into_iter().collect()); Ok(Expression::Unsafe(id, e)) } HIR::Safe(e) => { let var = resolve_expr(&e, self); self.tree_map.insert(id, var.into_iter().collect()); Ok(Expression::Safe(id, e)) } HIR::Local(e) => { let var_id = resolve_local(&e, id, self); Ok(Expression::Local(id, var_id, e)) } HIR::Each(e) => { let var = resolve_each(&e, id, self); let HEach { args, body, expr } = *e; Ok(Expression::Each( id, Box::new(Each { var, args, body: self.step(body)?, expr, }), )) } HIR::IfElse(e) => { let HIfElse { ifs, if_else, els } = *e; let (expr, body) = ifs; let vars = resolve_if_block(&expr, id, self); let ifs = IfBlock { vars, expr, block: self.step(body)?, }; let mut buff = vec![]; for (expr, body) in if_else { let vars = resolve_if_block(&expr, id, self); buff.push(IfBlock { vars, expr, block: self.step(body)?, }); } let els = if let Some(body) = els { Some(self.step(body)?) } else { None }; Ok(Expression::IfElse( id, Box::new(IfElse { ifs, if_else: buff, els, }), )) } HIR::Lit(_) => unreachable!(), } } #[inline] fn resolve_text(&mut self, s: &str) -> Node { Node::Elem(Element::Text(s.to_owned())) } fn get_children( &mut self, children: &[ParseNodeId], sink: &Sink, ir: &mut Drain<HIR>, ) -> ParseResult<Document> { let mut buff = vec![]; for child in children.iter().map(|x| sink.nodes.get(x).unwrap()) { match child { ParseElement::Text(s) => { let mut chunks = s.split(MARK).peekable(); if let Some(first) = chunks.peek() { if first.is_empty() { chunks.next(); } } for chunk in chunks { if chunk.is_empty() { panic!("chunk empty") } else if HASH_LEN <= chunk.len() { if let Some(id) = parse_id(&chunk[..HASH_LEN]) { buff.push(self.resolve_mark(id as usize, ir)?); let cut = &chunk[HASH_LEN..]; if !cut.is_empty() { buff.push(self.resolve_text(cut)); } } else { buff.push(self.resolve_text(chunk)); } } else { buff.push(self.resolve_text(chunk)); } } } ParseElement::Node { name, attrs, children, .. } => buff.push(self.resolve_node(name, attrs, children, sink, ir)?), ParseElement::Document(_) => unreachable!(), } } Ok(buff) } }
} #[derive(Debug, PartialEq)]
models.py
from django.conf import settings from django.db import models class ForwardedMessage(models.Model): "Generated Model" message = models.ForeignKey( "chat.Message", on_delete=models.CASCADE, related_name="forwardedmessage_message", ) forwarded_by = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="forwardedmessage_forwarded_by", ) forwarded_to = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="forwardedmessage_forwarded_to", ) timestamp_forwarded = models.DateTimeField( auto_now_add=True, ) class Message(models.Model): "Generated Model" message = models.TextField() thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="message_thread", ) sent_by = models.ForeignKey( "chat.ThreadMember", on_delete=models.CASCADE, related_name="message_sent_by", ) attachment = models.URLField() is_draft = models.BooleanField() is_delivered = models.BooleanField() is_read = models.BooleanField() timestamp_created = models.DateTimeField( auto_now_add=True, ) timestamp_delivered = models.DateTimeField() timestamp_read = models.DateTimeField() class Thread(models.Model): "Generated Model" name = models.CharField( max_length=255, ) thread_photo = models.URLField() timestamp_created = models.DateTimeField( auto_now_add=True, ) class ThreadAction(models.Model): "Generated Model" action = models.CharField( max_length=7, ) thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="threadaction_thread", ) profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="threadaction_profile", ) timestamp_action = models.DateTimeField( auto_now_add=True, ) class
(models.Model): "Generated Model" action = models.CharField( max_length=7, ) message = models.ForeignKey( "chat.Message", on_delete=models.CASCADE, related_name="messageaction_message", ) profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="messageaction_profile", ) timestamp_action = models.DateTimeField( auto_now_add=True, ) class ThreadMember(models.Model): "Generated Model" profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="threadmember_profile", ) thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="threadmember_thread", ) is_admin = models.BooleanField() timestamp_joined = models.DateTimeField( auto_now_add=True, ) timestamp_left = models.DateTimeField() last_rejoined = models.DateTimeField() # Create your models here.
MessageAction
registry.go
// Copyright 2015 flannel authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package etcdv2 import ( "encoding/json" "errors" "fmt" "path" "regexp" "sync" "time" etcd "github.com/coreos/etcd/client" "github.com/coreos/etcd/pkg/transport" "github.com/coreos/flannel/pkg/ip" . "github.com/coreos/flannel/subnet" "golang.org/x/net/context" log "k8s.io/klog" ) var ( errTryAgain = errors.New("try again") ) type Registry interface { getNetworkConfig(ctx context.Context) (string, error) getSubnets(ctx context.Context) ([]Lease, uint64, error) getSubnet(ctx context.Context, sn ip.IP4Net) (*Lease, uint64, error) createSubnet(ctx context.Context, sn ip.IP4Net, attrs *LeaseAttrs, ttl time.Duration) (time.Time, error) updateSubnet(ctx context.Context, sn ip.IP4Net, attrs *LeaseAttrs, ttl time.Duration, asof uint64) (time.Time, error) deleteSubnet(ctx context.Context, sn ip.IP4Net) error watchSubnets(ctx context.Context, since uint64) (Event, uint64, error) watchSubnet(ctx context.Context, since uint64, sn ip.IP4Net) (Event, uint64, error) } type EtcdConfig struct { Endpoints []string Keyfile string Certfile string CAFile string Prefix string Username string Password string } type etcdNewFunc func(c *EtcdConfig) (etcd.KeysAPI, error) type etcdSubnetRegistry struct { cliNewFunc etcdNewFunc mux sync.Mutex cli etcd.KeysAPI etcdCfg *EtcdConfig networkRegex *regexp.Regexp } func newEtcdClient(c *EtcdConfig) (etcd.KeysAPI, error) { tlsInfo := transport.TLSInfo{ CertFile: c.Certfile, KeyFile: c.Keyfile, CAFile: c.CAFile, } t, err := transport.NewTransport(tlsInfo, time.Second) if err != nil { return nil, err } cli, err := etcd.New(etcd.Config{ Endpoints: c.Endpoints, Transport: t, Username: c.Username, Password: c.Password, }) if err != nil { return nil, err } return etcd.NewKeysAPI(cli), nil } func newEtcdSubnetRegistry(config *EtcdConfig, cliNewFunc etcdNewFunc) (Registry, error)
func (esr *etcdSubnetRegistry) getNetworkConfig(ctx context.Context) (string, error) { key := path.Join(esr.etcdCfg.Prefix, "config") resp, err := esr.client().Get(ctx, key, &etcd.GetOptions{Quorum: true}) if err != nil { return "", err } return resp.Node.Value, nil } // getSubnets queries etcd to get a list of currently allocated leases for a given network. // It returns the leases along with the "as-of" etcd-index that can be used as the starting // point for etcd watch. func (esr *etcdSubnetRegistry) getSubnets(ctx context.Context) ([]Lease, uint64, error) { key := path.Join(esr.etcdCfg.Prefix, "subnets") resp, err := esr.client().Get(ctx, key, &etcd.GetOptions{Recursive: true, Quorum: true}) if err != nil { if etcdErr, ok := err.(etcd.Error); ok && etcdErr.Code == etcd.ErrorCodeKeyNotFound { // key not found: treat it as empty set return []Lease{}, etcdErr.Index, nil } return nil, 0, err } leases := []Lease{} for _, node := range resp.Node.Nodes { l, err := nodeToLease(node) if err != nil { log.Warningf("Ignoring bad subnet node: %v", err) continue } leases = append(leases, *l) } return leases, resp.Index, nil } func (esr *etcdSubnetRegistry) getSubnet(ctx context.Context, sn ip.IP4Net) (*Lease, uint64, error) { key := path.Join(esr.etcdCfg.Prefix, "subnets", MakeSubnetKey(sn)) resp, err := esr.client().Get(ctx, key, &etcd.GetOptions{Quorum: true}) if err != nil { return nil, 0, err } l, err := nodeToLease(resp.Node) return l, resp.Index, err } func (esr *etcdSubnetRegistry) createSubnet(ctx context.Context, sn ip.IP4Net, attrs *LeaseAttrs, ttl time.Duration) (time.Time, error) { key := path.Join(esr.etcdCfg.Prefix, "subnets", MakeSubnetKey(sn)) value, err := json.Marshal(attrs) if err != nil { return time.Time{}, err } opts := &etcd.SetOptions{ PrevExist: etcd.PrevNoExist, TTL: ttl, } resp, err := esr.client().Set(ctx, key, string(value), opts) if err != nil { return time.Time{}, err } exp := time.Time{} if resp.Node.Expiration != nil { exp = *resp.Node.Expiration } return exp, nil } func (esr *etcdSubnetRegistry) updateSubnet(ctx context.Context, sn ip.IP4Net, attrs *LeaseAttrs, ttl time.Duration, asof uint64) (time.Time, error) { key := path.Join(esr.etcdCfg.Prefix, "subnets", MakeSubnetKey(sn)) value, err := json.Marshal(attrs) if err != nil { return time.Time{}, err } resp, err := esr.client().Set(ctx, key, string(value), &etcd.SetOptions{ PrevIndex: asof, TTL: ttl, }) if err != nil { return time.Time{}, err } exp := time.Time{} if resp.Node.Expiration != nil { exp = *resp.Node.Expiration } return exp, nil } func (esr *etcdSubnetRegistry) deleteSubnet(ctx context.Context, sn ip.IP4Net) error { key := path.Join(esr.etcdCfg.Prefix, "subnets", MakeSubnetKey(sn)) _, err := esr.client().Delete(ctx, key, nil) return err } func (esr *etcdSubnetRegistry) watchSubnets(ctx context.Context, since uint64) (Event, uint64, error) { key := path.Join(esr.etcdCfg.Prefix, "subnets") opts := &etcd.WatcherOptions{ AfterIndex: since, Recursive: true, } e, err := esr.client().Watcher(key, opts).Next(ctx) if err != nil { return Event{}, 0, err } evt, err := parseSubnetWatchResponse(e) return evt, e.Node.ModifiedIndex, err } func (esr *etcdSubnetRegistry) watchSubnet(ctx context.Context, since uint64, sn ip.IP4Net) (Event, uint64, error) { key := path.Join(esr.etcdCfg.Prefix, "subnets", MakeSubnetKey(sn)) opts := &etcd.WatcherOptions{ AfterIndex: since, } e, err := esr.client().Watcher(key, opts).Next(ctx) if err != nil { return Event{}, 0, err } evt, err := parseSubnetWatchResponse(e) return evt, e.Node.ModifiedIndex, err } func (esr *etcdSubnetRegistry) client() etcd.KeysAPI { esr.mux.Lock() defer esr.mux.Unlock() return esr.cli } func (esr *etcdSubnetRegistry) resetClient() { esr.mux.Lock() defer esr.mux.Unlock() var err error esr.cli, err = newEtcdClient(esr.etcdCfg) if err != nil { panic(fmt.Errorf("resetClient: error recreating etcd client: %v", err)) } } func parseSubnetWatchResponse(resp *etcd.Response) (Event, error) { sn := ParseSubnetKey(resp.Node.Key) if sn == nil { return Event{}, fmt.Errorf("%v %q: not a subnet, skipping", resp.Action, resp.Node.Key) } switch resp.Action { case "delete", "expire": return Event{ EventRemoved, Lease{Subnet: *sn}, }, nil default: attrs := &LeaseAttrs{} err := json.Unmarshal([]byte(resp.Node.Value), attrs) if err != nil { return Event{}, err } exp := time.Time{} if resp.Node.Expiration != nil { exp = *resp.Node.Expiration } evt := Event{ EventAdded, Lease{ Subnet: *sn, Attrs: *attrs, Expiration: exp, }, } return evt, nil } } func nodeToLease(node *etcd.Node) (*Lease, error) { sn := ParseSubnetKey(node.Key) if sn == nil { return nil, fmt.Errorf("failed to parse subnet key %s", node.Key) } attrs := &LeaseAttrs{} if err := json.Unmarshal([]byte(node.Value), attrs); err != nil { return nil, err } exp := time.Time{} if node.Expiration != nil { exp = *node.Expiration } lease := Lease{ Subnet: *sn, Attrs: *attrs, Expiration: exp, Asof: node.ModifiedIndex, } return &lease, nil }
{ r := &etcdSubnetRegistry{ etcdCfg: config, networkRegex: regexp.MustCompile(config.Prefix + `/([^/]*)(/|/config)?$`), } if cliNewFunc != nil { r.cliNewFunc = cliNewFunc } else { r.cliNewFunc = newEtcdClient } var err error r.cli, err = r.cliNewFunc(config) if err != nil { return nil, err } return r, nil }
TestD3Component.ts
import { Component, OnInit, ElementRef } from '@angular/core'; import { D3Service, D3, Selection } from './../../node_modules/d3-ng2-service'; @Component({ selector: 'app-test-d3', template: '`<p>Hello</p>`', }) export class
implements OnInit { private d3: D3; // <-- Define the private member which will hold the d3 reference private parentNativeElement: any; constructor(element: ElementRef, d3Service: D3Service) { // <-- pass the D3 Service into the constructor this.d3 = d3Service.getD3(); // <-- obtain the d3 object from the D3 Service this.parentNativeElement = element.nativeElement; } ngOnInit() { let d3 = this.d3; // <-- for convenience use a block scope variable let d3ParentElement: Selection<any, any, any, any>; // <-- Use the Selection interface (very basic here for illustration only) // ... if (this.parentNativeElement !== null) { d3ParentElement = d3.select(this.parentNativeElement); // <-- use the D3 select method // Do more D3 things } } }
TestD3Component
utility.py
import numpy as np class UtilInverse(): def __init__(self, verbose=True): self.verbose = verbose def find_nearest_ind(self, array, value): index = [] for ind in range(len(array)-1): if array[ind] < value and array[ind+1] > value: index.append(ind) if array[ind] > value and array[ind+1] < value: index.append(ind) return index def sort_array_by_column(self, array, order=['f0']): bits = 'i8'+',i8'*(len(array[0])-1) array.view(bits).sort(order=order, axis=0) return array class UtilStability(): def __init__(self, verbose=True): self.verbose = verbose def retrieve_extrema(self, w, r): self.check_for_stable_point(w, self.verbose) min_mask = np.r_[True, w[1:] < w[:-1]] & np.r_[w[:-1] < w[1:], True] max_mask = np.r_[True, w[1:] > w[:-1]] & np.r_[w[:-1] > w[1:], True] w_min = w[min_mask] r_min = r[min_mask] w_max = w[max_mask] r_max = r[max_mask] try: if w_min[0] == w[0]: w_min = np.delete(w_min, 0) r_min = np.delete(r_min, 0) if w_max[-1] == w[-1]: w_max = np.delete(w_max, -1) r_max = np.delete(r_max, -1) if self.verbose: print('Simple extremum analysis: ') print('- W has maximum/a at w='+str(w_max.tolist())) print('- W has minimum/a at w='+str(w_min.tolist())) return w_min.tolist(), w_max.tolist(), r_min.tolist(), r_max.tolist() except: return [0], [0], [0], [0] def check_for_stable_point(self, w, exit_if_not_stable=False): ''' Checks if array has at least one minimum and its maximum is only local ''' min_mask = np.r_[True, w[1:] < w[:-1]] & np.r_[w[:-1] < w[1:], True] max_mask = np.r_[True, w[1:] > w[:-1]] & np.r_[w[:-1] > w[1:], True] w_min = w[min_mask] w_max = w[max_mask] ## if w_max[0] == w[0] or w_max[0] == w[1]: ## ''' ## The potentianl comes from +inf, so its not a stable point. ## ''' ## raise ValueError() if len(w_min) < 2 and len(w_max) < 2: ''' The function is monotonically. There is no stable point. ''' self._error_monotonically(exit_if_not_stable) elif len(w_min) < 1 or len(w_max) < 1: ''' The function has either a local maximum OR local minimum, but not both, thus is not stable ''' self._error_only_one_extremum(exit_if_not_stable) elif w_max[0] > w_max[1]: ''' The potential is not closed, there is no Roche limit. Matter will extend into infitiy. ''' self._error_no_roche_limit(exit_if_not_stable) elif self.verbose and len(w_min) > 1 and len(w_max) > 1: print('Potential is possibly stable') return 0 def closure_rating_function(self, w, r): wmin, wmax, rmin, rmax = self.retrieve_extrema(w, r) int_l = np.where(r == rmax[0])[0][0] int_r = np.where(w > wmax[0])[0][0] area_func = abs(w[int_l:int_r] - wmax[-1]) area = np.trapz(area_func) return area def _error_monotonically(self, flag): if flag: raise ValueError('Potential not closed, potential is monotonically.') else: if self.verbose: print('WARNING: Potential not closed, potential is monotonically.') def _error_only_one_extremum(self, flag): if flag: raise ValueError('Potential not closed, only has one extremum.') else: if self.verbose: print('WARNING: Potential not closed, only has one extremum.') def _error_no_roche_limit(self, flag): if flag: raise ValueError('Potential is not closed, matter extends into infinity.') else: if self.verbose:
print('WARNING: Potential not close, no Roche limit.')
noop_log.go
package log type noop struct{} // NewNoop creates a no-op logger that can be used to silence // all logging from this library. Also useful in tests. func NewNoop() Logger
// Debug log message no-op func (n *noop) Debug(msg ...interface{}) {} // Info log message no-op func (n *noop) Info(msg ...interface{}) {} // Warn log message no-op func (n *noop) Warn(msg ...interface{}) {} // Error log message no-op func (n *noop) Error(msg ...interface{}) {} // Fatal log message no-op func (n *noop) Fatal(msg ...interface{}) {} // Panic log message no-op func (n *noop) Panic(msg ...interface{}) {} // Debugf log message with formatting no-op func (n *noop) Debugf(format string, args ...interface{}) {} // Infof log message with formatting no-op func (n *noop) Infof(format string, args ...interface{}) {} // Warnf log message with formatting no-op func (n *noop) Warnf(format string, args ...interface{}) {} // Errorf log message with formatting no-op func (n *noop) Errorf(format string, args ...interface{}) {} // Fatalf log message with formatting no-op func (n *noop) Fatalf(format string, args ...interface{}) {} // Panicf log message with formatting no-op func (n *noop) Panicf(format string, args ...interface{}) {} // WithFields no-op func (n *noop) WithFields(fields Fields) Logger { return n }
{ return &noop{} }
tasks_constlat.rs
#[doc = "Register `TASKS_CONSTLAT` writer"] pub struct W(crate::W<TASKS_CONSTLAT_SPEC>); impl core::ops::Deref for W { type Target = crate::W<TASKS_CONSTLAT_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<TASKS_CONSTLAT_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<TASKS_CONSTLAT_SPEC>) -> Self { W(writer)
} impl W { #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Enable constant latency mode.\n\nThis register you can [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [tasks_constlat](index.html) module"] pub struct TASKS_CONSTLAT_SPEC; impl crate::RegisterSpec for TASKS_CONSTLAT_SPEC { type Ux = u32; } #[doc = "`write(|w| ..)` method takes [tasks_constlat::W](W) writer structure"] impl crate::Writable for TASKS_CONSTLAT_SPEC { type Writer = W; } #[doc = "`reset()` method sets TASKS_CONSTLAT to value 0"] impl crate::Resettable for TASKS_CONSTLAT_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
}
noormdropdb.js
const noOrmDropDb = `import db from '../models/setup'; db.query('drop database users', (err, result) => { if (err) { console.log("The Error", error); } console.log('Successfully dropped db') }); `; module.exports = noOrmDropDb;
util.rs
use crate::data_store::DataStore; use colored::*; use indoc::eprintdoc; use log::{Level, LevelFilter}; use serde::{de::DeserializeOwned, Serialize}; use shared::{interface_config::ServerInfo, PeerDiff, INNERNET_PUBKEY_HEADER, Interface}; use std::{io, path::Path, time::Duration, ffi::OsStr}; use ureq::{Agent, AgentBuilder}; static LOGGER: Logger = Logger; struct Logger; const BASE_MODULES: &[&str] = &["innernet", "shared"]; fn target_is_base(target: &str) -> bool { BASE_MODULES .iter() .any(|module| module == &target || target.starts_with(&format!("{}::", module))) } impl log::Log for Logger { fn enabled(&self, metadata: &log::Metadata) -> bool { metadata.level() <= log::max_level() && (log::max_level() == LevelFilter::Trace || target_is_base(metadata.target())) } fn log(&self, record: &log::Record) { if self.enabled(record.metadata()) { let level_str = match record.level() { Level::Error => "[E]".red(), Level::Warn => "[!]".yellow(), Level::Info => "[*]".dimmed(), Level::Debug => "[D]".blue(), Level::Trace => "[T]".purple(), }; if record.level() <= LevelFilter::Debug && !target_is_base(record.target()) { println!( "{} {} {}", level_str, format!("[{}]", record.target()).dimmed(), record.args() ); } else { println!("{} {}", level_str, record.args()); } } } fn flush(&self) {} } pub fn init_logger(verbosity: u64) { let level = match verbosity { 0 => log::LevelFilter::Info, 1 => log::LevelFilter::Debug, _ => log::LevelFilter::Trace, }; log::set_max_level(level); log::set_logger(&LOGGER).unwrap(); } pub fn human_duration(duration: Duration) -> String { match duration.as_secs() { n if n < 1 => "just now".cyan().to_string(), n if n < 60 => format!("{} {} ago", n, "seconds".cyan()), n if n < 60 * 60 => { let mins = n / 60; let secs = n % 60; format!( "{} {}, {} {} ago", mins, if mins == 1 { "minute" } else { "minutes" }.cyan(), secs, if secs == 1 { "second" } else { "seconds" }.cyan(), ) }, n => { let hours = n / (60 * 60); let mins = (n / 60) % 60; format!( "{} {}, {} {} ago", hours, if hours == 1 { "hour" } else { "hours" }.cyan(), mins, if mins == 1 { "minute" } else { "minutes" }.cyan(), ) }, } } pub fn
(bytes: u64) -> String { const KB: u64 = 1024; const MB: u64 = 1024 * KB; const GB: u64 = 1024 * MB; const TB: u64 = 1024 * GB; match bytes { n if n < 2 * KB => format!("{} {}", n, "B".cyan()), n if n < 2 * MB => format!("{:.2} {}", n as f64 / KB as f64, "KiB".cyan()), n if n < 2 * GB => format!("{:.2} {}", n as f64 / MB as f64, "MiB".cyan()), n if n < 2 * TB => format!("{:.2} {}", n as f64 / GB as f64, "GiB".cyan()), n => format!("{:.2} {}", n as f64 / TB as f64, "TiB".cyan()), } } pub fn permissions_helptext(config_dir: &Path, data_dir: &Path, e: &io::Error) { if e.raw_os_error() == Some(1) { let current_exe = std::env::current_exe() .ok() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "<innernet path>".into()); eprintdoc!( "{}: innernet can't access the device info. You either need to run innernet as root, or give innernet CAP_NET_ADMIN capabilities: sudo setcap cap_net_admin+eip {} ", "ERROR".bold().red(), current_exe ); } else if e.kind() == io::ErrorKind::PermissionDenied { eprintdoc!( "{}: innernet can't access its config/data folders. You either need to run innernet as root, or give the user/group running innernet permissions to access {config} and {data}. For non-root permissions, it's recommended to create an \"innernet\" group, and run for example: sudo chgrp -R innernet {config} {data} sudo chmod -R g+rwX {config} {data} ", "ERROR".bold().red(), config = config_dir.to_string_lossy(), data = data_dir.to_string_lossy(), ); } } pub fn print_peer_diff(store: &DataStore, diff: &PeerDiff) { let public_key = diff.public_key().to_base64(); let text = match (diff.old, diff.new) { (None, Some(_)) => "added".green(), (Some(_), Some(_)) => "modified".yellow(), (Some(_), None) => "removed".red(), _ => unreachable!("PeerDiff can't be None -> None"), }; // Grab the peer name from either the new data, or the historical data (if the peer is removed). let peer_hostname = match diff.new { Some(peer) => Some(peer.name.clone()), None => store .peers() .iter() .find(|p| p.public_key == public_key) .map(|p| p.name.clone()), }; let peer_name = peer_hostname.as_deref().unwrap_or("[unknown]"); log::info!( " peer {} ({}...) was {}.", peer_name.yellow(), &public_key[..10].dimmed(), text ); for change in diff.changes() { log::debug!(" {}", change); } } pub fn all_installed(config_dir: &Path) -> Result<Vec<Interface>, std::io::Error> { // All errors are bubbled up when enumerating a directory let entries: Vec<_> = std::fs::read_dir(config_dir)? .into_iter() .collect::<Result<_, _>>()?; let installed: Vec<_> = entries.into_iter() .filter(|entry| match entry.file_type() { Ok(f) => f.is_file(), _ => false }) .filter_map(|entry| { let path = entry.path(); match (path.extension(), path.file_stem()) { (Some(extension), Some(stem)) if extension == OsStr::new("conf") => { Some(stem.to_string_lossy().to_string()) } _ => None } }) .map(|name| name.parse()) .collect::<Result<_, _>>()?; Ok(installed) } pub struct Api<'a> { agent: Agent, server: &'a ServerInfo, } impl<'a> Api<'a> { pub fn new(server: &'a ServerInfo) -> Self { let agent = AgentBuilder::new() .timeout(Duration::from_secs(5)) .redirects(0) .build(); Self { agent, server } } pub fn http<T: DeserializeOwned>(&self, verb: &str, endpoint: &str) -> Result<T, ureq::Error> { self.request::<(), _>(verb, endpoint, None) } pub fn http_form<S: Serialize, T: DeserializeOwned>( &self, verb: &str, endpoint: &str, form: S, ) -> Result<T, ureq::Error> { self.request(verb, endpoint, Some(form)) } fn request<S: Serialize, T: DeserializeOwned>( &self, verb: &str, endpoint: &str, form: Option<S>, ) -> Result<T, ureq::Error> { let request = self .agent .request( verb, &format!("http://{}/v1{}", self.server.internal_endpoint, endpoint), ) .set(INNERNET_PUBKEY_HEADER, &self.server.public_key); let response = if let Some(form) = form { request.send_json(serde_json::to_value(form).map_err(|e| { io::Error::new( io::ErrorKind::InvalidData, format!("failed to serialize JSON request: {}", e), ) })?)? } else { request.call()? }; let mut response = response.into_string()?; // A little trick for serde to parse an empty response as `()`. if response.is_empty() { response = "null".into(); } Ok(serde_json::from_str(&response).map_err(|e| { io::Error::new( io::ErrorKind::InvalidData, format!( "failed to deserialize JSON response from the server: {}, response={}", e, &response ), ) })?) } }
human_size
main08.rs
use chrono::prelude::*; use clap::{App, Arg}; use hdrhistogram::Histogram; use serde::{Deserialize, Serialize}; use std::error::Error; // ロケーションIDを示すLocId型をu16型の別名にする type LocId = u16; // NaiveDateTimeは長いのでDTという別名を定義する type DT = NaiveDateTime; // chrono::NaiveDateTimeはタイムゾーンなしの日時型 // ついでにResult型の別名を定義する type AppResult<T> = Result<T, Box<dyn Error>>; // Debugトレイトとserde::Deserializeトレイトを自動導出する #[derive(Debug, Deserialize)] struct Trip { // renameアトリビュートでフィールド名とCSVのカラム名を結びつける #[serde(rename = "tpep_pickup_datetime")] pickup_datetime: String, // 乗車日時 #[serde(rename = "tpep_dropoff_datetime")] dropoff_datetime: String, // 降車日時 #[serde(rename = "PULocationID")] pickup_loc: LocId, // 乗車地ID #[serde(rename = "DOLocationID")] dropoff_loc: LocId, // 降車地ID } // serde_jsonでJSON文字列を生成するためにSerializeを自動導出する #[derive(Debug, Serialize)] struct RecordCounts { read: u32, // CSVファイルから読み込んだ総レコード数 matched: u32, // 乗車地や降車地などの条件を満たしたレコードの数 skipped: u32, // 条件は満たしたが異常値により除外したレコードの数 } impl Default for RecordCounts { fn default() -> Self { Self { read: 0, // read: u32::default(), としてもよい matched: 0, skipped: 0, } } } // CSVファイルのパスを引数にとり、データを分析する fn analyze(infile: &str) -> Result<String, Box<dyn Error>> { // CSVリーダーを作る。失敗したときは?後置演算子の働きにより、analyze()関数から // すぐにリターンし、処理の失敗を表すResult::Errを返す let mut reader = csv::Reader::from_path(infile)?; let mut rec_counts = RecordCounts::default(); let mut hist = DurationHistograms::new()?; for (i, result) in reader.deserialize().enumerate() { // どの型へデシリアライズするかをdeserialize()メソッドに教えるために // trip変数に型アノテーションをつける let trip: Trip = result?; rec_counts.read += 1; if is_jfk_airport(trip.dropoff_loc) && is_in_midtown(trip.pickup_loc) { let pickup = parse_datetime(&trip.pickup_datetime)?; if is_weekday(pickup) { rec_counts.matched += 1; let dropoff = parse_datetime(&trip.dropoff_datetime)?; hist.record_duration(pickup, dropoff) .unwrap_or_else(|e| { eprintln!("WARN: {} - {}. Skipped: {:?}", i + 2, e, trip); rec_counts.skipped += 1; }); } } } // 読み込んだレコード数を表示する println!("{:?}", rec_counts); // フォーマット文字列を修正する // 処理に成功したので(とりあえず空の文字列を包んだ)Result::Okを返す Ok(String::default()) } fn main() { // clap::Appを使ってコマンドライン名やバージョンなどを設定する let arg_matches = App::new("trip-analyzer") .version("1.0") .about("Analyze yellow cab trip records") // INFILEという名前のコマンドライン引数を登録する .arg(Arg::with_name("INFILE") .help("Sets the input CSV file") .index(1) // 最初の引数 .required(true) // 引数を必須にする ) // get_matches()メソッドを呼ぶとユーザが与えたコマンドライン引数が // パースされる // ユーザが必須のコマンドライン引数を与えなかったときは、この時点で // エラーメッセージが表示され、プログラムが終了する .get_matches(); // INFILEは必須のためSome(..)しか返らない。必ずunwrap()できる let infile = arg_matches.value_of("INFILE").unwrap(); match analyze(infile) { Ok(json) => println!("{}", json), // 標準出力にJSON文字列を表示する Err(e) => { eprintln!("Error: {}", e); // 標準エラー出力にエラーを表示する std::process::exit(1); // ステータスコード1でプログラムを終了する } } } // 日時を表す文字列をDT型へ変換する fn parse_datetime(s: &str) -> AppResult<DT> { DT::parse_from_str(s, "%Y-%m-%d %H:%M:%S").map_err(|e| e.into()) } // LocIdがミッドタウン内ならtrueを返す fn is_in_midtown(loc: LocId) -> bool { // LocIdの配列を作る let locations = [90, 100, 161, 162, 163, 164, 186, 230, 234]; // 配列に対してバイナリサーチする。locと同じ値があればOk(値のインデックス)が返る locations.binary_search(&loc).is_ok() } // ロケーションIDがJFK国際空港ならtrueを返す fn is_jfk_airport(loc: LocId) -> bool { loc == 132 } // 月曜から金曜なら`true`を返す fn is_weekday(datetime: DT) -> bool { // 月:1, 火:2, .. 金:5, 土:6, 日:7 datetime.weekday().number_from_monday() <= 5 } // DurationHistogramsをタプル構造体として定義する // この構造体はHistogramを24個持つことで、1時間刻みの時間帯ごとに所要時間の // ヒストグラムデータを追跡する struct DurationHistograms(Vec<Histogram<u64>>); // Vec<T>型は配列の一種 // 関連関数やメソッドを実装するためにimplブロックを作る impl DurationHistograms { // Histogramsを初期化する関連関数。記録する上限値を引数にとる fn new() -> AppResult<Self> { let lower_bound = 1; // 記録する下限値。1秒 let upper_bound = 3 * 60 * 60; // 記録する上限値。3時間 let hist = Histogram::new_with_bounds(lower_bound, upper_bound, 3) .map_err(|e| format!("{:?}", e))?; // histの値を24回複製してVec<T>配列に収集する let histograms = std::iter::repeat(hist).take(24).collect(); Ok(Self(histograms)) } fn record_duration(&mut self, pickup: DT, dropoff: DT) -> AppResult<()> { // 所要時間を秒で求める。結果はi64型になるが as u64 でu64型に変換する let duration = (dropoff - pickup).num_seconds() as u64; // record_duration()メソッドの続き // 20分未満はエラーにする if duration < 20 * 60 { Err(format!("duration secs {} is too short.", duration).into()) } else { let hour = pickup.hour() as usize; // タプル構造体の最初のフィールドの名前は0になるので、self.0でVec<Histogram>に // アクセスできる。さら
するには [インデックス] で // その要素のインデックスを指定する self.0[hour] // Histogramのrecord()メソッドで所要時間を記録する .record(duration) // このメソッドはHistogramの作成時に設定した上限(upper_bound)を超えていると // Err(RecordError)を返すので、map_err()でErr(String)に変換する .map_err(|e| { format!("duration secs {} is too long. {:?}", duration, e).into() }) } } }
に個々のHistogramにアクセス
deserializers.go
// Code generated by smithy-go-codegen DO NOT EDIT. package appstream import ( "bytes" "context" "encoding/json" "fmt" "github.com/aws/aws-sdk-go-v2/aws/protocol/restjson" "github.com/aws/aws-sdk-go-v2/service/appstream/types" smithy "github.com/aws/smithy-go" smithyio "github.com/aws/smithy-go/io" "github.com/aws/smithy-go/middleware" "github.com/aws/smithy-go/ptr" smithytime "github.com/aws/smithy-go/time" smithyhttp "github.com/aws/smithy-go/transport/http" "io" "strings" ) type awsAwsjson11_deserializeOpAssociateApplicationFleet struct { } func (*awsAwsjson11_deserializeOpAssociateApplicationFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpAssociateApplicationFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorAssociateApplicationFleet(response, &metadata) } output := &AssociateApplicationFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentAssociateApplicationFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorAssociateApplicationFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpAssociateFleet struct { } func (*awsAwsjson11_deserializeOpAssociateFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpAssociateFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorAssociateFleet(response, &metadata) } output := &AssociateFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentAssociateFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorAssociateFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("IncompatibleImageException", errorCode): return awsAwsjson11_deserializeErrorIncompatibleImageException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpBatchAssociateUserStack struct { } func (*awsAwsjson11_deserializeOpBatchAssociateUserStack) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpBatchAssociateUserStack) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorBatchAssociateUserStack(response, &metadata) } output := &BatchAssociateUserStackOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentBatchAssociateUserStackOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorBatchAssociateUserStack(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpBatchDisassociateUserStack struct { } func (*awsAwsjson11_deserializeOpBatchDisassociateUserStack) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpBatchDisassociateUserStack) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorBatchDisassociateUserStack(response, &metadata) } output := &BatchDisassociateUserStackOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentBatchDisassociateUserStackOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorBatchDisassociateUserStack(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCopyImage struct { } func (*awsAwsjson11_deserializeOpCopyImage) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCopyImage) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCopyImage(response, &metadata) } output := &CopyImageOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCopyImageOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCopyImage(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("IncompatibleImageException", errorCode): return awsAwsjson11_deserializeErrorIncompatibleImageException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateAppBlock struct { } func (*awsAwsjson11_deserializeOpCreateAppBlock) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateAppBlock) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateAppBlock(response, &metadata) } output := &CreateAppBlockOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateAppBlockOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateAppBlock(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateApplication struct { } func (*awsAwsjson11_deserializeOpCreateApplication) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateApplication) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateApplication(response, &metadata) } output := &CreateApplicationOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateApplicationOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateApplication(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateDirectoryConfig struct { } func (*awsAwsjson11_deserializeOpCreateDirectoryConfig) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateDirectoryConfig) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateDirectoryConfig(response, &metadata) } output := &CreateDirectoryConfigOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateDirectoryConfigOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateDirectoryConfig(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateFleet struct { } func (*awsAwsjson11_deserializeOpCreateFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateFleet(response, &metadata) } output := &CreateFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("IncompatibleImageException", errorCode): return awsAwsjson11_deserializeErrorIncompatibleImageException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("RequestLimitExceededException", errorCode): return awsAwsjson11_deserializeErrorRequestLimitExceededException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateImageBuilder struct { } func (*awsAwsjson11_deserializeOpCreateImageBuilder) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateImageBuilder) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateImageBuilder(response, &metadata) } output := &CreateImageBuilderOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateImageBuilderOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateImageBuilder(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("IncompatibleImageException", errorCode): return awsAwsjson11_deserializeErrorIncompatibleImageException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("RequestLimitExceededException", errorCode): return awsAwsjson11_deserializeErrorRequestLimitExceededException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateImageBuilderStreamingURL struct { } func (*awsAwsjson11_deserializeOpCreateImageBuilderStreamingURL) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateImageBuilderStreamingURL) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateImageBuilderStreamingURL(response, &metadata) } output := &CreateImageBuilderStreamingURLOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateImageBuilderStreamingURLOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateImageBuilderStreamingURL(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateStack struct { } func (*awsAwsjson11_deserializeOpCreateStack) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateStack) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateStack(response, &metadata) } output := &CreateStackOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateStackOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateStack(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateStreamingURL struct { } func (*awsAwsjson11_deserializeOpCreateStreamingURL) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateStreamingURL) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateStreamingURL(response, &metadata) } output := &CreateStreamingURLOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateStreamingURLOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateStreamingURL(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateUpdatedImage struct { } func (*awsAwsjson11_deserializeOpCreateUpdatedImage) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateUpdatedImage) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateUpdatedImage(response, &metadata) } output := &CreateUpdatedImageOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateUpdatedImageOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateUpdatedImage(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("IncompatibleImageException", errorCode): return awsAwsjson11_deserializeErrorIncompatibleImageException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateUsageReportSubscription struct { } func (*awsAwsjson11_deserializeOpCreateUsageReportSubscription) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateUsageReportSubscription) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateUsageReportSubscription(response, &metadata) } output := &CreateUsageReportSubscriptionOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateUsageReportSubscriptionOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateUsageReportSubscription(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpCreateUser struct { } func (*awsAwsjson11_deserializeOpCreateUser) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpCreateUser) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorCreateUser(response, &metadata) } output := &CreateUserOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentCreateUserOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorCreateUser(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceAlreadyExistsException", errorCode): return awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteAppBlock struct { } func (*awsAwsjson11_deserializeOpDeleteAppBlock) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteAppBlock) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteAppBlock(response, &metadata) } output := &DeleteAppBlockOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteAppBlockOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteAppBlock(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteApplication struct { } func (*awsAwsjson11_deserializeOpDeleteApplication) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteApplication) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteApplication(response, &metadata) } output := &DeleteApplicationOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteApplicationOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteApplication(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteDirectoryConfig struct { } func (*awsAwsjson11_deserializeOpDeleteDirectoryConfig) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteDirectoryConfig) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteDirectoryConfig(response, &metadata) } output := &DeleteDirectoryConfigOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteDirectoryConfigOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteDirectoryConfig(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteFleet struct { } func (*awsAwsjson11_deserializeOpDeleteFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteFleet(response, &metadata) } output := &DeleteFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteImage struct { } func (*awsAwsjson11_deserializeOpDeleteImage) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteImage) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteImage(response, &metadata) } output := &DeleteImageOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteImageOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteImage(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteImageBuilder struct { } func (*awsAwsjson11_deserializeOpDeleteImageBuilder) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteImageBuilder) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteImageBuilder(response, &metadata) } output := &DeleteImageBuilderOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteImageBuilderOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteImageBuilder(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteImagePermissions struct { } func (*awsAwsjson11_deserializeOpDeleteImagePermissions) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteImagePermissions) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteImagePermissions(response, &metadata) } output := &DeleteImagePermissionsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteImagePermissionsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteImagePermissions(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteStack struct { } func (*awsAwsjson11_deserializeOpDeleteStack) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteStack) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteStack(response, &metadata) } output := &DeleteStackOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteStackOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteStack(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteUsageReportSubscription struct { } func (*awsAwsjson11_deserializeOpDeleteUsageReportSubscription) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteUsageReportSubscription) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteUsageReportSubscription(response, &metadata) } output := &DeleteUsageReportSubscriptionOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteUsageReportSubscriptionOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteUsageReportSubscription(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDeleteUser struct { } func (*awsAwsjson11_deserializeOpDeleteUser) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDeleteUser) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDeleteUser(response, &metadata) } output := &DeleteUserOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDeleteUserOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDeleteUser(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeAppBlocks struct { } func (*awsAwsjson11_deserializeOpDescribeAppBlocks) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeAppBlocks) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeAppBlocks(response, &metadata) } output := &DescribeAppBlocksOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeAppBlocksOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeAppBlocks(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeApplicationFleetAssociations struct { } func (*awsAwsjson11_deserializeOpDescribeApplicationFleetAssociations) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeApplicationFleetAssociations) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeApplicationFleetAssociations(response, &metadata) } output := &DescribeApplicationFleetAssociationsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeApplicationFleetAssociationsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeApplicationFleetAssociations(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeApplications struct { } func (*awsAwsjson11_deserializeOpDescribeApplications) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeApplications) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeApplications(response, &metadata) } output := &DescribeApplicationsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeApplicationsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeApplications(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeDirectoryConfigs struct { } func (*awsAwsjson11_deserializeOpDescribeDirectoryConfigs) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeDirectoryConfigs) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeDirectoryConfigs(response, &metadata) } output := &DescribeDirectoryConfigsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeDirectoryConfigsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeDirectoryConfigs(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeFleets struct { } func (*awsAwsjson11_deserializeOpDescribeFleets) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeFleets) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeFleets(response, &metadata) } output := &DescribeFleetsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeFleetsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeFleets(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeImageBuilders struct { } func (*awsAwsjson11_deserializeOpDescribeImageBuilders) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeImageBuilders) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeImageBuilders(response, &metadata) } output := &DescribeImageBuildersOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeImageBuildersOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeImageBuilders(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeImagePermissions struct { } func (*awsAwsjson11_deserializeOpDescribeImagePermissions) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeImagePermissions) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeImagePermissions(response, &metadata) } output := &DescribeImagePermissionsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeImagePermissionsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeImagePermissions(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeImages struct { } func (*awsAwsjson11_deserializeOpDescribeImages) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeImages) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeImages(response, &metadata) } output := &DescribeImagesOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeImagesOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeImages(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeSessions struct { } func (*awsAwsjson11_deserializeOpDescribeSessions) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeSessions) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeSessions(response, &metadata) } output := &DescribeSessionsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeSessionsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeSessions(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeStacks struct { } func (*awsAwsjson11_deserializeOpDescribeStacks) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeStacks) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeStacks(response, &metadata) } output := &DescribeStacksOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeStacksOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeStacks(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeUsageReportSubscriptions struct { } func (*awsAwsjson11_deserializeOpDescribeUsageReportSubscriptions) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeUsageReportSubscriptions) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeUsageReportSubscriptions(response, &metadata) } output := &DescribeUsageReportSubscriptionsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeUsageReportSubscriptionsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeUsageReportSubscriptions(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeUsers struct { } func (*awsAwsjson11_deserializeOpDescribeUsers) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeUsers) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeUsers(response, &metadata) } output := &DescribeUsersOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeUsersOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeUsers(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDescribeUserStackAssociations struct { } func (*awsAwsjson11_deserializeOpDescribeUserStackAssociations) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDescribeUserStackAssociations) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDescribeUserStackAssociations(response, &metadata) } output := &DescribeUserStackAssociationsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDescribeUserStackAssociationsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDescribeUserStackAssociations(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDisableUser struct { } func (*awsAwsjson11_deserializeOpDisableUser) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDisableUser) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDisableUser(response, &metadata) } output := &DisableUserOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDisableUserOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDisableUser(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDisassociateApplicationFleet struct { } func (*awsAwsjson11_deserializeOpDisassociateApplicationFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDisassociateApplicationFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDisassociateApplicationFleet(response, &metadata) } output := &DisassociateApplicationFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDisassociateApplicationFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDisassociateApplicationFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpDisassociateFleet struct { } func (*awsAwsjson11_deserializeOpDisassociateFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpDisassociateFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorDisassociateFleet(response, &metadata) } output := &DisassociateFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentDisassociateFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorDisassociateFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpEnableUser struct { } func (*awsAwsjson11_deserializeOpEnableUser) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpEnableUser) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorEnableUser(response, &metadata) } output := &EnableUserOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentEnableUserOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorEnableUser(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpExpireSession struct { } func (*awsAwsjson11_deserializeOpExpireSession) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpExpireSession) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorExpireSession(response, &metadata) } output := &ExpireSessionOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentExpireSessionOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorExpireSession(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpListAssociatedFleets struct { } func (*awsAwsjson11_deserializeOpListAssociatedFleets) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpListAssociatedFleets) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorListAssociatedFleets(response, &metadata) } output := &ListAssociatedFleetsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentListAssociatedFleetsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorListAssociatedFleets(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpListAssociatedStacks struct { } func (*awsAwsjson11_deserializeOpListAssociatedStacks) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpListAssociatedStacks) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorListAssociatedStacks(response, &metadata) } output := &ListAssociatedStacksOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentListAssociatedStacksOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorListAssociatedStacks(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpListTagsForResource struct { } func (*awsAwsjson11_deserializeOpListTagsForResource) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpListTagsForResource) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorListTagsForResource(response, &metadata) } output := &ListTagsForResourceOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentListTagsForResourceOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorListTagsForResource(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpStartFleet struct { } func (*awsAwsjson11_deserializeOpStartFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpStartFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorStartFleet(response, &metadata) } output := &StartFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentStartFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorStartFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("RequestLimitExceededException", errorCode): return awsAwsjson11_deserializeErrorRequestLimitExceededException(response, errorBody) case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpStartImageBuilder struct { } func (*awsAwsjson11_deserializeOpStartImageBuilder) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpStartImageBuilder) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorStartImageBuilder(response, &metadata) } output := &StartImageBuilderOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentStartImageBuilderOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorStartImageBuilder(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("IncompatibleImageException", errorCode): return awsAwsjson11_deserializeErrorIncompatibleImageException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpStopFleet struct { } func (*awsAwsjson11_deserializeOpStopFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpStopFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorStopFleet(response, &metadata) } output := &StopFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentStopFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorStopFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpStopImageBuilder struct { } func (*awsAwsjson11_deserializeOpStopImageBuilder) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpStopImageBuilder) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorStopImageBuilder(response, &metadata) } output := &StopImageBuilderOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentStopImageBuilderOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorStopImageBuilder(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpTagResource struct { } func (*awsAwsjson11_deserializeOpTagResource) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpTagResource) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorTagResource(response, &metadata) } output := &TagResourceOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentTagResourceOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorTagResource(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpUntagResource struct { } func (*awsAwsjson11_deserializeOpUntagResource) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpUntagResource) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorUntagResource(response, &metadata) } output := &UntagResourceOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentUntagResourceOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorUntagResource(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpUpdateApplication struct { } func (*awsAwsjson11_deserializeOpUpdateApplication) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpUpdateApplication) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorUpdateApplication(response, &metadata) } output := &UpdateApplicationOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentUpdateApplicationOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorUpdateApplication(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpUpdateDirectoryConfig struct { } func (*awsAwsjson11_deserializeOpUpdateDirectoryConfig) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpUpdateDirectoryConfig) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorUpdateDirectoryConfig(response, &metadata) } output := &UpdateDirectoryConfigOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentUpdateDirectoryConfigOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorUpdateDirectoryConfig(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpUpdateFleet struct { } func (*awsAwsjson11_deserializeOpUpdateFleet) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpUpdateFleet) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorUpdateFleet(response, &metadata) } output := &UpdateFleetOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentUpdateFleetOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorUpdateFleet(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("IncompatibleImageException", errorCode): return awsAwsjson11_deserializeErrorIncompatibleImageException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("RequestLimitExceededException", errorCode): return awsAwsjson11_deserializeErrorRequestLimitExceededException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpUpdateImagePermissions struct { } func (*awsAwsjson11_deserializeOpUpdateImagePermissions) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpUpdateImagePermissions) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorUpdateImagePermissions(response, &metadata) } output := &UpdateImagePermissionsOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentUpdateImagePermissionsOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorUpdateImagePermissions(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("ResourceNotAvailableException", errorCode): return awsAwsjson11_deserializeErrorResourceNotAvailableException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } type awsAwsjson11_deserializeOpUpdateStack struct { } func (*awsAwsjson11_deserializeOpUpdateStack) ID() string { return "OperationDeserializer" } func (m *awsAwsjson11_deserializeOpUpdateStack) HandleDeserialize(ctx context.Context, in middleware.DeserializeInput, next middleware.DeserializeHandler) ( out middleware.DeserializeOutput, metadata middleware.Metadata, err error, ) { out, metadata, err = next.HandleDeserialize(ctx, in) if err != nil { return out, metadata, err } response, ok := out.RawResponse.(*smithyhttp.Response) if !ok { return out, metadata, &smithy.DeserializationError{Err: fmt.Errorf("unknown transport type %T", out.RawResponse)} } if response.StatusCode < 200 || response.StatusCode >= 300 { return out, metadata, awsAwsjson11_deserializeOpErrorUpdateStack(response, &metadata) } output := &UpdateStackOutput{} out.Result = output var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(response.Body, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } err = awsAwsjson11_deserializeOpDocumentUpdateStackOutput(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return out, metadata, err } return out, metadata, err } func awsAwsjson11_deserializeOpErrorUpdateStack(response *smithyhttp.Response, metadata *middleware.Metadata) error { var errorBuffer bytes.Buffer if _, err := io.Copy(&errorBuffer, response.Body); err != nil { return &smithy.DeserializationError{Err: fmt.Errorf("failed to copy error response body, %w", err)} } errorBody := bytes.NewReader(errorBuffer.Bytes()) errorCode := "UnknownError" errorMessage := errorCode code := response.Header.Get("X-Amzn-ErrorType") if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() code, message, err := restjson.GetErrorInfo(decoder) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) if len(code) != 0 { errorCode = restjson.SanitizeErrorCode(code) } if len(message) != 0 { errorMessage = message } switch { case strings.EqualFold("ConcurrentModificationException", errorCode): return awsAwsjson11_deserializeErrorConcurrentModificationException(response, errorBody) case strings.EqualFold("IncompatibleImageException", errorCode): return awsAwsjson11_deserializeErrorIncompatibleImageException(response, errorBody) case strings.EqualFold("InvalidAccountStatusException", errorCode): return awsAwsjson11_deserializeErrorInvalidAccountStatusException(response, errorBody) case strings.EqualFold("InvalidParameterCombinationException", errorCode): return awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response, errorBody) case strings.EqualFold("InvalidRoleException", errorCode): return awsAwsjson11_deserializeErrorInvalidRoleException(response, errorBody) case strings.EqualFold("LimitExceededException", errorCode): return awsAwsjson11_deserializeErrorLimitExceededException(response, errorBody) case strings.EqualFold("OperationNotPermittedException", errorCode): return awsAwsjson11_deserializeErrorOperationNotPermittedException(response, errorBody) case strings.EqualFold("ResourceInUseException", errorCode): return awsAwsjson11_deserializeErrorResourceInUseException(response, errorBody) case strings.EqualFold("ResourceNotFoundException", errorCode): return awsAwsjson11_deserializeErrorResourceNotFoundException(response, errorBody) default: genericError := &smithy.GenericAPIError{ Code: errorCode, Message: errorMessage, } return genericError } } func awsAwsjson11_deserializeErrorConcurrentModificationException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.ConcurrentModificationException{} err := awsAwsjson11_deserializeDocumentConcurrentModificationException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorIncompatibleImageException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.IncompatibleImageException{} err := awsAwsjson11_deserializeDocumentIncompatibleImageException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorInvalidAccountStatusException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.InvalidAccountStatusException{} err := awsAwsjson11_deserializeDocumentInvalidAccountStatusException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorInvalidParameterCombinationException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.InvalidParameterCombinationException{} err := awsAwsjson11_deserializeDocumentInvalidParameterCombinationException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorInvalidRoleException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.InvalidRoleException{} err := awsAwsjson11_deserializeDocumentInvalidRoleException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorLimitExceededException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.LimitExceededException{} err := awsAwsjson11_deserializeDocumentLimitExceededException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorOperationNotPermittedException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.OperationNotPermittedException{} err := awsAwsjson11_deserializeDocumentOperationNotPermittedException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorRequestLimitExceededException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.RequestLimitExceededException{} err := awsAwsjson11_deserializeDocumentRequestLimitExceededException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorResourceAlreadyExistsException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.ResourceAlreadyExistsException{} err := awsAwsjson11_deserializeDocumentResourceAlreadyExistsException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorResourceInUseException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.ResourceInUseException{} err := awsAwsjson11_deserializeDocumentResourceInUseException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorResourceNotAvailableException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.ResourceNotAvailableException{} err := awsAwsjson11_deserializeDocumentResourceNotAvailableException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeErrorResourceNotFoundException(response *smithyhttp.Response, errorBody *bytes.Reader) error { var buff [1024]byte ringBuffer := smithyio.NewRingBuffer(buff[:]) body := io.TeeReader(errorBody, ringBuffer) decoder := json.NewDecoder(body) decoder.UseNumber() var shape interface{} if err := decoder.Decode(&shape); err != nil && err != io.EOF { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } output := &types.ResourceNotFoundException{} err := awsAwsjson11_deserializeDocumentResourceNotFoundException(&output, shape) if err != nil { var snapshot bytes.Buffer io.Copy(&snapshot, ringBuffer) err = &smithy.DeserializationError{ Err: fmt.Errorf("failed to decode response body, %w", err), Snapshot: snapshot.Bytes(), } return err } errorBody.Seek(0, io.SeekStart) return output } func awsAwsjson11_deserializeDocumentAccessEndpoint(v **types.AccessEndpoint, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.AccessEndpoint if *v == nil { sv = &types.AccessEndpoint{} } else { sv = *v } for key, value := range shape { switch key { case "EndpointType": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AccessEndpointType to be of type string, got %T instead", value) } sv.EndpointType = types.AccessEndpointType(jtv) } case "VpceId": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.VpceId = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentAccessEndpointList(v *[]types.AccessEndpoint, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.AccessEndpoint if *v == nil { cv = []types.AccessEndpoint{} } else { cv = *v } for _, value := range shape { var col types.AccessEndpoint destAddr := &col if err := awsAwsjson11_deserializeDocumentAccessEndpoint(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentAppBlock(v **types.AppBlock, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.AppBlock if *v == nil { sv = &types.AppBlock{} } else { sv = *v } for key, value := range shape { switch key { case "Arn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.Arn = ptr.String(jtv) } case "CreatedTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.CreatedTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "Description": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Description = ptr.String(jtv) } case "DisplayName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.DisplayName = ptr.String(jtv) } case "Name": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Name = ptr.String(jtv) } case "SetupScriptDetails": if err := awsAwsjson11_deserializeDocumentScriptDetails(&sv.SetupScriptDetails, value); err != nil { return err } case "SourceS3Location": if err := awsAwsjson11_deserializeDocumentS3Location(&sv.SourceS3Location, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentAppBlocks(v *[]types.AppBlock, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.AppBlock if *v == nil { cv = []types.AppBlock{} } else { cv = *v } for _, value := range shape { var col types.AppBlock destAddr := &col if err := awsAwsjson11_deserializeDocumentAppBlock(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentApplication(v **types.Application, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.Application if *v == nil { sv = &types.Application{} } else { sv = *v } for key, value := range shape { switch key { case "AppBlockArn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.AppBlockArn = ptr.String(jtv) } case "Arn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.Arn = ptr.String(jtv) } case "CreatedTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.CreatedTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "Description": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Description = ptr.String(jtv) } case "DisplayName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.DisplayName = ptr.String(jtv) } case "Enabled": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected Boolean to be of type *bool, got %T instead", value) } sv.Enabled = jtv } case "IconS3Location": if err := awsAwsjson11_deserializeDocumentS3Location(&sv.IconS3Location, value); err != nil { return err } case "IconURL": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.IconURL = ptr.String(jtv) } case "InstanceFamilies": if err := awsAwsjson11_deserializeDocumentStringList(&sv.InstanceFamilies, value); err != nil { return err } case "LaunchParameters": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.LaunchParameters = ptr.String(jtv) } case "LaunchPath": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.LaunchPath = ptr.String(jtv) } case "Metadata": if err := awsAwsjson11_deserializeDocumentMetadata(&sv.Metadata, value); err != nil { return err } case "Name": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Name = ptr.String(jtv) } case "Platforms": if err := awsAwsjson11_deserializeDocumentPlatforms(&sv.Platforms, value); err != nil { return err } case "WorkingDirectory": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.WorkingDirectory = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentApplicationFleetAssociation(v **types.ApplicationFleetAssociation, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ApplicationFleetAssociation if *v == nil { sv = &types.ApplicationFleetAssociation{} } else { sv = *v } for key, value := range shape { switch key { case "ApplicationArn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.ApplicationArn = ptr.String(jtv) } case "FleetName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.FleetName = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentApplicationFleetAssociationList(v *[]types.ApplicationFleetAssociation, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.ApplicationFleetAssociation if *v == nil { cv = []types.ApplicationFleetAssociation{} } else { cv = *v } for _, value := range shape { var col types.ApplicationFleetAssociation destAddr := &col if err := awsAwsjson11_deserializeDocumentApplicationFleetAssociation(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentApplications(v *[]types.Application, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.Application if *v == nil { cv = []types.Application{} } else { cv = *v } for _, value := range shape { var col types.Application destAddr := &col if err := awsAwsjson11_deserializeDocumentApplication(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentApplicationSettingsResponse(v **types.ApplicationSettingsResponse, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ApplicationSettingsResponse if *v == nil { sv = &types.ApplicationSettingsResponse{} } else { sv = *v } for key, value := range shape { switch key { case "Enabled": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected Boolean to be of type *bool, got %T instead", value) } sv.Enabled = jtv } case "S3BucketName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.S3BucketName = ptr.String(jtv) } case "SettingsGroup": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected SettingsGroup to be of type string, got %T instead", value) } sv.SettingsGroup = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentComputeCapacityStatus(v **types.ComputeCapacityStatus, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ComputeCapacityStatus if *v == nil { sv = &types.ComputeCapacityStatus{} } else { sv = *v } for key, value := range shape { switch key { case "Available": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.Available = ptr.Int32(int32(i64)) } case "Desired": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.Desired = ptr.Int32(int32(i64)) } case "InUse": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.InUse = ptr.Int32(int32(i64)) } case "Running": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.Running = ptr.Int32(int32(i64)) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentConcurrentModificationException(v **types.ConcurrentModificationException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ConcurrentModificationException if *v == nil { sv = &types.ConcurrentModificationException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentDirectoryConfig(v **types.DirectoryConfig, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.DirectoryConfig if *v == nil { sv = &types.DirectoryConfig{} } else { sv = *v } for key, value := range shape { switch key { case "CreatedTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.CreatedTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "DirectoryName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected DirectoryName to be of type string, got %T instead", value) } sv.DirectoryName = ptr.String(jtv) } case "OrganizationalUnitDistinguishedNames": if err := awsAwsjson11_deserializeDocumentOrganizationalUnitDistinguishedNamesList(&sv.OrganizationalUnitDistinguishedNames, value); err != nil { return err } case "ServiceAccountCredentials": if err := awsAwsjson11_deserializeDocumentServiceAccountCredentials(&sv.ServiceAccountCredentials, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentDirectoryConfigList(v *[]types.DirectoryConfig, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.DirectoryConfig if *v == nil { cv = []types.DirectoryConfig{} } else { cv = *v } for _, value := range shape { var col types.DirectoryConfig destAddr := &col if err := awsAwsjson11_deserializeDocumentDirectoryConfig(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentDomainJoinInfo(v **types.DomainJoinInfo, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.DomainJoinInfo if *v == nil { sv = &types.DomainJoinInfo{} } else { sv = *v } for key, value := range shape { switch key { case "DirectoryName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected DirectoryName to be of type string, got %T instead", value) } sv.DirectoryName = ptr.String(jtv) } case "OrganizationalUnitDistinguishedName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected OrganizationalUnitDistinguishedName to be of type string, got %T instead", value) } sv.OrganizationalUnitDistinguishedName = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentDomainList(v *[]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []string if *v == nil { cv = []string{} } else { cv = *v } for _, value := range shape { var col string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Domain to be of type string, got %T instead", value) } col = jtv } cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentEmbedHostDomains(v *[]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []string if *v == nil { cv = []string{} } else { cv = *v } for _, value := range shape { var col string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected EmbedHostDomain to be of type string, got %T instead", value) } col = jtv } cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentFleet(v **types.Fleet, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.Fleet if *v == nil { sv = &types.Fleet{} } else { sv = *v } for key, value := range shape { switch key { case "Arn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.Arn = ptr.String(jtv) } case "ComputeCapacityStatus": if err := awsAwsjson11_deserializeDocumentComputeCapacityStatus(&sv.ComputeCapacityStatus, value); err != nil { return err } case "CreatedTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.CreatedTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "Description": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Description = ptr.String(jtv) } case "DisconnectTimeoutInSeconds": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.DisconnectTimeoutInSeconds = ptr.Int32(int32(i64)) } case "DisplayName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.DisplayName = ptr.String(jtv) } case "DomainJoinInfo": if err := awsAwsjson11_deserializeDocumentDomainJoinInfo(&sv.DomainJoinInfo, value); err != nil { return err } case "EnableDefaultInternetAccess": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected BooleanObject to be of type *bool, got %T instead", value) } sv.EnableDefaultInternetAccess = ptr.Bool(jtv) } case "FleetErrors": if err := awsAwsjson11_deserializeDocumentFleetErrors(&sv.FleetErrors, value); err != nil { return err } case "FleetType": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected FleetType to be of type string, got %T instead", value) } sv.FleetType = types.FleetType(jtv) } case "IamRoleArn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.IamRoleArn = ptr.String(jtv) } case "IdleDisconnectTimeoutInSeconds": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.IdleDisconnectTimeoutInSeconds = ptr.Int32(int32(i64)) } case "ImageArn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.ImageArn = ptr.String(jtv) } case "ImageName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ImageName = ptr.String(jtv) } case "InstanceType": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.InstanceType = ptr.String(jtv) } case "MaxConcurrentSessions": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.MaxConcurrentSessions = ptr.Int32(int32(i64)) } case "MaxUserDurationInSeconds": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.MaxUserDurationInSeconds = ptr.Int32(int32(i64)) } case "Name": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Name = ptr.String(jtv) } case "Platform": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected PlatformType to be of type string, got %T instead", value) } sv.Platform = types.PlatformType(jtv) } case "State": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected FleetState to be of type string, got %T instead", value) } sv.State = types.FleetState(jtv) } case "StreamView": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected StreamView to be of type string, got %T instead", value) } sv.StreamView = types.StreamView(jtv) } case "UsbDeviceFilterStrings": if err := awsAwsjson11_deserializeDocumentUsbDeviceFilterStrings(&sv.UsbDeviceFilterStrings, value); err != nil { return err } case "VpcConfig": if err := awsAwsjson11_deserializeDocumentVpcConfig(&sv.VpcConfig, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentFleetError(v **types.FleetError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.FleetError if *v == nil { sv = &types.FleetError{} } else { sv = *v } for key, value := range shape { switch key { case "ErrorCode": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected FleetErrorCode to be of type string, got %T instead", value) } sv.ErrorCode = types.FleetErrorCode(jtv) } case "ErrorMessage": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ErrorMessage = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentFleetErrors(v *[]types.FleetError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.FleetError if *v == nil { cv = []types.FleetError{} } else { cv = *v } for _, value := range shape { var col types.FleetError destAddr := &col if err := awsAwsjson11_deserializeDocumentFleetError(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentFleetList(v *[]types.Fleet, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.Fleet if *v == nil { cv = []types.Fleet{} } else { cv = *v } for _, value := range shape { var col types.Fleet destAddr := &col if err := awsAwsjson11_deserializeDocumentFleet(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentImage(v **types.Image, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.Image if *v == nil { sv = &types.Image{} } else { sv = *v } for key, value := range shape { switch key { case "Applications": if err := awsAwsjson11_deserializeDocumentApplications(&sv.Applications, value); err != nil { return err } case "AppstreamAgentVersion": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AppstreamAgentVersion to be of type string, got %T instead", value) } sv.AppstreamAgentVersion = ptr.String(jtv) } case "Arn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.Arn = ptr.String(jtv) } case "BaseImageArn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.BaseImageArn = ptr.String(jtv) } case "CreatedTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.CreatedTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "Description": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Description = ptr.String(jtv) } case "DisplayName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.DisplayName = ptr.String(jtv) } case "ImageBuilderName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ImageBuilderName = ptr.String(jtv) } case "ImageBuilderSupported": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected Boolean to be of type *bool, got %T instead", value) } sv.ImageBuilderSupported = jtv } case "ImageErrors": if err := awsAwsjson11_deserializeDocumentResourceErrors(&sv.ImageErrors, value); err != nil { return err } case "ImagePermissions": if err := awsAwsjson11_deserializeDocumentImagePermissions(&sv.ImagePermissions, value); err != nil { return err } case "Name": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Name = ptr.String(jtv) } case "Platform": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected PlatformType to be of type string, got %T instead", value) } sv.Platform = types.PlatformType(jtv) } case "PublicBaseImageReleasedDate": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.PublicBaseImageReleasedDate = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "State": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ImageState to be of type string, got %T instead", value) } sv.State = types.ImageState(jtv) } case "StateChangeReason": if err := awsAwsjson11_deserializeDocumentImageStateChangeReason(&sv.StateChangeReason, value); err != nil { return err } case "Visibility": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected VisibilityType to be of type string, got %T instead", value) } sv.Visibility = types.VisibilityType(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentImageBuilder(v **types.ImageBuilder, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ImageBuilder if *v == nil { sv = &types.ImageBuilder{} } else { sv = *v } for key, value := range shape { switch key { case "AccessEndpoints": if err := awsAwsjson11_deserializeDocumentAccessEndpointList(&sv.AccessEndpoints, value); err != nil { return err } case "AppstreamAgentVersion": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AppstreamAgentVersion to be of type string, got %T instead", value) } sv.AppstreamAgentVersion = ptr.String(jtv) } case "Arn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.Arn = ptr.String(jtv) } case "CreatedTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.CreatedTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "Description": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Description = ptr.String(jtv) } case "DisplayName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.DisplayName = ptr.String(jtv) } case "DomainJoinInfo": if err := awsAwsjson11_deserializeDocumentDomainJoinInfo(&sv.DomainJoinInfo, value); err != nil { return err } case "EnableDefaultInternetAccess": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected BooleanObject to be of type *bool, got %T instead", value) } sv.EnableDefaultInternetAccess = ptr.Bool(jtv) } case "IamRoleArn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.IamRoleArn = ptr.String(jtv) } case "ImageArn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.ImageArn = ptr.String(jtv) } case "ImageBuilderErrors": if err := awsAwsjson11_deserializeDocumentResourceErrors(&sv.ImageBuilderErrors, value); err != nil { return err } case "InstanceType": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.InstanceType = ptr.String(jtv) } case "Name": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Name = ptr.String(jtv) } case "NetworkAccessConfiguration": if err := awsAwsjson11_deserializeDocumentNetworkAccessConfiguration(&sv.NetworkAccessConfiguration, value); err != nil { return err } case "Platform": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected PlatformType to be of type string, got %T instead", value) } sv.Platform = types.PlatformType(jtv) } case "State": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ImageBuilderState to be of type string, got %T instead", value) } sv.State = types.ImageBuilderState(jtv) } case "StateChangeReason": if err := awsAwsjson11_deserializeDocumentImageBuilderStateChangeReason(&sv.StateChangeReason, value); err != nil { return err } case "VpcConfig": if err := awsAwsjson11_deserializeDocumentVpcConfig(&sv.VpcConfig, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentImageBuilderList(v *[]types.ImageBuilder, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.ImageBuilder if *v == nil { cv = []types.ImageBuilder{} } else { cv = *v } for _, value := range shape { var col types.ImageBuilder destAddr := &col if err := awsAwsjson11_deserializeDocumentImageBuilder(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentImageBuilderStateChangeReason(v **types.ImageBuilderStateChangeReason, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ImageBuilderStateChangeReason if *v == nil { sv = &types.ImageBuilderStateChangeReason{} } else { sv = *v } for key, value := range shape { switch key { case "Code": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ImageBuilderStateChangeReasonCode to be of type string, got %T instead", value) } sv.Code = types.ImageBuilderStateChangeReasonCode(jtv) } case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentImageList(v *[]types.Image, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.Image if *v == nil { cv = []types.Image{} } else { cv = *v } for _, value := range shape { var col types.Image destAddr := &col if err := awsAwsjson11_deserializeDocumentImage(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentImagePermissions(v **types.ImagePermissions, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ImagePermissions if *v == nil { sv = &types.ImagePermissions{} } else { sv = *v } for key, value := range shape { switch key { case "allowFleet": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected BooleanObject to be of type *bool, got %T instead", value) } sv.AllowFleet = ptr.Bool(jtv) } case "allowImageBuilder": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected BooleanObject to be of type *bool, got %T instead", value) } sv.AllowImageBuilder = ptr.Bool(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentImageStateChangeReason(v **types.ImageStateChangeReason, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ImageStateChangeReason if *v == nil { sv = &types.ImageStateChangeReason{} } else { sv = *v } for key, value := range shape { switch key { case "Code": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ImageStateChangeReasonCode to be of type string, got %T instead", value) } sv.Code = types.ImageStateChangeReasonCode(jtv) } case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentIncompatibleImageException(v **types.IncompatibleImageException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.IncompatibleImageException if *v == nil { sv = &types.IncompatibleImageException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentInvalidAccountStatusException(v **types.InvalidAccountStatusException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.InvalidAccountStatusException if *v == nil { sv = &types.InvalidAccountStatusException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentInvalidParameterCombinationException(v **types.InvalidParameterCombinationException, value interface{}) error
func awsAwsjson11_deserializeDocumentInvalidRoleException(v **types.InvalidRoleException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.InvalidRoleException if *v == nil { sv = &types.InvalidRoleException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentLastReportGenerationExecutionError(v **types.LastReportGenerationExecutionError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.LastReportGenerationExecutionError if *v == nil { sv = &types.LastReportGenerationExecutionError{} } else { sv = *v } for key, value := range shape { switch key { case "ErrorCode": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected UsageReportExecutionErrorCode to be of type string, got %T instead", value) } sv.ErrorCode = types.UsageReportExecutionErrorCode(jtv) } case "ErrorMessage": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ErrorMessage = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentLastReportGenerationExecutionErrors(v *[]types.LastReportGenerationExecutionError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.LastReportGenerationExecutionError if *v == nil { cv = []types.LastReportGenerationExecutionError{} } else { cv = *v } for _, value := range shape { var col types.LastReportGenerationExecutionError destAddr := &col if err := awsAwsjson11_deserializeDocumentLastReportGenerationExecutionError(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentLimitExceededException(v **types.LimitExceededException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.LimitExceededException if *v == nil { sv = &types.LimitExceededException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentMetadata(v *map[string]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var mv map[string]string if *v == nil { mv = map[string]string{} } else { mv = *v } for key, value := range shape { var parsedVal string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } parsedVal = jtv } mv[key] = parsedVal } *v = mv return nil } func awsAwsjson11_deserializeDocumentNetworkAccessConfiguration(v **types.NetworkAccessConfiguration, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.NetworkAccessConfiguration if *v == nil { sv = &types.NetworkAccessConfiguration{} } else { sv = *v } for key, value := range shape { switch key { case "EniId": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.EniId = ptr.String(jtv) } case "EniPrivateIpAddress": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.EniPrivateIpAddress = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentOperationNotPermittedException(v **types.OperationNotPermittedException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.OperationNotPermittedException if *v == nil { sv = &types.OperationNotPermittedException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentOrganizationalUnitDistinguishedNamesList(v *[]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []string if *v == nil { cv = []string{} } else { cv = *v } for _, value := range shape { var col string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected OrganizationalUnitDistinguishedName to be of type string, got %T instead", value) } col = jtv } cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentPlatforms(v *[]types.PlatformType, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.PlatformType if *v == nil { cv = []types.PlatformType{} } else { cv = *v } for _, value := range shape { var col types.PlatformType if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected PlatformType to be of type string, got %T instead", value) } col = types.PlatformType(jtv) } cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentRequestLimitExceededException(v **types.RequestLimitExceededException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.RequestLimitExceededException if *v == nil { sv = &types.RequestLimitExceededException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentResourceAlreadyExistsException(v **types.ResourceAlreadyExistsException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ResourceAlreadyExistsException if *v == nil { sv = &types.ResourceAlreadyExistsException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentResourceError(v **types.ResourceError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ResourceError if *v == nil { sv = &types.ResourceError{} } else { sv = *v } for key, value := range shape { switch key { case "ErrorCode": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected FleetErrorCode to be of type string, got %T instead", value) } sv.ErrorCode = types.FleetErrorCode(jtv) } case "ErrorMessage": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ErrorMessage = ptr.String(jtv) } case "ErrorTimestamp": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.ErrorTimestamp = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentResourceErrors(v *[]types.ResourceError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.ResourceError if *v == nil { cv = []types.ResourceError{} } else { cv = *v } for _, value := range shape { var col types.ResourceError destAddr := &col if err := awsAwsjson11_deserializeDocumentResourceError(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentResourceInUseException(v **types.ResourceInUseException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ResourceInUseException if *v == nil { sv = &types.ResourceInUseException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentResourceNotAvailableException(v **types.ResourceNotAvailableException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ResourceNotAvailableException if *v == nil { sv = &types.ResourceNotAvailableException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentResourceNotFoundException(v **types.ResourceNotFoundException, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ResourceNotFoundException if *v == nil { sv = &types.ResourceNotFoundException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentS3Location(v **types.S3Location, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.S3Location if *v == nil { sv = &types.S3Location{} } else { sv = *v } for key, value := range shape { switch key { case "S3Bucket": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected S3Bucket to be of type string, got %T instead", value) } sv.S3Bucket = ptr.String(jtv) } case "S3Key": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected S3Key to be of type string, got %T instead", value) } sv.S3Key = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentScriptDetails(v **types.ScriptDetails, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ScriptDetails if *v == nil { sv = &types.ScriptDetails{} } else { sv = *v } for key, value := range shape { switch key { case "ExecutableParameters": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ExecutableParameters = ptr.String(jtv) } case "ExecutablePath": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ExecutablePath = ptr.String(jtv) } case "ScriptS3Location": if err := awsAwsjson11_deserializeDocumentS3Location(&sv.ScriptS3Location, value); err != nil { return err } case "TimeoutInSeconds": if value != nil { jtv, ok := value.(json.Number) if !ok { return fmt.Errorf("expected Integer to be json.Number, got %T instead", value) } i64, err := jtv.Int64() if err != nil { return err } sv.TimeoutInSeconds = ptr.Int32(int32(i64)) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentSecurityGroupIdList(v *[]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []string if *v == nil { cv = []string{} } else { cv = *v } for _, value := range shape { var col string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } col = jtv } cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentServiceAccountCredentials(v **types.ServiceAccountCredentials, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.ServiceAccountCredentials if *v == nil { sv = &types.ServiceAccountCredentials{} } else { sv = *v } for key, value := range shape { switch key { case "AccountName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AccountName to be of type string, got %T instead", value) } sv.AccountName = ptr.String(jtv) } case "AccountPassword": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AccountPassword to be of type string, got %T instead", value) } sv.AccountPassword = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentSession(v **types.Session, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.Session if *v == nil { sv = &types.Session{} } else { sv = *v } for key, value := range shape { switch key { case "AuthenticationType": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AuthenticationType to be of type string, got %T instead", value) } sv.AuthenticationType = types.AuthenticationType(jtv) } case "ConnectionState": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected SessionConnectionState to be of type string, got %T instead", value) } sv.ConnectionState = types.SessionConnectionState(jtv) } case "FleetName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.FleetName = ptr.String(jtv) } case "Id": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Id = ptr.String(jtv) } case "MaxExpirationTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.MaxExpirationTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "NetworkAccessConfiguration": if err := awsAwsjson11_deserializeDocumentNetworkAccessConfiguration(&sv.NetworkAccessConfiguration, value); err != nil { return err } case "StackName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.StackName = ptr.String(jtv) } case "StartTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.StartTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "State": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected SessionState to be of type string, got %T instead", value) } sv.State = types.SessionState(jtv) } case "UserId": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected UserId to be of type string, got %T instead", value) } sv.UserId = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentSessionList(v *[]types.Session, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.Session if *v == nil { cv = []types.Session{} } else { cv = *v } for _, value := range shape { var col types.Session destAddr := &col if err := awsAwsjson11_deserializeDocumentSession(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentSharedImagePermissions(v **types.SharedImagePermissions, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.SharedImagePermissions if *v == nil { sv = &types.SharedImagePermissions{} } else { sv = *v } for key, value := range shape { switch key { case "imagePermissions": if err := awsAwsjson11_deserializeDocumentImagePermissions(&sv.ImagePermissions, value); err != nil { return err } case "sharedAccountId": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AwsAccountId to be of type string, got %T instead", value) } sv.SharedAccountId = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentSharedImagePermissionsList(v *[]types.SharedImagePermissions, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.SharedImagePermissions if *v == nil { cv = []types.SharedImagePermissions{} } else { cv = *v } for _, value := range shape { var col types.SharedImagePermissions destAddr := &col if err := awsAwsjson11_deserializeDocumentSharedImagePermissions(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentStack(v **types.Stack, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.Stack if *v == nil { sv = &types.Stack{} } else { sv = *v } for key, value := range shape { switch key { case "AccessEndpoints": if err := awsAwsjson11_deserializeDocumentAccessEndpointList(&sv.AccessEndpoints, value); err != nil { return err } case "ApplicationSettings": if err := awsAwsjson11_deserializeDocumentApplicationSettingsResponse(&sv.ApplicationSettings, value); err != nil { return err } case "Arn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.Arn = ptr.String(jtv) } case "CreatedTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.CreatedTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "Description": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Description = ptr.String(jtv) } case "DisplayName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.DisplayName = ptr.String(jtv) } case "EmbedHostDomains": if err := awsAwsjson11_deserializeDocumentEmbedHostDomains(&sv.EmbedHostDomains, value); err != nil { return err } case "FeedbackURL": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected FeedbackURL to be of type string, got %T instead", value) } sv.FeedbackURL = ptr.String(jtv) } case "Name": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Name = ptr.String(jtv) } case "RedirectURL": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected RedirectURL to be of type string, got %T instead", value) } sv.RedirectURL = ptr.String(jtv) } case "StackErrors": if err := awsAwsjson11_deserializeDocumentStackErrors(&sv.StackErrors, value); err != nil { return err } case "StorageConnectors": if err := awsAwsjson11_deserializeDocumentStorageConnectorList(&sv.StorageConnectors, value); err != nil { return err } case "UserSettings": if err := awsAwsjson11_deserializeDocumentUserSettingList(&sv.UserSettings, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentStackError(v **types.StackError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.StackError if *v == nil { sv = &types.StackError{} } else { sv = *v } for key, value := range shape { switch key { case "ErrorCode": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected StackErrorCode to be of type string, got %T instead", value) } sv.ErrorCode = types.StackErrorCode(jtv) } case "ErrorMessage": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ErrorMessage = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentStackErrors(v *[]types.StackError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.StackError if *v == nil { cv = []types.StackError{} } else { cv = *v } for _, value := range shape { var col types.StackError destAddr := &col if err := awsAwsjson11_deserializeDocumentStackError(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentStackList(v *[]types.Stack, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.Stack if *v == nil { cv = []types.Stack{} } else { cv = *v } for _, value := range shape { var col types.Stack destAddr := &col if err := awsAwsjson11_deserializeDocumentStack(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentStorageConnector(v **types.StorageConnector, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.StorageConnector if *v == nil { sv = &types.StorageConnector{} } else { sv = *v } for key, value := range shape { switch key { case "ConnectorType": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected StorageConnectorType to be of type string, got %T instead", value) } sv.ConnectorType = types.StorageConnectorType(jtv) } case "Domains": if err := awsAwsjson11_deserializeDocumentDomainList(&sv.Domains, value); err != nil { return err } case "ResourceIdentifier": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ResourceIdentifier to be of type string, got %T instead", value) } sv.ResourceIdentifier = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentStorageConnectorList(v *[]types.StorageConnector, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.StorageConnector if *v == nil { cv = []types.StorageConnector{} } else { cv = *v } for _, value := range shape { var col types.StorageConnector destAddr := &col if err := awsAwsjson11_deserializeDocumentStorageConnector(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentStringList(v *[]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []string if *v == nil { cv = []string{} } else { cv = *v } for _, value := range shape { var col string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } col = jtv } cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentSubnetIdList(v *[]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []string if *v == nil { cv = []string{} } else { cv = *v } for _, value := range shape { var col string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } col = jtv } cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentTags(v *map[string]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var mv map[string]string if *v == nil { mv = map[string]string{} } else { mv = *v } for key, value := range shape { var parsedVal string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected TagValue to be of type string, got %T instead", value) } parsedVal = jtv } mv[key] = parsedVal } *v = mv return nil } func awsAwsjson11_deserializeDocumentUsageReportSubscription(v **types.UsageReportSubscription, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.UsageReportSubscription if *v == nil { sv = &types.UsageReportSubscription{} } else { sv = *v } for key, value := range shape { switch key { case "LastGeneratedReportDate": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.LastGeneratedReportDate = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "S3BucketName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.S3BucketName = ptr.String(jtv) } case "Schedule": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected UsageReportSchedule to be of type string, got %T instead", value) } sv.Schedule = types.UsageReportSchedule(jtv) } case "SubscriptionErrors": if err := awsAwsjson11_deserializeDocumentLastReportGenerationExecutionErrors(&sv.SubscriptionErrors, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentUsageReportSubscriptionList(v *[]types.UsageReportSubscription, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.UsageReportSubscription if *v == nil { cv = []types.UsageReportSubscription{} } else { cv = *v } for _, value := range shape { var col types.UsageReportSubscription destAddr := &col if err := awsAwsjson11_deserializeDocumentUsageReportSubscription(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentUsbDeviceFilterStrings(v *[]string, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []string if *v == nil { cv = []string{} } else { cv = *v } for _, value := range shape { var col string if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected UsbDeviceFilterString to be of type string, got %T instead", value) } col = jtv } cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentUser(v **types.User, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.User if *v == nil { sv = &types.User{} } else { sv = *v } for key, value := range shape { switch key { case "Arn": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Arn to be of type string, got %T instead", value) } sv.Arn = ptr.String(jtv) } case "AuthenticationType": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AuthenticationType to be of type string, got %T instead", value) } sv.AuthenticationType = types.AuthenticationType(jtv) } case "CreatedTime": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.CreatedTime = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "Enabled": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected Boolean to be of type *bool, got %T instead", value) } sv.Enabled = jtv } case "FirstName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected UserAttributeValue to be of type string, got %T instead", value) } sv.FirstName = ptr.String(jtv) } case "LastName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected UserAttributeValue to be of type string, got %T instead", value) } sv.LastName = ptr.String(jtv) } case "Status": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.Status = ptr.String(jtv) } case "UserName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Username to be of type string, got %T instead", value) } sv.UserName = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentUserList(v *[]types.User, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.User if *v == nil { cv = []types.User{} } else { cv = *v } for _, value := range shape { var col types.User destAddr := &col if err := awsAwsjson11_deserializeDocumentUser(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentUserSetting(v **types.UserSetting, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.UserSetting if *v == nil { sv = &types.UserSetting{} } else { sv = *v } for key, value := range shape { switch key { case "Action": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Action to be of type string, got %T instead", value) } sv.Action = types.Action(jtv) } case "Permission": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Permission to be of type string, got %T instead", value) } sv.Permission = types.Permission(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentUserSettingList(v *[]types.UserSetting, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.UserSetting if *v == nil { cv = []types.UserSetting{} } else { cv = *v } for _, value := range shape { var col types.UserSetting destAddr := &col if err := awsAwsjson11_deserializeDocumentUserSetting(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentUserStackAssociation(v **types.UserStackAssociation, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.UserStackAssociation if *v == nil { sv = &types.UserStackAssociation{} } else { sv = *v } for key, value := range shape { switch key { case "AuthenticationType": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected AuthenticationType to be of type string, got %T instead", value) } sv.AuthenticationType = types.AuthenticationType(jtv) } case "SendEmailNotification": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected Boolean to be of type *bool, got %T instead", value) } sv.SendEmailNotification = jtv } case "StackName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.StackName = ptr.String(jtv) } case "UserName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Username to be of type string, got %T instead", value) } sv.UserName = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentUserStackAssociationError(v **types.UserStackAssociationError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.UserStackAssociationError if *v == nil { sv = &types.UserStackAssociationError{} } else { sv = *v } for key, value := range shape { switch key { case "ErrorCode": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected UserStackAssociationErrorCode to be of type string, got %T instead", value) } sv.ErrorCode = types.UserStackAssociationErrorCode(jtv) } case "ErrorMessage": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.ErrorMessage = ptr.String(jtv) } case "UserStackAssociation": if err := awsAwsjson11_deserializeDocumentUserStackAssociation(&sv.UserStackAssociation, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeDocumentUserStackAssociationErrorList(v *[]types.UserStackAssociationError, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.UserStackAssociationError if *v == nil { cv = []types.UserStackAssociationError{} } else { cv = *v } for _, value := range shape { var col types.UserStackAssociationError destAddr := &col if err := awsAwsjson11_deserializeDocumentUserStackAssociationError(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentUserStackAssociationList(v *[]types.UserStackAssociation, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.([]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var cv []types.UserStackAssociation if *v == nil { cv = []types.UserStackAssociation{} } else { cv = *v } for _, value := range shape { var col types.UserStackAssociation destAddr := &col if err := awsAwsjson11_deserializeDocumentUserStackAssociation(&destAddr, value); err != nil { return err } col = *destAddr cv = append(cv, col) } *v = cv return nil } func awsAwsjson11_deserializeDocumentVpcConfig(v **types.VpcConfig, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.VpcConfig if *v == nil { sv = &types.VpcConfig{} } else { sv = *v } for key, value := range shape { switch key { case "SecurityGroupIds": if err := awsAwsjson11_deserializeDocumentSecurityGroupIdList(&sv.SecurityGroupIds, value); err != nil { return err } case "SubnetIds": if err := awsAwsjson11_deserializeDocumentSubnetIdList(&sv.SubnetIds, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentAssociateApplicationFleetOutput(v **AssociateApplicationFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *AssociateApplicationFleetOutput if *v == nil { sv = &AssociateApplicationFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { case "ApplicationFleetAssociation": if err := awsAwsjson11_deserializeDocumentApplicationFleetAssociation(&sv.ApplicationFleetAssociation, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentAssociateFleetOutput(v **AssociateFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *AssociateFleetOutput if *v == nil { sv = &AssociateFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentBatchAssociateUserStackOutput(v **BatchAssociateUserStackOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *BatchAssociateUserStackOutput if *v == nil { sv = &BatchAssociateUserStackOutput{} } else { sv = *v } for key, value := range shape { switch key { case "errors": if err := awsAwsjson11_deserializeDocumentUserStackAssociationErrorList(&sv.Errors, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentBatchDisassociateUserStackOutput(v **BatchDisassociateUserStackOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *BatchDisassociateUserStackOutput if *v == nil { sv = &BatchDisassociateUserStackOutput{} } else { sv = *v } for key, value := range shape { switch key { case "errors": if err := awsAwsjson11_deserializeDocumentUserStackAssociationErrorList(&sv.Errors, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCopyImageOutput(v **CopyImageOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CopyImageOutput if *v == nil { sv = &CopyImageOutput{} } else { sv = *v } for key, value := range shape { switch key { case "DestinationImageName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Name to be of type string, got %T instead", value) } sv.DestinationImageName = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateAppBlockOutput(v **CreateAppBlockOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateAppBlockOutput if *v == nil { sv = &CreateAppBlockOutput{} } else { sv = *v } for key, value := range shape { switch key { case "AppBlock": if err := awsAwsjson11_deserializeDocumentAppBlock(&sv.AppBlock, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateApplicationOutput(v **CreateApplicationOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateApplicationOutput if *v == nil { sv = &CreateApplicationOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Application": if err := awsAwsjson11_deserializeDocumentApplication(&sv.Application, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateDirectoryConfigOutput(v **CreateDirectoryConfigOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateDirectoryConfigOutput if *v == nil { sv = &CreateDirectoryConfigOutput{} } else { sv = *v } for key, value := range shape { switch key { case "DirectoryConfig": if err := awsAwsjson11_deserializeDocumentDirectoryConfig(&sv.DirectoryConfig, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateFleetOutput(v **CreateFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateFleetOutput if *v == nil { sv = &CreateFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Fleet": if err := awsAwsjson11_deserializeDocumentFleet(&sv.Fleet, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateImageBuilderOutput(v **CreateImageBuilderOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateImageBuilderOutput if *v == nil { sv = &CreateImageBuilderOutput{} } else { sv = *v } for key, value := range shape { switch key { case "ImageBuilder": if err := awsAwsjson11_deserializeDocumentImageBuilder(&sv.ImageBuilder, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateImageBuilderStreamingURLOutput(v **CreateImageBuilderStreamingURLOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateImageBuilderStreamingURLOutput if *v == nil { sv = &CreateImageBuilderStreamingURLOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Expires": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.Expires = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "StreamingURL": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.StreamingURL = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateStackOutput(v **CreateStackOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateStackOutput if *v == nil { sv = &CreateStackOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Stack": if err := awsAwsjson11_deserializeDocumentStack(&sv.Stack, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateStreamingURLOutput(v **CreateStreamingURLOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateStreamingURLOutput if *v == nil { sv = &CreateStreamingURLOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Expires": if value != nil { switch jtv := value.(type) { case json.Number: f64, err := jtv.Float64() if err != nil { return err } sv.Expires = ptr.Time(smithytime.ParseEpochSeconds(f64)) default: return fmt.Errorf("expected Timestamp to be a JSON Number, got %T instead", value) } } case "StreamingURL": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.StreamingURL = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateUpdatedImageOutput(v **CreateUpdatedImageOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateUpdatedImageOutput if *v == nil { sv = &CreateUpdatedImageOutput{} } else { sv = *v } for key, value := range shape { switch key { case "canUpdateImage": if value != nil { jtv, ok := value.(bool) if !ok { return fmt.Errorf("expected Boolean to be of type *bool, got %T instead", value) } sv.CanUpdateImage = jtv } case "image": if err := awsAwsjson11_deserializeDocumentImage(&sv.Image, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateUsageReportSubscriptionOutput(v **CreateUsageReportSubscriptionOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateUsageReportSubscriptionOutput if *v == nil { sv = &CreateUsageReportSubscriptionOutput{} } else { sv = *v } for key, value := range shape { switch key { case "S3BucketName": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.S3BucketName = ptr.String(jtv) } case "Schedule": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected UsageReportSchedule to be of type string, got %T instead", value) } sv.Schedule = types.UsageReportSchedule(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentCreateUserOutput(v **CreateUserOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *CreateUserOutput if *v == nil { sv = &CreateUserOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteAppBlockOutput(v **DeleteAppBlockOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteAppBlockOutput if *v == nil { sv = &DeleteAppBlockOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteApplicationOutput(v **DeleteApplicationOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteApplicationOutput if *v == nil { sv = &DeleteApplicationOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteDirectoryConfigOutput(v **DeleteDirectoryConfigOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteDirectoryConfigOutput if *v == nil { sv = &DeleteDirectoryConfigOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteFleetOutput(v **DeleteFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteFleetOutput if *v == nil { sv = &DeleteFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteImageBuilderOutput(v **DeleteImageBuilderOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteImageBuilderOutput if *v == nil { sv = &DeleteImageBuilderOutput{} } else { sv = *v } for key, value := range shape { switch key { case "ImageBuilder": if err := awsAwsjson11_deserializeDocumentImageBuilder(&sv.ImageBuilder, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteImageOutput(v **DeleteImageOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteImageOutput if *v == nil { sv = &DeleteImageOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Image": if err := awsAwsjson11_deserializeDocumentImage(&sv.Image, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteImagePermissionsOutput(v **DeleteImagePermissionsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteImagePermissionsOutput if *v == nil { sv = &DeleteImagePermissionsOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteStackOutput(v **DeleteStackOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteStackOutput if *v == nil { sv = &DeleteStackOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteUsageReportSubscriptionOutput(v **DeleteUsageReportSubscriptionOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteUsageReportSubscriptionOutput if *v == nil { sv = &DeleteUsageReportSubscriptionOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDeleteUserOutput(v **DeleteUserOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DeleteUserOutput if *v == nil { sv = &DeleteUserOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeAppBlocksOutput(v **DescribeAppBlocksOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeAppBlocksOutput if *v == nil { sv = &DescribeAppBlocksOutput{} } else { sv = *v } for key, value := range shape { switch key { case "AppBlocks": if err := awsAwsjson11_deserializeDocumentAppBlocks(&sv.AppBlocks, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeApplicationFleetAssociationsOutput(v **DescribeApplicationFleetAssociationsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeApplicationFleetAssociationsOutput if *v == nil { sv = &DescribeApplicationFleetAssociationsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "ApplicationFleetAssociations": if err := awsAwsjson11_deserializeDocumentApplicationFleetAssociationList(&sv.ApplicationFleetAssociations, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeApplicationsOutput(v **DescribeApplicationsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeApplicationsOutput if *v == nil { sv = &DescribeApplicationsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Applications": if err := awsAwsjson11_deserializeDocumentApplications(&sv.Applications, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeDirectoryConfigsOutput(v **DescribeDirectoryConfigsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeDirectoryConfigsOutput if *v == nil { sv = &DescribeDirectoryConfigsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "DirectoryConfigs": if err := awsAwsjson11_deserializeDocumentDirectoryConfigList(&sv.DirectoryConfigs, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeFleetsOutput(v **DescribeFleetsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeFleetsOutput if *v == nil { sv = &DescribeFleetsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Fleets": if err := awsAwsjson11_deserializeDocumentFleetList(&sv.Fleets, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeImageBuildersOutput(v **DescribeImageBuildersOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeImageBuildersOutput if *v == nil { sv = &DescribeImageBuildersOutput{} } else { sv = *v } for key, value := range shape { switch key { case "ImageBuilders": if err := awsAwsjson11_deserializeDocumentImageBuilderList(&sv.ImageBuilders, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeImagePermissionsOutput(v **DescribeImagePermissionsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeImagePermissionsOutput if *v == nil { sv = &DescribeImagePermissionsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Name": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected Name to be of type string, got %T instead", value) } sv.Name = ptr.String(jtv) } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } case "SharedImagePermissionsList": if err := awsAwsjson11_deserializeDocumentSharedImagePermissionsList(&sv.SharedImagePermissionsList, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeImagesOutput(v **DescribeImagesOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeImagesOutput if *v == nil { sv = &DescribeImagesOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Images": if err := awsAwsjson11_deserializeDocumentImageList(&sv.Images, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeSessionsOutput(v **DescribeSessionsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeSessionsOutput if *v == nil { sv = &DescribeSessionsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } case "Sessions": if err := awsAwsjson11_deserializeDocumentSessionList(&sv.Sessions, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeStacksOutput(v **DescribeStacksOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeStacksOutput if *v == nil { sv = &DescribeStacksOutput{} } else { sv = *v } for key, value := range shape { switch key { case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } case "Stacks": if err := awsAwsjson11_deserializeDocumentStackList(&sv.Stacks, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeUsageReportSubscriptionsOutput(v **DescribeUsageReportSubscriptionsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeUsageReportSubscriptionsOutput if *v == nil { sv = &DescribeUsageReportSubscriptionsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } case "UsageReportSubscriptions": if err := awsAwsjson11_deserializeDocumentUsageReportSubscriptionList(&sv.UsageReportSubscriptions, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeUsersOutput(v **DescribeUsersOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeUsersOutput if *v == nil { sv = &DescribeUsersOutput{} } else { sv = *v } for key, value := range shape { switch key { case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } case "Users": if err := awsAwsjson11_deserializeDocumentUserList(&sv.Users, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDescribeUserStackAssociationsOutput(v **DescribeUserStackAssociationsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DescribeUserStackAssociationsOutput if *v == nil { sv = &DescribeUserStackAssociationsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } case "UserStackAssociations": if err := awsAwsjson11_deserializeDocumentUserStackAssociationList(&sv.UserStackAssociations, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDisableUserOutput(v **DisableUserOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DisableUserOutput if *v == nil { sv = &DisableUserOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDisassociateApplicationFleetOutput(v **DisassociateApplicationFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DisassociateApplicationFleetOutput if *v == nil { sv = &DisassociateApplicationFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentDisassociateFleetOutput(v **DisassociateFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *DisassociateFleetOutput if *v == nil { sv = &DisassociateFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentEnableUserOutput(v **EnableUserOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *EnableUserOutput if *v == nil { sv = &EnableUserOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentExpireSessionOutput(v **ExpireSessionOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *ExpireSessionOutput if *v == nil { sv = &ExpireSessionOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentListAssociatedFleetsOutput(v **ListAssociatedFleetsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *ListAssociatedFleetsOutput if *v == nil { sv = &ListAssociatedFleetsOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Names": if err := awsAwsjson11_deserializeDocumentStringList(&sv.Names, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentListAssociatedStacksOutput(v **ListAssociatedStacksOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *ListAssociatedStacksOutput if *v == nil { sv = &ListAssociatedStacksOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Names": if err := awsAwsjson11_deserializeDocumentStringList(&sv.Names, value); err != nil { return err } case "NextToken": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected String to be of type string, got %T instead", value) } sv.NextToken = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentListTagsForResourceOutput(v **ListTagsForResourceOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *ListTagsForResourceOutput if *v == nil { sv = &ListTagsForResourceOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Tags": if err := awsAwsjson11_deserializeDocumentTags(&sv.Tags, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentStartFleetOutput(v **StartFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *StartFleetOutput if *v == nil { sv = &StartFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentStartImageBuilderOutput(v **StartImageBuilderOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *StartImageBuilderOutput if *v == nil { sv = &StartImageBuilderOutput{} } else { sv = *v } for key, value := range shape { switch key { case "ImageBuilder": if err := awsAwsjson11_deserializeDocumentImageBuilder(&sv.ImageBuilder, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentStopFleetOutput(v **StopFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *StopFleetOutput if *v == nil { sv = &StopFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentStopImageBuilderOutput(v **StopImageBuilderOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *StopImageBuilderOutput if *v == nil { sv = &StopImageBuilderOutput{} } else { sv = *v } for key, value := range shape { switch key { case "ImageBuilder": if err := awsAwsjson11_deserializeDocumentImageBuilder(&sv.ImageBuilder, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentTagResourceOutput(v **TagResourceOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *TagResourceOutput if *v == nil { sv = &TagResourceOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentUntagResourceOutput(v **UntagResourceOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *UntagResourceOutput if *v == nil { sv = &UntagResourceOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentUpdateApplicationOutput(v **UpdateApplicationOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *UpdateApplicationOutput if *v == nil { sv = &UpdateApplicationOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Application": if err := awsAwsjson11_deserializeDocumentApplication(&sv.Application, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentUpdateDirectoryConfigOutput(v **UpdateDirectoryConfigOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *UpdateDirectoryConfigOutput if *v == nil { sv = &UpdateDirectoryConfigOutput{} } else { sv = *v } for key, value := range shape { switch key { case "DirectoryConfig": if err := awsAwsjson11_deserializeDocumentDirectoryConfig(&sv.DirectoryConfig, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentUpdateFleetOutput(v **UpdateFleetOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *UpdateFleetOutput if *v == nil { sv = &UpdateFleetOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Fleet": if err := awsAwsjson11_deserializeDocumentFleet(&sv.Fleet, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentUpdateImagePermissionsOutput(v **UpdateImagePermissionsOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *UpdateImagePermissionsOutput if *v == nil { sv = &UpdateImagePermissionsOutput{} } else { sv = *v } for key, value := range shape { switch key { default: _, _ = key, value } } *v = sv return nil } func awsAwsjson11_deserializeOpDocumentUpdateStackOutput(v **UpdateStackOutput, value interface{}) error { if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *UpdateStackOutput if *v == nil { sv = &UpdateStackOutput{} } else { sv = *v } for key, value := range shape { switch key { case "Stack": if err := awsAwsjson11_deserializeDocumentStack(&sv.Stack, value); err != nil { return err } default: _, _ = key, value } } *v = sv return nil }
{ if v == nil { return fmt.Errorf("unexpected nil of type %T", v) } if value == nil { return nil } shape, ok := value.(map[string]interface{}) if !ok { return fmt.Errorf("unexpected JSON type %v", value) } var sv *types.InvalidParameterCombinationException if *v == nil { sv = &types.InvalidParameterCombinationException{} } else { sv = *v } for key, value := range shape { switch key { case "Message": if value != nil { jtv, ok := value.(string) if !ok { return fmt.Errorf("expected ErrorMessage to be of type string, got %T instead", value) } sv.Message = ptr.String(jtv) } default: _, _ = key, value } } *v = sv return nil }
error_meta.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. #[non_exhaustive] #[derive(std::fmt::Debug)] pub enum Error { AccessDeniedException(crate::error::AccessDeniedException), ConflictException(crate::error::ConflictException), InternalServerException(crate::error::InternalServerException), RequestAlreadyInProgressException(crate::error::RequestAlreadyInProgressException), ResourceNotFoundException(crate::error::ResourceNotFoundException), ServiceQuotaExceededException(crate::error::ServiceQuotaExceededException), ThrottlingException(crate::error::ThrottlingException), ValidationException(crate::error::ValidationException), Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>), } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Error::AccessDeniedException(inner) => inner.fmt(f), Error::ConflictException(inner) => inner.fmt(f), Error::InternalServerException(inner) => inner.fmt(f), Error::RequestAlreadyInProgressException(inner) => inner.fmt(f), Error::ResourceNotFoundException(inner) => inner.fmt(f), Error::ServiceQuotaExceededException(inner) => inner.fmt(f), Error::ThrottlingException(inner) => inner.fmt(f), Error::ValidationException(inner) => inner.fmt(f), Error::Unhandled(inner) => inner.fmt(f), } } } impl<R> From< smithy_http::result::SdkError< crate::error::BatchAssociateClientDeviceWithCoreDeviceError, R, >, > for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError< crate::error::BatchAssociateClientDeviceWithCoreDeviceError, R, >, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, ..} => match err.kind { crate::error::BatchAssociateClientDeviceWithCoreDeviceErrorKind::AccessDeniedException(inner) => Error::AccessDeniedException(inner), crate::error::BatchAssociateClientDeviceWithCoreDeviceErrorKind::InternalServerException(inner) => Error::InternalServerException(inner), crate::error::BatchAssociateClientDeviceWithCoreDeviceErrorKind::ResourceNotFoundException(inner) => Error::ResourceNotFoundException(inner), crate::error::BatchAssociateClientDeviceWithCoreDeviceErrorKind::ThrottlingException(inner) => Error::ThrottlingException(inner), crate::error::BatchAssociateClientDeviceWithCoreDeviceErrorKind::ValidationException(inner) => Error::ValidationException(inner), crate::error::BatchAssociateClientDeviceWithCoreDeviceErrorKind::Unhandled(inner) => Error::Unhandled(inner), } _ => Error::Unhandled(err.into()), } } } impl<R> From< smithy_http::result::SdkError< crate::error::BatchDisassociateClientDeviceFromCoreDeviceError, R, >, > for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError< crate::error::BatchDisassociateClientDeviceFromCoreDeviceError, R, >, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, ..} => match err.kind { crate::error::BatchDisassociateClientDeviceFromCoreDeviceErrorKind::AccessDeniedException(inner) => Error::AccessDeniedException(inner), crate::error::BatchDisassociateClientDeviceFromCoreDeviceErrorKind::InternalServerException(inner) => Error::InternalServerException(inner), crate::error::BatchDisassociateClientDeviceFromCoreDeviceErrorKind::ResourceNotFoundException(inner) => Error::ResourceNotFoundException(inner), crate::error::BatchDisassociateClientDeviceFromCoreDeviceErrorKind::ThrottlingException(inner) => Error::ThrottlingException(inner), crate::error::BatchDisassociateClientDeviceFromCoreDeviceErrorKind::ValidationException(inner) => Error::ValidationException(inner), crate::error::BatchDisassociateClientDeviceFromCoreDeviceErrorKind::Unhandled(inner) => Error::Unhandled(inner), } _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::CancelDeploymentError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::CancelDeploymentError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::CancelDeploymentErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::CancelDeploymentErrorKind::ConflictException(inner) => { Error::ConflictException(inner) } crate::error::CancelDeploymentErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::CancelDeploymentErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::CancelDeploymentErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::CancelDeploymentErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::CancelDeploymentErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::CreateComponentVersionError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError<crate::error::CreateComponentVersionError, R>, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, ..} => match err.kind { crate::error::CreateComponentVersionErrorKind::AccessDeniedException(inner) => Error::AccessDeniedException(inner), crate::error::CreateComponentVersionErrorKind::ConflictException(inner) => Error::ConflictException(inner), crate::error::CreateComponentVersionErrorKind::InternalServerException(inner) => Error::InternalServerException(inner), crate::error::CreateComponentVersionErrorKind::RequestAlreadyInProgressException(inner) => Error::RequestAlreadyInProgressException(inner), crate::error::CreateComponentVersionErrorKind::ServiceQuotaExceededException(inner) => Error::ServiceQuotaExceededException(inner), crate::error::CreateComponentVersionErrorKind::ThrottlingException(inner) => Error::ThrottlingException(inner), crate::error::CreateComponentVersionErrorKind::ValidationException(inner) => Error::ValidationException(inner), crate::error::CreateComponentVersionErrorKind::Unhandled(inner) => Error::Unhandled(inner), } _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::CreateDeploymentError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::CreateDeploymentError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::CreateDeploymentErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::CreateDeploymentErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::CreateDeploymentErrorKind::RequestAlreadyInProgressException( inner, ) => Error::RequestAlreadyInProgressException(inner), crate::error::CreateDeploymentErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::CreateDeploymentErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::CreateDeploymentErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::CreateDeploymentErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::DeleteComponentError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::DeleteComponentError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::DeleteComponentErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::DeleteComponentErrorKind::ConflictException(inner) => { Error::ConflictException(inner) } crate::error::DeleteComponentErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::DeleteComponentErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::DeleteComponentErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::DeleteComponentErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::DeleteComponentErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::DeleteCoreDeviceError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::DeleteCoreDeviceError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::DeleteCoreDeviceErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::DeleteCoreDeviceErrorKind::ConflictException(inner) => { Error::ConflictException(inner) } crate::error::DeleteCoreDeviceErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::DeleteCoreDeviceErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::DeleteCoreDeviceErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::DeleteCoreDeviceErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::DeleteCoreDeviceErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::DescribeComponentError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::DescribeComponentError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::DescribeComponentErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::DescribeComponentErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::DescribeComponentErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::DescribeComponentErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::DescribeComponentErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::DescribeComponentErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::GetComponentError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::GetComponentError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetComponentErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::GetComponentErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::GetComponentErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::GetComponentErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::GetComponentErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::GetComponentErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::GetComponentVersionArtifactError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError<crate::error::GetComponentVersionArtifactError, R>, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetComponentVersionArtifactErrorKind::AccessDeniedException( inner, ) => Error::AccessDeniedException(inner), crate::error::GetComponentVersionArtifactErrorKind::InternalServerException( inner, ) => Error::InternalServerException(inner), crate::error::GetComponentVersionArtifactErrorKind::ResourceNotFoundException( inner, ) => Error::ResourceNotFoundException(inner), crate::error::GetComponentVersionArtifactErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::GetComponentVersionArtifactErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::GetComponentVersionArtifactErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::GetCoreDeviceError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::GetCoreDeviceError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetCoreDeviceErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::GetCoreDeviceErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::GetCoreDeviceErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::GetCoreDeviceErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::GetCoreDeviceErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::GetCoreDeviceErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::GetDeploymentError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::GetDeploymentError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetDeploymentErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::GetDeploymentErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::GetDeploymentErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::GetDeploymentErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::GetDeploymentErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::GetDeploymentErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From< smithy_http::result::SdkError< crate::error::ListClientDevicesAssociatedWithCoreDeviceError, R, >, > for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError< crate::error::ListClientDevicesAssociatedWithCoreDeviceError, R, >, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, ..} => match err.kind { crate::error::ListClientDevicesAssociatedWithCoreDeviceErrorKind::AccessDeniedException(inner) => Error::AccessDeniedException(inner), crate::error::ListClientDevicesAssociatedWithCoreDeviceErrorKind::InternalServerException(inner) => Error::InternalServerException(inner), crate::error::ListClientDevicesAssociatedWithCoreDeviceErrorKind::ResourceNotFoundException(inner) => Error::ResourceNotFoundException(inner), crate::error::ListClientDevicesAssociatedWithCoreDeviceErrorKind::ThrottlingException(inner) => Error::ThrottlingException(inner), crate::error::ListClientDevicesAssociatedWithCoreDeviceErrorKind::ValidationException(inner) => Error::ValidationException(inner), crate::error::ListClientDevicesAssociatedWithCoreDeviceErrorKind::Unhandled(inner) => Error::Unhandled(inner), } _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::ListComponentsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::ListComponentsError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::ListComponentsErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::ListComponentsErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::ListComponentsErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::ListComponentsErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::ListComponentsErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::ListComponentVersionsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError<crate::error::ListComponentVersionsError, R>, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::ListComponentVersionsErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::ListComponentVersionsErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::ListComponentVersionsErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::ListComponentVersionsErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::ListComponentVersionsErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::ListComponentVersionsErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::ListCoreDevicesError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::ListCoreDevicesError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::ListCoreDevicesErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::ListCoreDevicesErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::ListCoreDevicesErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::ListCoreDevicesErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::ListCoreDevicesErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::ListDeploymentsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::ListDeploymentsError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::ListDeploymentsErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::ListDeploymentsErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::ListDeploymentsErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::ListDeploymentsErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::ListDeploymentsErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::ListEffectiveDeploymentsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError<crate::error::ListEffectiveDeploymentsError, R>, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::ListEffectiveDeploymentsErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::ListEffectiveDeploymentsErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::ListEffectiveDeploymentsErrorKind::ResourceNotFoundException( inner, ) => Error::ResourceNotFoundException(inner), crate::error::ListEffectiveDeploymentsErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::ListEffectiveDeploymentsErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::ListEffectiveDeploymentsErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::ListInstalledComponentsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError<crate::error::ListInstalledComponentsError, R>, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::ListInstalledComponentsErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::ListInstalledComponentsErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::ListInstalledComponentsErrorKind::ResourceNotFoundException( inner, ) => Error::ResourceNotFoundException(inner), crate::error::ListInstalledComponentsErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::ListInstalledComponentsErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::ListInstalledComponentsErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::ListTagsForResourceError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::ListTagsForResourceError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::ListTagsForResourceErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::ListTagsForResourceErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::ListTagsForResourceErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::ListTagsForResourceErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::ResolveComponentCandidatesError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: smithy_http::result::SdkError<crate::error::ResolveComponentCandidatesError, R>, ) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::ResolveComponentCandidatesErrorKind::AccessDeniedException(inner) => { Error::AccessDeniedException(inner) } crate::error::ResolveComponentCandidatesErrorKind::ConflictException(inner) => { Error::ConflictException(inner) } crate::error::ResolveComponentCandidatesErrorKind::InternalServerException( inner, ) => Error::InternalServerException(inner), crate::error::ResolveComponentCandidatesErrorKind::ResourceNotFoundException( inner, ) => Error::ResourceNotFoundException(inner), crate::error::ResolveComponentCandidatesErrorKind::ThrottlingException(inner) => { Error::ThrottlingException(inner) } crate::error::ResolveComponentCandidatesErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::ResolveComponentCandidatesErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::TagResourceError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::TagResourceError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::TagResourceErrorKind::InternalServerException(inner) => { Error::InternalServerException(inner) } crate::error::TagResourceErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::TagResourceErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::TagResourceErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<smithy_http::result::SdkError<crate::error::UntagResourceError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: smithy_http::result::SdkError<crate::error::UntagResourceError, R>) -> Self { match err { smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::UntagResourceErrorKind::InternalServerException(inner) =>
crate::error::UntagResourceErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::UntagResourceErrorKind::ValidationException(inner) => { Error::ValidationException(inner) } crate::error::UntagResourceErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl std::error::Error for Error {}
{ Error::InternalServerException(inner) }
footer.tsx
// <copyright file="footer.tsx" company="Microsoft"> // Copyright (c) Microsoft. All rights reserved. // </copyright> import * as React from "react"; import { Text, Flex, Button } from "@fluentui/react"; import "../styles/theme.css"; interface IAwardBadgeFooterProps { resourceStrings: any, onBackClick: (event: any) => void, onPreviewClick: (event: any) => void, submitAwardBadge: (event: any) => void, isPreviewBadge: boolean, errorMessage: string | null, isAwardBadgeLoading: boolean } const AwardBadgeFooter: React.FunctionComponent<IAwardBadgeFooterProps> = props => { const onBackClick = (event: any): void => { props.onBackClick(event); } const onPreviewClick = (event: any): void => { props.onPreviewClick(event); } const submitAwardBadge = (event: any): void => { props.submitAwardBadge(event);
} return ( <div className="footer"> <Flex gap="gap.small"> {props.isPreviewBadge === false && props.errorMessage !== null && <Text styles={{ marginLeft:"1rem" }} content={props.errorMessage} error />} </Flex> <Flex gap="gap.small"> <Button icon="icon-chevron-start" text content="Back" onClick={onBackClick} styles={{ marginLeft:"-1rem" }} /> <Flex.Item push> {props.isPreviewBadge === false ? <Button content={props.resourceStrings.Preview} onClick={onPreviewClick} /> : <span></span>} </Flex.Item> <Button loading={props.isAwardBadgeLoading} disabled={props.isAwardBadgeLoading} primary content={props.resourceStrings.Award} onClick={submitAwardBadge} /> </Flex> </div> ); } export default AwardBadgeFooter;
lib.rs
use automatica::Tf; pub mod pid_controller; // \frac{G}{1 + G} = \frac{a}{b}{1 + \frac{a}{b}} = \frac{a}{a + b} pub fn feedback_loop(system: &Tf<f64>) -> Tf<f64> { Tf::new(system.num().clone(), system.num() + system.den()) } #[cfg(test)] mod tests { #[test] fn it_works()
}
{ assert_eq!(2 + 2, 4); }
map.771438e0.js
(function(t){function e(e){for(var a,n,i=e[0],l=e[1],c=e[2],p=0,d=[];p<i.length;p++)n=i[p],Object.prototype.hasOwnProperty.call(r,n)&&r[n]&&d.push(r[n][0]),r[n]=0;for(a in l)Object.prototype.hasOwnProperty.call(l,a)&&(t[a]=l[a]);u&&u(e);while(d.length)d.shift()();return o.push.apply(o,c||[]),s()}function s(){for(var t,e=0;e<o.length;e++){for(var s=o[e],a=!0,i=1;i<s.length;i++){var l=s[i];0!==r[l]&&(a=!1)}a&&(o.splice(e--,1),t=n(n.s=s[0]))}return t}var a={},r={map:0},o=[];function n(e){if(a[e])return a[e].exports;var s=a[e]={i:e,l:!1,exports:{}};return t[e].call(s.exports,s,s.exports,n),s.l=!0,s.exports}n.m=t,n.c=a,n.d=function(t,e,s){n.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:s})},n.r=function(t){"undefined"!==typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},n.t=function(t,e){if(1&e&&(t=n(t)),8&e)return t;if(4&e&&"object"===typeof t&&t&&t.__esModule)return t;var s=Object.create(null);if(n.r(s),Object.defineProperty(s,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var a in t)n.d(s,a,function(e){return t[e]}.bind(null,a));return s},n.n=function(t){var e=t&&t.__esModule?function(){return t["default"]}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},n.p="/static/dist/";var i=window["webpackJsonp"]=window["webpackJsonp"]||[],l=i.push.bind(i);i.push=e,i=i.slice();for(var c=0;c<i.length;c++)e(i[c]);var u=l;o.push([1,"chunk-common"]),s()})({1:function(t,e,s){t.exports=s("53d0")},"53d0":function(t,e,s){"use strict";s.r(e);var a=s("a4b5"),r=function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"map-section m-0 pt-0 bg-gradient"},[s("div",{staticClass:"d-block d-md-none d-lg-none d-xl-none bg-gradient small-screen-map-section pb-5"},[s("div",{staticClass:"row pt-5 top-numbers-row"},[s("div",{staticClass:"col-4 offset-2 p-0"},[s("p",{staticClass:"bignumber"},[t._v(t._s((t.total_cases/1e6).toFixed(1)))])]),t._m(0)]),t._m(1),s("div",{staticClass:"row"},[s("div",{staticClass:"col-4 offset-2 p-0"},[s("div",{staticClass:"sixtwentyseven"},[t._v(t._s(t.total_reporters))])]),s("div",{staticClass:"col-4 p-0"},[s("div",{staticClass:"fortym pl-3"},[t._v(t._s((t.total_pages/1e6).toFixed(0))+"M")])])]),t._m(2)]),s("div",{staticClass:"d-none d-md-block d-lg-block d-xl-block pt-5"},[s("div",{staticClass:"row top-section-row"},[s("div",{staticClass:"col-1 text-right p-3 d-none d-lg-block"},[s("img",{attrs:{"aria-hidden":"true",src:t.urls.static+"img/white-arrow-right.svg"}})]),t._m(3)]),s("div",{staticClass:"row content-row"},[s("div",{staticClass:"col-3 offset-md-1 pr-3"},[s("div",{staticClass:"boxcontainer text-white state-numbers-boxcontainer mt-2 mb-4"},[s("div",{staticClass:"boxcontainer-body bg-black p-3 pt-2 pb-2"},[s("h5",{staticClass:"boxcontainer-title pb-1 jur_name"},[t._v(" "+t._s(t.hoveredSlug?t.jurNameForSlug(t.hoveredSlug):"State and Federal Totals")+" ")]),s("div",{staticClass:"number-set mt-3 p-0"},[s("p",{staticClass:"boxcontainer-text figure mb-0 num_cases"},[t._v(t._s(t.caseCount()))]),s("p",{staticClass:"boxcontainer-text label"},[t._v("Unique cases")])]),s("div",{staticClass:"number-set d-lg-block d-md-none p-0"},[s("p",{staticClass:"boxcontainer-text figure mb-0 num_reporters"},[t._v(t._s(t.reporterCount()))]),s("p",{staticClass:"boxcontainer-text label"},[t._v("Reporters")])]),s("div",{staticClass:"number-set p-0"},[s("p",{staticClass:"boxcontainer-text figure mb-0 num_pages"},[t._v(t._s(t.pageCount()))]),s("p",{staticClass:"boxcontainer-text label"},[t._v("Pages scanned")])]),s("div",{staticClass:"sr-only",attrs:{"aria-live":"polite"}},[t._v(" "+t._s(t.caseCount())+" cases. "+t._s(t.reporterCount())+" reporters. "+t._s(t.pageCount())+" pages. ")])]),s("div",{staticClass:"boxcontainer-body bg-black p-3 pt-2"},[s("h5",{staticClass:"boxcontainer-title pb-1 federal"},[t._v("Federal Totals")]),s("div",{staticClass:"number-set"},[s("p",{staticClass:"boxcontainer-text figure mb-0"},[t._v(t._s(t.federal_cases.toLocaleString()))]),s("p",{staticClass:"boxcontainer-text label"},[t._v("Unique cases")])]),s("div",{staticClass:"number-set d-lg-block d-md-none"},[s("p",{staticClass:"boxcontainer-text figure mb-0"},[t._v(t._s(t.federal_reporters.toLocaleString()))]),s("p",{staticClass:"boxcontainer-text label"},[t._v("Reporters")])]),s("div",{staticClass:"number-set"},[s("p",{staticClass:"boxcontainer-text figure mb-0"},[t._v(t._s(t.federal_pages.toLocaleString()))]),s("p",{staticClass:"boxcontainer-text label"},[t._v("Pages scanned")])])])])]),s("div",{staticClass:"col-7"},[s("div",{staticClass:"map bg-transparent pl-5"},[s("a",{staticClass:"skip",attrs:{href:"#section-dive-in"}},[t._v("Skip map")]),s("USMap",{attrs:{tabindex:""},on:{mouseover:t.mapMouseover,mouseleave:t.mapMouseleave,focusin:t.mapMouseover}})],1)])])])])},o=[function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"col-4 pt-4"},[s("div",{staticClass:"pt-2 pt-sm-4 vseparator"},[s("span",{staticClass:"million"},[t._v("Million")]),s("br"),s("span",{staticClass:"uniquecases"},[t._v(" Unique cases ")])])])},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"row p-0 mb-3 mb-sm-4"},[s("div",{staticClass:"col-8 offset-2 p-0 mt-0 mb-0 vseparator"})])},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"row"},[s("div",{staticClass:"col-4 offset-2 p-0"},[s("div",{staticClass:"bottom-text bottom-text-border pl-1 pt-4"},[t._v("Reporters")])]),s("div",{staticClass:"col-6 p-0"},[s("div",{staticClass:"bottom-text pt-4 pl-2"},[t._v("Pages scanned")])])])},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"col-4 offset-1 offset-lg-0"},[s("h2",{staticClass:"section-title p-0"},[t._v(" Our data ")])])}],n=(s("cd24"),s("32ec"),s("cbcf"),s("57d2")),i=s("68d6"),l=s("376f"),c={components:{USMap:l["a"]},name:"Main",beforeMount:function(){this.urls=urls,this.jurData=jurData,this.total_cases=0,this.total_reporters=0,this.total_pages=0;for(var t=0,e=Object.entries(this.jurData);t<e.length;t++){var s=Object(i["a"])(e[t],2),a=s[0],r=s[1];this.total_cases+=r["case_count"],this.total_reporters+=r["reporter_count"],this.total_pages+=r["page_count"],"us"===a&&(this.federal_cases=r["case_count"],this.federal_reporters=r["reporter_count"],this.federal_pages=r["page_count"])}},mounted:function(){var t,e=Object(n["a"])(document.getElementsByClassName("state-link"));try{for(e.s();!(t=e.n()).done;){var s=t.value;s.setAttribute("href",this.urls.jurisdiction.replace("JURISDICTION",s.id))}}catch(a){e.e(a)}finally{e.f()}},data:function(){return{hoveredSlug:null}},methods:{mapMouseover:function(t){var e=t.target;e.classList.contains("state")?this.hoveredSlug=e.parentElement.id:e.classList.contains("state-link")?this.hoveredSlug=e.id:this.hoveredSlug=null},mapMouseleave:function(){this.hoveredSlug=null},jurNameForSlug:function(t){return document.getElementById(t).ariaLabel},caseCount:function(){return(this.hoveredSlug?this.jurData[this.hoveredSlug].case_count:this.total_cases).toLocaleString()},reporterCount:function(){return(this.hoveredSlug?this.jurData[this.hoveredSlug].reporter_count:this.total_reporters).toLocaleString()},pageCount:function(){return(this.hoveredSlug?this.jurData[this.hoveredSlug].page_count:this.total_pages).toLocaleString()}}},u=c,p=s("a6c2"),d=Object(p["a"])(u,r,o,!1,null,null,null),v=d.exports;new a["default"]({el:"#app",components:{Main:v},template:"<Main/>"})}});
//# sourceMappingURL=map.771438e0.js.map
query.go
package cli import ( "context" "strings" "github.com/spf13/cobra" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/flags" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/x/slashing/types" ) // GetQueryCmd returns the cli query commands for this module func GetQueryCmd() *cobra.Command { // Group slashing queries under a subcommand slashingQueryCmd := &cobra.Command{ Use: types.ModuleName, Short: "Querying commands for the slashing module", DisableFlagParsing: true, SuggestionsMinimumDistance: 2, RunE: client.ValidateCmd, } slashingQueryCmd.AddCommand( GetCmdQuerySigningInfo(), GetCmdQueryParams(), GetCmdQuerySigningInfos(), ) return slashingQueryCmd } // GetCmdQuerySigningInfo implements the command to query signing info. func GetCmdQuerySigningInfo() *cobra.Command { cmd := &cobra.Command{ Use: "signing-info [validator-conspub]", Short: "Query a validator's signing information", Long: strings.TrimSpace(`Use a validators' consensus public key to find the signing-info for that validator: $ <appd> query slashing signing-info cosmosvalconspub1zcjduepqfhvwcmt7p06fvdgexxhmz0l8c7sgswl7ulv7aulk364x4g5xsw7sr0k2g5 `), Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadQueryCommandFlags(clientCtx, cmd.Flags()) if err != nil { return err } queryClient := types.NewQueryClient(clientCtx) pk, err := sdk.GetPubKeyFromBech32(sdk.Bech32PubKeyTypeConsPub, args[0]) if err != nil { return err } consAddr := sdk.ConsAddress(pk.Address()) params := &types.QuerySigningInfoRequest{ConsAddress: consAddr.String()} res, err := queryClient.SigningInfo(context.Background(), params) if err != nil { return err } return clientCtx.PrintOutput(&res.ValSigningInfo) }, } flags.AddQueryFlagsToCmd(cmd) return cmd } // GetCmdQuerySigningInfos implements the command to query signing infos. func GetCmdQuerySigningInfos() *cobra.Command { cmd := &cobra.Command{ Use: "signing-infos", Short: "Query signing information of all validators", Long: strings.TrimSpace(`signing infos of validators: $ <appd> query slashing signing-infos `), Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadQueryCommandFlags(clientCtx, cmd.Flags()) if err != nil { return err } queryClient := types.NewQueryClient(clientCtx)
pageReq, err := client.ReadPageRequest(cmd.Flags()) if err != nil { return err } params := &types.QuerySigningInfosRequest{Pagination: pageReq} res, err := queryClient.SigningInfos(context.Background(), params) if err != nil { return err } return clientCtx.PrintOutput(res) }, } flags.AddQueryFlagsToCmd(cmd) flags.AddPaginationFlagsToCmd(cmd, "signing infos") return cmd } // GetCmdQueryParams implements a command to fetch slashing parameters. func GetCmdQueryParams() *cobra.Command { cmd := &cobra.Command{ Use: "params", Short: "Query the current slashing parameters", Args: cobra.NoArgs, Long: strings.TrimSpace(`Query genesis parameters for the slashing module: $ <appd> query slashing params `), RunE: func(cmd *cobra.Command, args []string) error { clientCtx := client.GetClientContextFromCmd(cmd) clientCtx, err := client.ReadQueryCommandFlags(clientCtx, cmd.Flags()) if err != nil { return err } queryClient := types.NewQueryClient(clientCtx) params := &types.QueryParamsRequest{} res, err := queryClient.Params(context.Background(), params) if err != nil { return err } return clientCtx.PrintOutput(&res.Params) }, } flags.AddQueryFlagsToCmd(cmd) return cmd }
core.py
#!/bin/python import importlib import logging import os import sys import argparse import shutil from clint.textui import colored try: from alacrity import lib except ImportError: lib = importlib.import_module('lib', '../alacrity') def
(): """ Entry point for the package, alacrity.exe in win and alacrity in linux :return: None """ # Start the process try: from alacrity import version except ImportError: version = importlib.import_module('version', '../alacrity') # Get version information from version.py v = version.version() parser = argparse.ArgumentParser(description="Alacrity : " "Quickstart your Python " "package from a terminal") parser.add_argument('--make', action='store_true', help="Rebuild " "persistence") parser.add_argument('--debug', action='store_true', help="Display verbose " "debug messages") parser.add_argument('--version', action="version", version=v) parser.add_argument('package_name') args = parser.parse_args() if args.make: lib.rebuild_persistence() if not args.package_name: logging.error(" package_name is a required argument") sys.exit() # Initialize logging depending on debug mode if args.debug: logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.CRITICAL) # Initialise status dictionary status = { 'structure_created': False, 'gitignore_created': False, 'setup_created': False, 'license_created': False, 'manifest_created': False, 'readme_created': False, 'requirements_created': False, 'tests_created': False, 'git_initialized': False, 'venv_created': False, 'sphinx_created': False } try: try: package_name = args.package_name # Check if the package already exists logging.debug("[-] Checking if the package already exists") check_is_file = os.path.isfile( "{0}/{0}/__init__.py".format(package_name)) # Check for clean_make if os.path.isdir(package_name) or check_is_file: logging.debug("[-] Package already exists, " "launching clean make prompt") print(colored.red("[!] A package by that name already exists, " "destroy and clean make? (y/n) : "), end="") choice = input() logging.debug("[-] Choice prompt input : {}".format(choice)) if choice == 'y': logging.debug("[-] Removing existing package") lib.remove_package(package_name) elif choice == 'n': logging.debug("[-] Clean make cancelled") print(colored.red("[!] Please pick a different package " "name, aborting.")) sys.exit() else: logging.error(colored.red(" Invalid choice")) print(colored.red("[!] Invalid choice, aborting")) sys.exit() # Create the initial structure logging.debug("[-] Creating package structure") lib.create_package_structure(package_name, status) # Create starter files logging.debug("[-] Creating starter files in package") author, version = lib.create_starter_files(package_name, status) # Create tests directory logging.debug("[-] Creating tests package in structure") lib.create_tests_package(package_name, status) # Initialize git if required and available logging.debug("[-] Launching git init submodule") lib.git_init(package_name, status) # Initialize venv if required and available logging.debug("[-] Launching venv init submodule") lib.venv_init(package_name, status) # Initialize sphinx docs if required and available logging.debug("[-] Launching sphinx init submodule") lib.sphinx_init(package_name, author, version, status) logging.debug("[-] Launching status reporter submodule") lib.report_status(status) print(colored.green("[|]")) print(colored.green("[*] Package {} was created " "successfully.".format(package_name))) except EOFError: # Catch error thrown by clint.main print(colored.yellow("\n[!] Ctrl+C : Aborting package creation.")) sys.exit() except KeyboardInterrupt: print(colored.yellow("\n[!] Ctrl+C : Aborting package creation.")) # Rollback changes if os.path.isdir(args.package_name): logging.debug("[-] Rolling back committed changes, deleting files") shutil.rmtree(args.package_name) logging.debug("[-] alacrity:ROOT :: quiting") sys.exit() if __name__ == '__main__': main()
main
bot.go
package bot import ( "encoding/xml" tgbotapi "github.com/Syfaro/telegram-bot-api" "io/ioutil" "log" "os" ) const ( configPath = "./docker_compose/src/bot/config.xml" updateTimeout = 60 ) type BotInterface interface { Run() } type Bot struct { telegramID string API *tgbotapi.BotAPI CMD Commands } func NewBot() *Bot { newBot := Bot{} newBot.loadID() newBot.setAPI() newBot.setCmd() return &newBot } func (bot *Bot) loadID() { var telegramID string xmlFile, err := os.Open(configPath) if err != nil { panic(err) } defer xmlFile.Close() byteValue, err := ioutil.ReadAll(xmlFile) if err != nil { panic(err) } xml.Unmarshal(byteValue, &telegramID) bot.telegramID = telegramID
api, err := tgbotapi.NewBotAPI(bot.telegramID) if err != nil { panic(err) } api.Debug = false bot.API = api } func (bot *Bot) setCmd() { bot.CMD = InitCommands() } func (bot *Bot) Run() { bot.greet() bot.monitorMessageCycle() } func (bot *Bot) greet() { api := bot.API log.Printf("Авторизован под аккаунтом %s!", api.Self.UserName) } func (bot *Bot) monitorMessageCycle() { updates := bot.getUpdateChannel() for update := range updates { bot.handleUpdate(&update) } } func (bot *Bot) getUpdateChannel() tgbotapi.UpdatesChannel { api := bot.API update := tgbotapi.NewUpdate(0) update.Timeout = updateTimeout updateChannel, err := api.GetUpdatesChan(update) if err != nil { panic(err) } return updateChannel } func (bot *Bot) handleUpdate(update *tgbotapi.Update) { switch { case bot.updateIsCommand(update): bot.handleCommand(update) bot.logCommand(update) } }
} func (bot *Bot) setAPI() {
partial-specialization-and-inheritance.rs
/* automatically generated by rust-bindgen */ #![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)] #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct Base { pub _address: u8, } #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct
{ pub b: bool, } #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct Usage { pub _address: u8, } extern "C" { #[link_name = "\u{1}_ZN5Usage13static_memberE"] pub static mut Usage_static_member: [u32; 2usize]; } #[test] fn bindgen_test_layout_Usage() { assert_eq!( ::std::mem::size_of::<Usage>(), 1usize, concat!("Size of: ", stringify!(Usage)) ); assert_eq!( ::std::mem::align_of::<Usage>(), 1usize, concat!("Alignment of ", stringify!(Usage)) ); } extern "C" { #[link_name = "\u{1}_ZN5UsageC1Ev"] pub fn Usage_Usage(this: *mut Usage); } impl Usage { #[inline] pub unsafe fn new() -> Self { let mut __bindgen_tmp = ::std::mem::uninitialized(); Usage_Usage(&mut __bindgen_tmp); __bindgen_tmp } }
Derived
workflows.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 NEC Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging from django.conf import settings import netaddr from django.conf import settings from django.core.urlresolvers import reverse # noqa from django.utils.translation import ugettext_lazy as _ # noqa from horizon import exceptions from horizon import forms from horizon import messages from horizon.utils import fields from horizon import workflows from openstack_dashboard import api LOG = logging.getLogger(__name__) class CreateNetworkInfoAction(workflows.Action): net_name = forms.CharField(max_length=255, label=_("Network Name"), required=False) if api.neutron.is_port_profiles_supported(): net_profile_id = forms.ChoiceField(label=_("Network Profile")) admin_state = forms.BooleanField(label=_("Admin State"), initial=True, required=False) if api.neutron.is_port_profiles_supported(): def __init__(self, request, *args, **kwargs): super(CreateNetworkInfoAction, self).__init__(request, *args, **kwargs) self.fields['net_profile_id'].choices = ( self.get_network_profile_choices(request)) def get_network_profile_choices(self, request): profile_choices = [('', _("Select a profile"))] for profile in self._get_profiles(request, 'network'): profile_choices.append((profile.id, profile.name)) return profile_choices def _get_profiles(self, request, type_p): try: profiles = api.neutron.profile_list(request, type_p) except Exception: profiles = [] msg = _('Network Profiles could not be retrieved.') exceptions.handle(request, msg) return profiles # TODO(absubram): Add ability to view network profile information # in the network detail if a profile is used. class Meta: name = _("Network") help_text = _("From here you can create a new network.\n" "In addition a subnet associated with the network " "can be created in the next panel.") class CreateNetworkInfo(workflows.Step): action_class = CreateNetworkInfoAction if api.neutron.is_port_profiles_supported(): contributes = ("net_name", "admin_state", "net_profile_id") else: contributes = ("net_name", "admin_state") class CreateSubnetInfoAction(workflows.Action): _ccs_enable_ipv6 = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {}).get('enable_ipv6', False) if _ccs_enable_ipv6: ip_version_choices = [(4, 'IPv4'), (6, 'IPv6')] ip_version_fields = fields.IPv4 | fields.IPv6 else: ip_version_choices = [(4, 'IPv4')] ip_version_fields = fields.IPv4 with_subnet = forms.BooleanField(label=_("Create Subnet"), initial=True, required=False) subnet_name = forms.CharField(max_length=255, label=_("Subnet Name"), required=False) cidr = fields.IPField(label=_("Network Address"), required=False, initial="", help_text=_("Network address in CIDR format " "(e.g. 192.168.0.0/24)"), version=ip_version_fields, mask=True) ip_version = forms.ChoiceField(choices=ip_version_choices, label=_("IP Version")) gateway_ip = fields.IPField( label=_("Gateway IP"), required=False, initial="", help_text=_("IP address of Gateway (e.g. 192.168.0.254) " "The default value is the first IP of the " "network address (e.g. 192.168.0.1 for " "192.168.0.0/24). " "If you use the default, leave blank. " "If you want to use no gateway, " "check 'Disable Gateway' below."), version=ip_version_fields, mask=False) no_gateway = forms.BooleanField(label=_("Disable Gateway"), initial=False, required=False) class Meta: name = _("Subnet") help_text = _('You can create a subnet associated with the new ' 'network, in which case "Network Address" must be ' 'specified. If you wish to create a network WITHOUT a ' 'subnet, uncheck the "Create Subnet" checkbox.') def __init__(self, request, context, *args, **kwargs): super(CreateSubnetInfoAction, self).__init__(request, context, *args, **kwargs) if not getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {}).get('enable_ipv6', True): self.fields['ip_version'].widget = forms.HiddenInput() self.fields['ip_version'].initial = 4 def _check_subnet_data(self, cleaned_data, is_create=True): cidr = cleaned_data.get('cidr') ip_version = int(cleaned_data.get('ip_version')) gateway_ip = cleaned_data.get('gateway_ip') no_gateway = cleaned_data.get('no_gateway') if not cidr: msg = _('Specify "Network Address" or ' 'clear "Create Subnet" checkbox.') raise forms.ValidationError(msg) if cidr: subnet = netaddr.IPNetwork(cidr) if subnet.version != ip_version: msg = _('Network Address and IP version are inconsistent.') raise forms.ValidationError(msg) if (ip_version == 4 and subnet.prefixlen == 32) or \ (ip_version == 6 and subnet.prefixlen == 128): msg = _("The subnet in the Network Address is too small (/%s)." % subnet.prefixlen) raise forms.ValidationError(msg) if not no_gateway and gateway_ip: if netaddr.IPAddress(gateway_ip).version is not ip_version: msg = _('Gateway IP and IP version are inconsistent.') raise forms.ValidationError(msg) if not is_create and not no_gateway and not gateway_ip: msg = _('Specify IP address of gateway or ' 'check "Disable Gateway".') raise forms.ValidationError(msg) def clean(self): cleaned_data = super(CreateSubnetInfoAction, self).clean() with_subnet = cleaned_data.get('with_subnet') if not with_subnet: return cleaned_data self._check_subnet_data(cleaned_data) return cleaned_data class CreateSubnetInfo(workflows.Step): action_class = CreateSubnetInfoAction contributes = ("with_subnet", "subnet_name", "cidr", "ip_version", "gateway_ip", "no_gateway") class CreateSubnetDetailAction(workflows.Action): enable_dhcp = forms.BooleanField(label=_("Enable DHCP"), initial=True, required=False) allocation_pools = forms.CharField( widget=forms.Textarea(), label=_("Allocation Pools"), help_text=_("IP address allocation pools. Each entry is " "&lt;start_ip_address&gt;,&lt;end_ip_address&gt; " "(e.g., 192.168.1.100,192.168.1.120) " "and one entry per line."), required=False) dns_nameservers = forms.CharField( widget=forms.widgets.Textarea(), label=_("DNS Name Servers"), help_text=_("IP address list of DNS name servers for this subnet. " "One entry per line."), required=False) host_routes = forms.CharField( widget=forms.widgets.Textarea(), label=_("Host Routes"), help_text=_("Additional routes announced to the hosts. " "Each entry is &lt;destination_cidr&gt;,&lt;nexthop&gt; " "(e.g., 192.168.200.0/24,10.56.1.254) " "and one entry per line."), required=False) class Meta: name = _("Subnet Detail") help_text = _('You can specify additional attributes for the subnet.') def _convert_ip_address(self, ip, field_name): try: return netaddr.IPAddress(ip) except (netaddr.AddrFormatError, ValueError): msg = _('%(field_name)s: Invalid IP address ' '(value=%(ip)s)' % dict( field_name=field_name, ip=ip)) raise forms.ValidationError(msg) def _convert_ip_network(self, network, field_name): try: return netaddr.IPNetwork(network) except (netaddr.AddrFormatError, ValueError): msg = _('%(field_name)s: Invalid IP address ' '(value=%(network)s)' % dict( field_name=field_name, network=network)) raise forms.ValidationError(msg) def _check_allocation_pools(self, allocation_pools): for p in allocation_pools.split('\n'): p = p.strip() if not p: continue pool = p.split(',') if len(pool) != 2: msg = _('Start and end addresses must be specified ' '(value=%s)') % p raise forms.ValidationError(msg) start, end = [self._convert_ip_address(ip, "allocation_pools") for ip in pool] if start > end: msg = _('Start address is larger than end address ' '(value=%s)') % p raise forms.ValidationError(msg) def _check_dns_nameservers(self, dns_nameservers): for ns in dns_nameservers.split('\n'): ns = ns.strip() if not ns: continue self._convert_ip_address(ns, "dns_nameservers") def _check_host_routes(self, host_routes): for r in host_routes.split('\n'): r = r.strip() if not r: continue route = r.split(',') if len(route) != 2: msg = _('Host Routes format error: ' 'Destination CIDR and nexthop must be specified ' '(value=%s)') % r raise forms.ValidationError(msg) self._convert_ip_network(route[0], "host_routes") self._convert_ip_address(route[1], "host_routes") def clean(self): cleaned_data = super(CreateSubnetDetailAction, self).clean() self._check_allocation_pools(cleaned_data.get('allocation_pools')) self._check_host_routes(cleaned_data.get('host_routes')) self._check_dns_nameservers(cleaned_data.get('dns_nameservers')) return cleaned_data class CreateSubnetDetail(workflows.Step): action_class = CreateSubnetDetailAction contributes = ("enable_dhcp", "allocation_pools", "dns_nameservers", "host_routes") class CreateNetwork(workflows.Workflow): slug = "create_network" name = _("Create Network") finalize_button_name = _("Create") success_message = _('Created network "%s".') failure_message = _('Unable to create network "%s".') default_steps = (CreateNetworkInfo, CreateSubnetInfo, CreateSubnetDetail) def get_success_url(self): return reverse("horizon:project:networks:index") def get_failure_url(self): return reverse("horizon:project:networks:index") def format_status_message(self, message): name = self.context.get('net_name') or self.context.get('net_id', '') return message % name def _create_network(self, request, data): try: params = {'name': data['net_name'], 'admin_state_up': data['admin_state']} if api.neutron.is_port_profiles_supported(): params['net_profile_id'] = data['net_profile_id'] network = api.neutron.network_create(request, **params) network.set_id_as_name_if_empty() self.context['net_id'] = network.id msg = _('Network "%s" was successfully created.') % network.name LOG.debug(msg) return network except Exception as e: msg = (_('Failed to create network "%(network)s": %(reason)s') % {"network": data['net_name'], "reason": e}) LOG.info(msg) redirect = self.get_failure_url() exceptions.handle(request, msg, redirect=redirect) return False def _setup_subnet_parameters(self, params, data, is_create=True): """Setup subnet parameters This methods setups subnet parameters which are available in both create and update. """ is_update = not is_create params['enable_dhcp'] = data['enable_dhcp'] if is_create and data['allocation_pools']: pools = [dict(zip(['start', 'end'], pool.strip().split(','))) for pool in data['allocation_pools'].split('\n') if pool.strip()] params['allocation_pools'] = pools if data['host_routes'] or is_update: routes = [dict(zip(['destination', 'nexthop'], route.strip().split(','))) for route in data['host_routes'].split('\n') if route.strip()] params['host_routes'] = routes if data['dns_nameservers'] or is_update: nameservers = [ns.strip() for ns in data['dns_nameservers'].split('\n') if ns.strip()] params['dns_nameservers'] = nameservers def _create_subnet(self, request, data, network=None, tenant_id=None, no_redirect=False): if network: network_id = network.id network_name = network.name else: network_id = self.context.get('network_id') network_name = self.context.get('network_name') try: params = {'network_id': network_id, 'name': data['subnet_name'], 'cidr': data['cidr'], 'ip_version': int(data['ip_version'])} if tenant_id: params['tenant_id'] = tenant_id if data['no_gateway']: params['gateway_ip'] = None elif data['gateway_ip']: params['gateway_ip'] = data['gateway_ip'] self._setup_subnet_parameters(params, data) subnet = api.neutron.subnet_create(request, **params) self.context['subnet_id'] = subnet.id msg = _('Subnet "%s" was successfully created.') % data['cidr'] LOG.debug(msg) return subnet except Exception as e: msg = _('Failed to create subnet "%(sub)s" for network "%(net)s": ' ' %(reason)s') if no_redirect: redirect = None else: redirect = self.get_failure_url() exceptions.handle(request, msg % {"sub": data['cidr'], "net": network_name, "reason": e}, redirect=redirect) return False def _delete_network(self, request, network): """Delete the created network when subnet creation failed""" try: api.neutron.network_delete(request, network.id) msg = _('Delete the created network "%s" ' 'due to subnet creation failure.') % network.name LOG.debug(msg) redirect = self.get_failure_url() messages.info(request, msg) raise exceptions.Http302(redirect) #return exceptions.RecoverableError except Exception: msg = _('Failed to delete network "%s"') % network.name LOG.info(msg) redirect = self.get_failure_url() exceptions.handle(request, msg, redirect=redirect) def
(self, request, data): network = self._create_network(request, data) if not network: return False # If we do not need to create a subnet, return here. if not data['with_subnet']: return True subnet = self._create_subnet(request, data, network, no_redirect=True) if subnet: return True else: self._delete_network(request, network) return False
handle
drafthub_extras.py
import requests import re from django import template from django.utils.safestring import mark_safe import markdown as _markdown import bleach from pymdownx import emoji from drafthub.draft.utils import get_data_from_url markdown_kwargs = { 'extensions':[ 'pymdownx.superfences', 'markdown.extensions.tables', 'pymdownx.betterem', 'pymdownx.tilde', 'pymdownx.emoji', 'pymdownx.tasklist', 'pymdownx.magiclink', 'pymdownx.arithmatex', ], 'extension_configs':{ 'pymdownx.tilde': { 'subscript': False }, 'pymdownx.emoji':{ 'emoji_index': emoji.gemoji, 'emoji_generator': emoji.to_png, 'alt': 'short', 'options': { 'attributes': { 'align': 'absmiddle', 'height': '20px', 'width': '20px' }, } }, 'pymdownx.arithmatex':{ 'generic': True, } }
'tags': [ 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'b', 'i', 'strong', 'em', 'tt', 'del', 'p', 'br', 'span', 'div', 'blockquote', 'code', 'hr', 'pre', 'ul', 'ol', 'li', 'dd', 'dt', 'dl', 'img', 'a', 'sub', 'sup', 'table', 'thead','td', 'tr', 'th', 'tbody', 'input', # allow only type, checked and disabled ], 'attributes':{ '*': lambda *_: 1, } } register = template.Library() @register.filter @mark_safe def markdown(github_url): url = github_url data = get_data_from_url(url) raw = data['raw'] login = data['login'] repo = data['repo'] parent = data['parent'] markdown_kwargs['extension_configs']['pymdownx.magiclink'] = { 'repo_url_shortener': True, 'repo_url_shorthand': True, 'social_url_shorthand': True, 'provider': 'github', 'user': login, 'repo': repo, } url_response = requests.get(raw) unsafe_content = url_response.text re_links = '\[(.*)\]\((?!https?:\/\/|#)(.+)\)' match_links = re.compile(re_links) content_transform = match_links.sub( r'[\1](' + parent + r'\2)', unsafe_content) markdown_content = _markdown.markdown(content_transform, **markdown_kwargs) sanitized_content = bleach.clean(markdown_content, **bleach_kwargs) return sanitized_content @register.filter @mark_safe def plaintext_markdown(text): markdown_content = _markdown.markdown(text, **markdown_kwargs) sanitized_content = bleach.clean(markdown_content, **bleach_kwargs) return sanitized_content @register.filter def count_range(n): return range(1,n+1) @register.filter def in_queryset(blog, queryset): return blog in queryset @register.filter def get_model_name(queryset): return queryset[0]._meta.model_name @register.filter def timesince_format(value): value_str = value.split(',')[0] if value_str: value_str = value_str + ' ago' return value_str @register.filter def js_bool(value): return str(bool(value)).lower()
} bleach_kwargs = {
earlywarning.go
package dto import ( "time" "go-admin/app/admin/models" "go-admin/common/dto" common "go-admin/common/models" ) type EarlywarningGetPageReq struct { dto.Pagination `search:"-"` Timestamp time.Time `form:"timestamp" search:"type:exact;column:timestamp;table:earlywarning" comment:"时间戳"` Imagepath string `form:"imagepath" search:"type:exact;column:imagepath;table:earlywarning" comment:"图片地址"` Result string `form:"result" search:"type:exact;column:result;table:earlywarning" comment:"预警类型"` EarlywarningOrder } type EarlywarningOrder struct {Id int `form:"idOrder" search:"type:order;column:id;table:earlywarning"` Timestamp time.Time `form:"timestampOrder" search:"type:order;column:timestamp;table:earlywarning"` Imagepath string `form:"imagepathOrder" search:"type:order;column:imagepath;table:earlywarning"` Result string `form:"resultOrder" search:"type:order;column:result;table:earlywarning"` CreatedAt time.Time `form:"createdAtOrder" search:"type:order;column:created_at;table:earlywarning"` UpdatedAt time.Time `form:"updatedAtOrder" search:"type:order;column:updated_at;table:earlywarning"` DeletedAt time.Time `form:"deletedAtOrder" search:"type:order;column:deleted_at;table:earlywarning"` CreateBy string `form:"createByOrder" search:"type:order;column:create_by;table:earlywarning"` UpdateBy string `form:"updateByOrder" search:"type:order;column:update_by;table:earlywarning"` } func (m *EarlywarningGetPageReq) GetNeedSearch() interface{} { return *m } type EarlywarningInsertReq struct { Id int `json:"-" comment:"编码"` // 编码 Timestamp time.Time `json:"timestamp" comment:"时间戳"` Imagepath string `json:"imagepath" comment:"图片地址"` Result string `json:"result" comment:"预警类型"` common.ControlBy } func (s *EarlywarningInsertReq) Generate(model *models.Earlywarning) { if s.Id == 0 { model.Model = common.Model{ Id: s.Id } } model.Timestamp = s.Timestamp model.Imagepath = s.Imagepath model.Result = s.Result model.CreateBy = s.CreateBy // 添加这而,需要记录是被谁创建的 } func (s *EarlywarningInsertReq) GetId() interface{} { return s.Id } type EarlywarningUpdateReq struct { Id int `uri:"id" comment:"编码"` // 编码 Timestamp time.Time `json:"timestamp" comment:"时间戳"` Imagepath string `json:"imagepath" comment:"图片地址"` Result string `json:"result" comment:"预警类型"` common.ControlBy } func (s *EarlywarningUpdateReq) Generate(model *models.Earlywarning) { if s.Id == 0 { model.Model = common.Model{ Id: s.Id } } model.Timestamp = s.Timestamp model.Imagepath = s
eBy = s.UpdateBy // 添加这而,需要记录是被谁更新的 } func (s *EarlywarningUpdateReq) GetId() interface{} { return s.Id } // EarlywarningGetReq 功能获取请求参数 type EarlywarningGetReq struct { Id int `uri:"id"` } func (s *EarlywarningGetReq) GetId() interface{} { return s.Id } // EarlywarningDeleteReq 功能删除请求参数 type EarlywarningDeleteReq struct { Ids []int `json:"ids"` } func (s *EarlywarningDeleteReq) GetId() interface{} { return s.Ids }
.Imagepath model.Result = s.Result model.Updat
licenses.py
# Copyright 2014 ARM Limited # # Licensed under the Apache License, Version 2.0 # See LICENSE file for details. # standard library modules, , , from __future__ import print_function from collections import defaultdict import logging # validate, , validate things, internal from yotta.lib import validate def addOptions(parser): parser.add_argument('--all', '-a', dest='list_all', default=False, action='store_true', help='List all licenses, not just each unique license.' ) def execCommand(args, following_args):
c = validate.currentDirectoryModule() if not c: return 1 if not args.target: logging.error('No target has been set, use "yotta target" to set one.') return 1 target, errors = c.satisfyTarget(args.target) if errors: for error in errors: logging.error(error) return 1 dependencies = c.getDependenciesRecursive( target = target, available_components = [(c.getName(), c)] ) errors = [] if args.list_all: for name, dep in dependencies.items(): if not dep: errors.append(u'%s is missing: license unknown!' % name) else: print(u'%s: %s' % (name, u', '.join(dep.licenses()))) else: licenses = defaultdict(list) for name, dep in dependencies.items(): if not dep: errors.append(u'%s is missing: license unknown!' % name) else: for lic in dep.licenses(): licenses[lic].append(name) for lic in licenses: print(lic) if len(errors): for err in errors: logging.error(err) return 1 return 0
unit_test.py
from __future__ import division from leapp.libraries.actor.library import (MIN_AVAIL_BYTES_FOR_BOOT, check_avail_space_on_boot, inhibit_upgrade) from leapp import reporting from leapp.libraries.common.testutils import create_report_mocked class fake_get_avail_bytes_on_boot(object): def __init__(self, size): self.size = size def __call__(self, *args): return self.size def
(monkeypatch): monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) # Test 0 bytes available /boot get_avail_bytes_on_boot = fake_get_avail_bytes_on_boot(0) check_avail_space_on_boot(get_avail_bytes_on_boot) # Test 0.1 MiB less then required in /boot get_avail_bytes_on_boot = fake_get_avail_bytes_on_boot(MIN_AVAIL_BYTES_FOR_BOOT - 0.1 * 2**20) check_avail_space_on_boot(get_avail_bytes_on_boot) assert reporting.create_report.called == 2 def test_enough_space_available(monkeypatch): monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) get_avail_bytes_on_boot = fake_get_avail_bytes_on_boot(MIN_AVAIL_BYTES_FOR_BOOT) check_avail_space_on_boot(get_avail_bytes_on_boot) assert reporting.create_report.called == 0 def test_inhibit_upgrade(monkeypatch): monkeypatch.setattr(reporting, 'create_report', create_report_mocked()) # Test 4.2 MiB available on /boot bytes_available = 4.2 * 2**20 inhibit_upgrade(bytes_available) assert reporting.create_report.called == 1 assert 'inhibitor' in reporting.create_report.report_fields['flags'] mib_needed = (MIN_AVAIL_BYTES_FOR_BOOT - bytes_available) / 2**20 assert "needs additional {0} MiB".format(mib_needed) in reporting.create_report.report_fields['summary']
test_not_enough_space_available
LightStrings.py
"""Defines basic light string data and functions.""" import os import sys import atexit import inspect import time import logging from typing import Any, Optional, Sequence, Union, overload from nptyping import NDArray import numpy as np from LightBerries.LightBerryExceptions import LightStringException from LightBerries.RpiWS281xPatch import rpi_ws281x from LightBerries.LightPixels import Pixel, PixelColors LOGGER = logging.getLogger("LightBerries") class LightString(Sequence[np.int_]): """Defines basic LED array data and functions.""" def __init__( self, ledCount: Optional[int] = None, pixelStrip: rpi_ws281x.PixelStrip = None, simulate: bool = False, ) -> None: """Creates a pixel array using the rpipixelStrip library and Pixels. Args: ledCount: the number of LEDs desired in the LightString pixelStrip: the ws281x object that actually controls the LED signaling simulate: dont use GPIO Raises: Warning: if something unexpected could happen SystemExit: if exiting KeyboardInterrupt: if user quits LightStringException: if something bad happens """ # cant run GPIO stuff without root, tell the user if they forgot # linux check is just for debugging with fake GPIO on windows if sys.platform == "linux" and not os.getuid() == 0: # pylint: disable = no-member raise LightStringException( "GPIO functionality requires root privilege. Please run command again as root" ) # catch error cases first if ledCount is None and pixelStrip is None and simulate is False: raise LightStringException( "Cannot create LightString object without ledCount or " + "pixelStrip object being specified" ) # catch error cases first # if ledCount is not None and pixelStrip is not None: # raise Warning( # "ledCount is overridden when pixelStrip is and ledcount " # + "are both passed to LightString constructor" # ) try: self.simulate = simulate # use passed led count if it is valid if ledCount is not None: self._ledCount = ledCount # used passed pixel strip if it is not none if pixelStrip is not None: self.pixelStrip = pixelStrip self.pixelStrip.begin() self._ledCount = self.pixelStrip.numPixels() LOGGER.debug( "%s.%s Created WS281X object", self.__class__.__name__, inspect.stack()[0][3], ) except SystemExit: # pylint:disable=try-except-raise raise except KeyboardInterrupt: # pylint:disable=try-except-raise raise except Exception as ex: LOGGER.exception( "%s.%s Exception: %s", self.__class__.__name__, inspect.stack()[0][3], ex, ) raise LightStringException(str(ex)).with_traceback(ex.__traceback__) try: # validate led count if not isinstance(self._ledCount, int): raise LightStringException( f'Cannot create LightString object with LED count "{self._ledCount}"', ) # if led count is good, create our pixel sequence self.rgbArray: NDArray[(3, Any), np.int32] = np.zeros((self._ledCount, 3)) self.rgbArray[:] = np.array([Pixel().array for i in range(self._ledCount)]) LOGGER.debug( "%s.%s Created Numpy Light array", self.__class__.__name__, inspect.stack()[0][3], ) except SystemExit: # pylint:disable=try-except-raise raise except KeyboardInterrupt: # pylint:disable=try-except-raise raise except Exception as ex: LOGGER.exception( "%s.%s Exception: %s", self.__class__.__name__, inspect.stack()[0][3], ex, ) raise LightStringException(str(ex)).with_traceback(ex.__traceback__) # try to force cleanup of underlying c objects when user exits atexit.register(self.__del__) def __del__( self, ) -> None: """Properly disposes of the rpipixelStrip object. Prevents memory leaks (hopefully) that were happening in the rpi.PixelStrip module. Raises: SystemExit: if exiting KeyboardInterrupt: if user quits LightStringException: if something bad happens """ # check if pixel strip has been created if isinstance(self.pixelStrip, rpi_ws281x.PixelStrip): # turn off leds self.off() # cleanup c memory usage try: self.pixelStrip._cleanup() except SystemExit: # pylint:disable=try-except-raise raise except KeyboardInterrupt: # pylint:disable=try-except-raise raise except Exception as ex: LOGGER.exception("Failed to clean up WS281X object: %s", str(ex)) raise LightStringException(str(ex)).with_traceback(ex.__traceback__) def __len__( self, ) -> int: """Return length of the light string (the number of LEDs). Returns: the number of LEDs in the array """ if self.rgbArray is not None: return len(self.rgbArray) else: return 0 @overload def __getitem__( # noqa D105 self, idx: int, ) -> NDArray[(3,), np.int32]: ... # pylint: disable=pointless-statement @overload def __getitem__( # noqa D105 # pylint: disable=function-redefined self, s: slice, ) -> NDArray[(3, Any), np.int32]: ... # pylint: disable=pointless-statement def __getitem__( # pylint: disable=function-redefined self, key: Union[int, slice] ) -> Union[NDArray[(3,), np.int32], NDArray[(3, Any), np.int32]]: """Return a LED index or slice from LED array. Args: key: an index of a single LED, or a slice specifying a range of LEDs Returns: the LED value or values as requested Raises: SystemExit: if exiting KeyboardInterrupt: if user quits LightStringException: if something bad happens """ try: if isinstance(self.rgbArray, np.ndarray): return self.rgbArray[key].array else: raise LightStringException("Cannot index into uninitialized LightString object") except SystemExit: # pylint:disable=try-except-raise raise except KeyboardInterrupt: # pylint:disable=try-except-raise raise except Exception as ex: LOGGER.exception('Failed to get key "%s" from %s: %s', key, self.rgbArray, ex) raise LightStringException(str(ex)).with_traceback(ex.__traceback__) def __setitem__( self, key: Union[int, slice], value: Union[NDArray[(3,), np.int32], NDArray[(3, Any), np.int32]], ) -> None: """Set LED value(s) in the array. Args: key: the index or slice specifying one or more LED indices value: the RGB value or values to assign to the given LED indices Raises: SystemExit: if exiting KeyboardInterrupt: if user quits LightStringException: if something bad happens """ try: if isinstance(self.rgbArray, np.ndarray): if isinstance(key, slice): if isinstance(value, np.ndarray): self.rgbArray.__setitem__(key, value) elif isinstance(value, Sequence): self.rgbArray.__setitem__(key, [Pixel(v).array for v in value]) else: raise LightStringException( "Cannot assign multiple indices of LightString using a single value" ) else: if isinstance(value, np.ndarray): self.rgbArray.__setitem__(key, value) elif isinstance(value, Pixel): self.rgbArray.__setitem__(key, Pixel(value).array) else: raise LightStringException( "Cannot assign single index of LightString using multiple values" ) else: raise LightStringException("Cannot index into uninitialized LightString object") except SystemExit: # pylint:disable=try-except-raise raise except KeyboardInterrupt: # pylint:disable=try-except-raise raise except Exception as ex: LOGGER.exception("Failed to set light %s to value %s: %s", key, value, ex) raise LightStringException(str(ex)).with_traceback(ex.__traceback__) def __enter__( self, ) -> "LightString": """Get an instance of this object object. Returns: an instance of LightString """ return self def __exit__( self, *args, ) -> None: """Cleanup the instance of this object. Args: args: ignored """ self.__del__() def
( self, ) -> None: """Turn all of the LEDs in the LightString off. Raises: SystemExit: if exiting KeyboardInterrupt: if user quits LightStringException: if something bad happens """ for index in range(len(self.rgbArray)): try: self[index] = PixelColors.OFF.array except SystemExit: # pylint:disable=try-except-raise raise except KeyboardInterrupt: # pylint:disable=try-except-raise raise except Exception as ex: LOGGER.exception( "Failed to set pixel %s in WS281X to value %s: %s", index, LightString(0), ex, ) raise LightStringException(str(ex)).with_traceback(ex.__traceback__) self.refresh() def refresh( self, ) -> None: """Update the ws281x signal using the numpy array. Raises: SystemExit: if exiting KeyboardInterrupt: if user quits LightStringException: if something bad happens """ try: # define callback for map method (fast iterator) if self.simulate is False: def SetPixel(irgb): try: i = irgb[0] rgb = irgb[1] value = (int(rgb[0]) << 16) + (int(rgb[1]) << 8) + int(rgb[2]) self.pixelStrip.setPixelColor(i, value) except SystemExit: # pylint:disable=try-except-raise raise except KeyboardInterrupt: # pylint:disable=try-except-raise raise except Exception as ex: LOGGER.exception( "Failed to set pixel %d in WS281X to value %d: %s", i, value, str(ex), ) raise LightStringException(str(ex)).with_traceback(ex.__traceback__) # copy this class's array into the ws281x array if self.simulate is False: list( map( SetPixel, enumerate(self.rgbArray), ) ) # send the signal out self.pixelStrip.show() except SystemExit: # pylint:disable=try-except-raise raise except KeyboardInterrupt: # pylint:disable=try-except-raise raise except Exception as ex: LOGGER.exception('Function call "show" in WS281X object failed: %s', str(ex)) raise LightStringException(str(ex)).with_traceback(ex.__traceback__) if __name__ == "__main__": LOGGER.info("Running LightString") # the number of pixels in the light string PIXEL_COUNT = 100 # GPIO pin to use for PWM signal GPIO_PWM_PIN = 18 # DMA channel DMA_CHANNEL = 5 # frequency to run the PWM signal at PWM_FREQUENCY = 800000 GAMMA = None LED_STRIP_TYPE = None INVERT = False PWM_CHANNEL = 0 with LightString( pixelStrip=rpi_ws281x.PixelStrip( num=PIXEL_COUNT, pin=GPIO_PWM_PIN, dma=DMA_CHANNEL, freq_hz=PWM_FREQUENCY, channel=PWM_CHANNEL, invert=INVERT, gamma=GAMMA, strip_type=LED_STRIP_TYPE, ), ) as liteStr: liteStr.refresh() p = Pixel((255, 0, 0)) liteStr[4] = PixelColors.RED liteStr.refresh() time.sleep(1)
off
tests.py
from django.test import TestCase from thumbs.fields import validate_size, split_original, determine_thumb, \ sting2tuple from thumbs.fields import SizeError, OriginalError import logging logger = logging.getLogger(__name__) try: from PIL import Image, ImageOps except ImportError: import Image import ImageOps class SplitOriginalTest(TestCase): '''Tests split_original.''' def test_type(self): self.assertRaises(OriginalError, split_original, None) self.assertRaises(OriginalError, split_original, {}) self.assertRaises(OriginalError, split_original, False) def test_empty_string(self): self.assertRaises(OriginalError, split_original, '') def test_no_extension(self): split = split_original('photo') self.assertEqual(split['base'], 'photo') self.assertEqual(split['ext'], '') def test_multiple_dots(self): split = split_original('photo.photo.photo.jpg') self.assertEqual(split['base'], 'photo.photo.photo') self.assertEqual(split['ext'], 'jpg') def test_string(self): split = split_original('photo.jpg') self.assertEqual(split['base'], 'photo') self.assertEqual(split['ext'], 'jpg') def test_unicode(self): split = split_original(u'photo.jpg') self.assertEqual(split['base'], 'photo') self.assertEqual(split['ext'], 'jpg') class DetermineThumbTest(TestCase): '''Tests determine_name.''' def setUp(self): self.size = {'code': 'small', 'wxh': '100x100'} def test_delimiter(self): thumb = determine_thumb(self.size, 'original.jpg', jpg=False, delimiter='-') self.assertEqual('original-small.jpg', thumb) thumb = determine_thumb(self.size, 'original.jpg', jpg=False, delimiter='_') self.assertEqual('original_small.jpg', thumb) def test_jpg_true(self): thumb = determine_thumb(self.size, 'original.jpg', jpg=True, delimiter='-') self.assertEqual('original-small.jpg', thumb) thumb = determine_thumb(self.size, 'original.png', jpg=True, delimiter='-') self.assertEqual('original-small.jpg', thumb) thumb = determine_thumb(self.size, 'original.gif', jpg=True, delimiter='-') self.assertEqual('original-small.jpg', thumb) thumb = determine_thumb(self.size, 'original.jpeg', jpg=True, delimiter='-') self.assertEqual('original-small.jpg', thumb) def test_jpg_false(self): thumb = determine_thumb(self.size, 'original.jpg', jpg=False, delimiter='-') self.assertEqual('original-small.jpg', thumb) thumb = determine_thumb(self.size, 'original.png', jpg=False, delimiter='-') self.assertEqual('original-small.png', thumb) thumb = determine_thumb(self.size, 'original.gif', jpg=False, delimiter='-') self.assertEqual('original-small.gif', thumb) thumb = determine_thumb(self.size, 'original.jpeg', jpg=False, delimiter='-') self.assertEqual('original-small.jpeg', thumb) def test_no_extension(self): # with no ext, jpg true false doesn't matter thumb = determine_thumb(self.size, 'original', jpg=True, delimiter='-') self.assertEqual('original-small.jpg', thumb) thumb = determine_thumb(self.size, 'original', jpg=False, delimiter='-') self.assertEqual('original-small.jpg', thumb) class ValidateSizeTest(TestCase): '''Tests validate size.''' def test_type(self): self.assertRaises(SizeError, validate_size, None) self.assertRaises(SizeError, validate_size, '') self.assertRaises(SizeError, validate_size, u'') self.assertRaises(SizeError, validate_size, False) def test_code_wxh_required(self): validate_size({'code': 'small', 'wxh': '100x100'}) self.assertRaises(SizeError, validate_size, {'code': 'small'}) self.assertRaises(SizeError, validate_size, {'wxh': '100x100'}) self.assertRaises(SizeError, validate_size, {}) def test_code_re(self): validate_size({'code': 'small', 'wxh': '100x100'}) validate_size({'code': 's', 'wxh': '100x100'}) validate_size({'code': '2', 'wxh': '100x100'}) self.assertRaises(SizeError, validate_size, {'code': ' ', 'wxh': '100x100'}) self.assertRaises(SizeError, validate_size, {'code': '&', 'wxh': '100x100'}) self.assertRaises(SizeError, validate_size, {'code': 's ', 'wxh': '100x100'}) self.assertRaises(SizeError, validate_size, {'code': 's!', 'wxh': '100x100'}) self.assertRaises(SizeError, validate_size, {'code': '2+', 'wxh': '100x100'}) def test_wxh_re(self): validate_size({'code': 'small', 'wxh': '100x100'}) validate_size({'code': 'small', 'wxh': '5x5'}) validate_size({'code': 'small', 'wxh': '1024x512'}) # '100x' 'x100' now supported for fixed width, fixed heigh validate_size({'code': 'fixedwidth', 'wxh': '100x'}) validate_size({'code': 'fixedheight', 'wxh': 'x100'}) self.assertRaises(SizeError, validate_size, {'code': 'small', 'wxh': ''}) self.assertRaises(SizeError, validate_size, {'code': 'small', 'wxh': 'x'}) self.assertRaises(SizeError, validate_size, {'code': 'small', 'wxh': '100xx100'}) self.assertRaises(SizeError, validate_size, {'code': 'small', 'wxh': 'x100x100'}) self.assertRaises(SizeError, validate_size, {'code': 'small', 'wxh': '100x100x'}) self.assertRaises(SizeError, validate_size, {'code': 'small', 'wxh': '1-0x100'}) self.assertRaises(SizeError, validate_size, {'code': 'small', 'wxh': '100x 100'}) def test_sting2tuple_notrelevant(self): # on wxh string, original_w_h is not relevant
def test_sting2tuple_downscale(self): original_w_h = (2400, 1200) w, h = sting2tuple('240x', original_w_h) self.assertEqual(w, 240) self.assertEqual(h, 120) w, h = sting2tuple('x60', original_w_h) self.assertEqual(w, 120) self.assertEqual(h, 60) def test_sting2tuple_upscale(self): ''' Does a small image upscale on a fixed width? ''' original_w_h = (150, 242) w, h = sting2tuple('300x', original_w_h) self.assertEqual(w, 300) self.assertEqual(h, 484) w, h = sting2tuple('x300', original_w_h) self.assertEqual(w, 185) self.assertEqual(h, 300) def test_resize_optional(self): validate_size({'code': 'small', 'wxh': '100x100'}) def test_resize_valid(self): validate_size({'code': 'small', 'wxh': '100x100', 'resize': 'crop'}) validate_size({'code': 'small', 'wxh': '100x100', 'resize': 'scale'}) def test_resize_invalid(self): self.assertRaises(SizeError, validate_size, {'code': 'small', 'wxh': '100x100', 'resize': 'fail'})
original_w_h = (1250, 750) w, h = sting2tuple('100x100', original_w_h) self.assertEqual(w, 100) self.assertEqual(h, 100)
tree_leaf.ngfactory.ts
/** * This file is hand tweeked based on * the out put of the Angular template compiler * and then hand tweeked to show possible future output. */ /* tslint:disable */ import * as import7 from '@angular/core/src/change_detection/change_detection'; import * as import5 from '@angular/core/src/di/injector'; import * as import9 from '@angular/core/src/linker/component_factory'; import * as import1 from '@angular/core/src/linker/view'; import * as import2 from '@angular/core/src/linker/view_container'; import * as import6 from '@angular/core/src/linker/view_type'; import * as import4 from '@angular/core/src/linker/view_utils'; import * as import8 from '@angular/core/src/metadata/view'; import * as import0 from '@angular/core/src/render/api'; import * as import10 from '@angular/core/src/security'; import {checkBinding} from './ftl_util'; import * as import3 from './tree'; export class
{ context: import3.TreeLeafComponent; _el_0: any; _text_1: any; /*private*/ _expr_0: any; /*private*/ _expr_1: any; /*private*/ _expr_2: any; constructor(parentRenderNode: any) { this.context = new import3.TreeLeafComponent(); this._el_0 = document.createElement('span'); parentRenderNode.appendChild(this._el_0); this._text_1 = document.createTextNode(''); this._el_0.appendChild(this._text_1); } updateData(currVal_2: any) { if (checkBinding(false, this._expr_2, currVal_2)) { this.context.data = currVal_2; this._expr_2 = currVal_2; } } destroyInternal() {} detectChangesInternal(throwOnChange: boolean): void { const currVal_0: any = ((this.context.data.depth % 2) ? '' : 'grey'); if (checkBinding(throwOnChange, this._expr_0, currVal_0)) { this._el_0.style.backgroundColor = currVal_0; this._expr_0 = currVal_0; } const currVal_1: any = import4.inlineInterpolate(1, ' ', this.context.data.value, ' '); if (checkBinding(throwOnChange, this._expr_1, currVal_1)) { this._text_1.nodeValue = currVal_1; this._expr_1 = currVal_1; } } }
View_TreeLeafComponent
category.ts
import axios from '@/util/axios'; export async function
(): Promise<ApiReturn> { return axios.get('article/categories'); }
list
select.js
import { select as d3_select } from 'd3-selection'; import { geoMetersToLat, geoMetersToLon } from '@id-sdk/math'; import { utilArrayIntersection, utilArrayUnion, utilDeepMemberSelector, utilEntityOrDeepMemberSelector, utilEntitySelector, utilGetAllNodes } from '@id-sdk/util'; import { t } from '../core/localizer'; import { actionAddMidpoint } from '../actions/add_midpoint'; import { actionDeleteRelation } from '../actions/delete_relation'; import { actionMove } from '../actions/move'; import { actionScale } from '../actions/scale'; import { behaviorBreathe } from '../behavior/breathe'; import { behaviorHover } from '../behavior/hover'; import { behaviorLasso } from '../behavior/lasso'; import { behaviorPaste } from '../behavior/paste'; import { behaviorSelect } from '../behavior/select'; import { operationMove } from '../operations/move'; import { prefs } from '../core/preferences'; import { geoChooseEdge } from '../geo'; import { modeBrowse } from './browse'; import { modeDragNode } from './drag_node'; import { modeDragNote } from './drag_note'; import { osmNode, osmWay } from '../osm'; import * as Operations from '../operations/index'; import { uiCmd } from '../ui/cmd'; import { utilKeybinding, utilTotalExtent } from '../util'; export function modeSelect(context, selectedIDs) { var mode = { id: 'select', button: 'browse' }; var keybinding = utilKeybinding('select'); var _breatheBehavior = behaviorBreathe(context); var _modeDragNode = modeDragNode(context); var _selectBehavior; var _behaviors = []; var _operations = []; var _newFeature = false; var _follow = false; // `_focusedParentWayId` is used when we visit a vertex with multiple // parents, and we want to remember which parent line we started on. var _focusedParentWayId; var _focusedVertexIds; function singular() { if (selectedIDs && selectedIDs.length === 1) { return context.hasEntity(selectedIDs[0]); } } function selectedEntities() { return selectedIDs.map(function(id) { return context.hasEntity(id); }).filter(Boolean); } function checkSelectedIDs() { var ids = []; if (Array.isArray(selectedIDs)) { ids = selectedIDs.filter(function(id) { return context.hasEntity(id); }); } if (!ids.length) { context.enter(modeBrowse(context)); return false; } else if ((selectedIDs.length > 1 && ids.length === 1) || (selectedIDs.length === 1 && ids.length > 1)) { // switch between single- and multi-select UI context.enter(modeSelect(context, ids)); return false; } selectedIDs = ids; return true; } // find the parent ways for nextVertex, previousVertex, and selectParent function parentWaysIdsOfSelection(onlyCommonParents) { var graph = context.graph(); var parents = []; for (var i = 0; i < selectedIDs.length; i++) { var entity = context.hasEntity(selectedIDs[i]); if (!entity || entity.geometry(graph) !== 'vertex') { return []; // selection includes some non-vertices } var currParents = graph.parentWays(entity).map(function(w) { return w.id; }); if (!parents.length) { parents = currParents; continue; } parents = (onlyCommonParents ? utilArrayIntersection : utilArrayUnion)(parents, currParents); if (!parents.length) { return []; } } return parents; } // find the child nodes for selected ways function childNodeIdsOfSelection(onlyCommon) { var graph = context.graph(); var childs = []; for (var i = 0; i < selectedIDs.length; i++) { var entity = context.hasEntity(selectedIDs[i]); if (!entity || !['area', 'line'].includes(entity.geometry(graph))){ return []; // selection includes non-area/non-line } var currChilds = graph.childNodes(entity).map(function(node) { return node.id; }); if (!childs.length) { childs = currChilds; continue; } childs = (onlyCommon ? utilArrayIntersection : utilArrayUnion)(childs, currChilds); if (!childs.length) { return []; } } return childs; } function checkFocusedParent() { if (_focusedParentWayId) { var parents = parentWaysIdsOfSelection(true); if (parents.indexOf(_focusedParentWayId) === -1) _focusedParentWayId = null; } } function parentWayIdForVertexNavigation() { var parentIds = parentWaysIdsOfSelection(true); if (_focusedParentWayId && parentIds.indexOf(_focusedParentWayId) !== -1) { // prefer the previously seen parent return _focusedParentWayId; } return parentIds.length ? parentIds[0] : null; } mode.selectedIDs = function(val) { if (!arguments.length) return selectedIDs; selectedIDs = val; return mode; }; mode.zoomToSelected = function() { context.map().zoomToEase(selectedEntities()); }; mode.newFeature = function(val) { if (!arguments.length) return _newFeature; _newFeature = val; return mode; }; mode.selectBehavior = function(val) { if (!arguments.length) return _selectBehavior; _selectBehavior = val; return mode; }; mode.follow = function(val) { if (!arguments.length) return _follow; _follow = val; return mode; }; function loadOperations() { _operations.forEach(function(operation) { if (operation.behavior) { context.uninstall(operation.behavior); } }); _operations = Object.values(Operations) .map(function(o) { return o(context, selectedIDs); }) .filter(function(o) { return o.id !== 'delete' && o.id !== 'downgrade' && o.id !== 'copy'; }) .concat([ // group copy/downgrade/delete operation together at the end of the list Operations.operationCopy(context, selectedIDs), Operations.operationDowngrade(context, selectedIDs), Operations.operationDelete(context, selectedIDs) ]).filter(function(operation) { return operation.available(); }); _operations.forEach(function(operation) { if (operation.behavior) { context.install(operation.behavior); } }); // remove any displayed menu context.ui().closeEditMenu(); } mode.operations = function() { return _operations; }; mode.enter = function() { if (!checkSelectedIDs()) return; context.features().forceVisible(selectedIDs); _modeDragNode.restoreSelectedIDs(selectedIDs); loadOperations(); if (!_behaviors.length) { if (!_selectBehavior) _selectBehavior = behaviorSelect(context); _behaviors = [ behaviorPaste(context), _breatheBehavior, behaviorHover(context).on('hover', context.ui().sidebar.hoverModeSelect), _selectBehavior, behaviorLasso(context), _modeDragNode.behavior, modeDragNote(context).behavior ]; } _behaviors.forEach(context.install); keybinding .on(t('inspector.zoom_to.key'), mode.zoomToSelected) .on(['[', 'pgup'], previousVertex) .on([']', 'pgdown'], nextVertex) .on(['{', uiCmd('⌘['), 'home'], firstVertex) .on(['}', uiCmd('⌘]'), 'end'], lastVertex) .on(uiCmd('⇧←'), nudgeSelection([-10, 0])) .on(uiCmd('⇧↑'), nudgeSelection([0, -10])) .on(uiCmd('⇧→'), nudgeSelection([10, 0])) .on(uiCmd('⇧↓'), nudgeSelection([0, 10])) .on(uiCmd('⇧⌥←'), nudgeSelection([-100, 0])) .on(uiCmd('⇧⌥↑'), nudgeSelection([0, -100])) .on(uiCmd('⇧⌥→'), nudgeSelection([100, 0])) .on(uiCmd('⇧⌥↓'), nudgeSelection([0, 100])) .on(utilKeybinding.plusKeys.map((key) => uiCmd('⇧' + key)), scaleSelection(1.05)) .on(utilKeybinding.plusKeys.map((key) => uiCmd('⇧⌥' + key)), scaleSelection(Math.pow(1.05, 5))) .on(utilKeybinding.minusKeys.map((key) => uiCmd('⇧' + key)), scaleSelection(1/1.05)) .on(utilKeybinding.minusKeys.map((key) => uiCmd('⇧⌥' + key)), scaleSelection(1/Math.pow(1.05, 5))) .on(['\\', 'pause'], focusNextParent) .on(uiCmd('⌘↑'), selectParent) .on(uiCmd('⌘↓'), selectChild) .on('⎋', esc, true); d3_select(document) .call(keybinding); context.ui().sidebar .select(selectedIDs, _newFeature); context.history() .on('change.select', function() { loadOperations(); // reselect after change in case relation members were removed or added selectElements(); }) .on('undone.select', checkSelectedIDs) .on('redone.select', checkSelectedIDs); context.map() .on('drawn.select', selectElements) .on('crossEditableZoom.select', function() { selectElements(); _breatheBehavior.restartIfNeeded(context.surface()); }); context.map().doubleUpHandler() .on('doubleUp.modeSelect', didDoubleUp); selectElements(); if (_follow) { var extent = utilTotalExtent(selectedIDs, context.graph()); var loc = extent.center(); context.map().centerEase(loc); // we could enter the mode multiple times, so reset follow for next time _follow = false; } function nudgeSelection(delta) { return function() { // prevent nudging during low zoom selection if (!context.map().withinEditableZoom()) return; var moveOp = operationMove(context, selectedIDs); if (moveOp.disabled()) { context.ui().flash .duration(4000) .iconName('#iD-operation-' + moveOp.id) .iconClass('operation disabled') .label(moveOp.tooltip)(); } else { context.perform(actionMove(selectedIDs, delta, context.projection), moveOp.annotation()); context.validator().validate(); } }; } function scaleSelection(factor) { return function() { // prevent scaling during low zoom selection if (!context.map().withinEditableZoom()) return; let nodes = utilGetAllNodes(selectedIDs, context.graph()); let isUp = factor > 1; // can only scale if multiple nodes are selected if (nodes.length <= 1) return; let extent = utilTotalExtent(selectedIDs, context.graph()); // These disabled checks would normally be handled by an operation // object, but we don't want an actual scale operation at this point. function scalingDisabled() { const allowLargeEdits = prefs('rapid-internal-feature.allowLargeEdits') === 'true'; if (tooSmall()) { return 'too_small'; } else if (!allowLargeEdits && extent.percentContainedIn(context.map().extent()) < 0.8) { return 'too_large'; } else if (someMissing() || selectedIDs.some(incompleteRelation)) { return 'not_downloaded'; } else if (selectedIDs.some(context.hasHiddenConnections)) { return 'connected_to_hidden'; } return false; function tooSmall() { if (isUp) return false; let dLon = Math.abs(extent.max[0] - extent.min[0]); let dLat = Math.abs(extent.max[1] - extent.min[1]); return dLon < geoMetersToLon(1, extent.max[1]) && dLat < geoMetersToLat(1); } function someMissing() { if (context.inIntro()) return false; let osm = context.connection(); if (osm) { let missing = nodes.filter(function(n) { return !osm.isDataLoaded(n.loc); }); if (missing.length) { missing.forEach(function(loc) { context.loadTileAtLoc(loc); }); return true; } } return false; } function incompleteRelation(id) { let entity = context.entity(id); return entity.type === 'relation' && !entity.isComplete(context.graph()); } } const disabled = scalingDisabled(); if (disabled) { let multi = (selectedIDs.length === 1 ? 'single' : 'multiple'); context.ui().flash .duration(4000) .iconName('#iD-icon-no') .iconClass('operation disabled') .label(t('operations.scale.' + disabled + '.' + multi))(); } else { const pivot = context.projection(extent.center()); const annotation = t('operations.scale.annotation.' + (isUp ? 'up' : 'down') + '.feature', { n: selectedIDs.length }); context.perform(actionScale(selectedIDs, pivot, factor, context.projection), annotation); context.validator().validate(); } }; } function didDoubleUp(d3_event, loc) { if (!context.map().withinEditableZoom()) return; var target = d3_select(d3_event.target); var datum = target.datum(); var entity = datum && datum.properties && datum.properties.entity; if (!entity) return; if (entity instanceof osmWay && target.classed('target')) { var choice = geoChooseEdge(context.graph().childNodes(entity), loc, context.projection); var prev = entity.nodes[choice.index - 1]; var next = entity.nodes[choice.index]; context.perform( actionAddMidpoint({ loc: choice.loc, edge: [prev, next] }, osmNode()), t('operations.add.annotation.vertex') ); context.validator().validate(); } else if (entity.type === 'midpoint') { context.perform( actionAddMidpoint({ loc: entity.loc, edge: entity.edge }, osmNode()), t('operations.add.annotation.vertex') ); context.validator().validate(); } } function selectElements() { if (!checkSelectedIDs()) return; var surface = context.surface(); surface.selectAll('.selected-member') .classed('selected-member', false); surface.selectAll('.selected') .classed('selected', false); surface.selectAll('.related') .classed('related', false); // reload `_focusedParentWayId` based on the current selection checkFocusedParent(); if (_focusedParentWayId) { surface.selectAll(utilEntitySelector([_focusedParentWayId])) .classed('related', true); } if (context.map().withinEditableZoom()) { // Apply selection styling if not in wide selection surface .selectAll(utilDeepMemberSelector(selectedIDs, context.graph(), true /* skipMultipolgonMembers */)) .classed('selected-member', true); surface .selectAll(utilEntityOrDeepMemberSelector(selectedIDs, context.graph())) .classed('selected', true); } } function esc() { if (context.container().select('.combobox').size()) return; context.enter(modeBrowse(context)); } function firstVertex(d3_event) { d3_event.preventDefault(); var entity = singular(); var parentId = parentWayIdForVertexNavigation(); var way; if (entity && entity.type === 'way') { way = entity; } else if (parentId) { way = context.entity(parentId); } _focusedParentWayId = way && way.id; if (way) { context.enter( mode.selectedIDs([way.first()]) .follow(true) ); } } function lastVertex(d3_event) { d3_event.preventDefault(); var entity = singular(); var parentId = parentWayIdForVertexNavigation(); var way; if (entity && entity.type === 'way') { way = entity; } else if (parentId) { way = context.entity(parentId); } _focusedParentWayId = way && way.id; if (way) { context.enter( mode.selectedIDs([way.last()]) .follow(true) ); } } function previousVertex(d3_event) { d3_event.preventDefault(); var parentId = parentWayIdForVertexNavigation(); _focusedParentWayId = parentId; if (!parentId) return; var way = context.entity(parentId); var length = way.nodes.length; var curr = way.nodes.indexOf(selectedIDs[0]); var index = -1; if (curr > 0) { index = curr - 1; } else if (way.isClosed()) { index = length - 2; } if (index !== -1) { context.enter( mode.selectedIDs([way.nodes[index]]) .follow(true) ); } } function nextVertex(d3_event) { d3_event.preventDefault(); var parentId = parentWayIdForVertexNavigation(); _focusedParentWayId = parentId; if (!parentId) return; var way = context.entity(parentId); var length = way.nodes.length;
if (curr < length - 1) { index = curr + 1; } else if (way.isClosed()) { index = 0; } if (index !== -1) { context.enter( mode.selectedIDs([way.nodes[index]]) .follow(true) ); } } function focusNextParent(d3_event) { d3_event.preventDefault(); var parents = parentWaysIdsOfSelection(true); if (!parents || parents.length < 2) return; var index = parents.indexOf(_focusedParentWayId); if (index < 0 || index > parents.length - 2) { _focusedParentWayId = parents[0]; } else { _focusedParentWayId = parents[index + 1]; } var surface = context.surface(); surface.selectAll('.related') .classed('related', false); if (_focusedParentWayId) { surface.selectAll(utilEntitySelector([_focusedParentWayId])) .classed('related', true); } } function selectParent(d3_event) { d3_event.preventDefault(); var currentSelectedIds = mode.selectedIDs(); var parentIds = _focusedParentWayId ? [_focusedParentWayId] : parentWaysIdsOfSelection(false); if (!parentIds.length) return; context.enter( mode.selectedIDs(parentIds) ); // set this after re-entering the selection since we normally want it cleared on exit _focusedVertexIds = currentSelectedIds; } function selectChild(d3_event) { d3_event.preventDefault(); var currentSelectedIds = mode.selectedIDs(); var childIds = _focusedVertexIds ? _focusedVertexIds.filter(id => context.hasEntity(id)) : childNodeIdsOfSelection(true); if (!childIds || !childIds.length) return; if (currentSelectedIds.length === 1) _focusedParentWayId = currentSelectedIds[0]; context.enter( mode.selectedIDs(childIds) ); } }; mode.exit = function() { // we could enter the mode multiple times but it's only new the first time _newFeature = false; _focusedVertexIds = null; _operations.forEach(function(operation) { if (operation.behavior) { context.uninstall(operation.behavior); } }); _operations = []; _behaviors.forEach(context.uninstall); d3_select(document) .call(keybinding.unbind); context.ui().closeEditMenu(); context.history() .on('change.select', null) .on('undone.select', null) .on('redone.select', null); var surface = context.surface(); surface .selectAll('.selected-member') .classed('selected-member', false); surface .selectAll('.selected') .classed('selected', false); surface .selectAll('.highlighted') .classed('highlighted', false); surface .selectAll('.related') .classed('related', false); context.map().on('drawn.select', null); context.ui().sidebar.hide(); context.features().forceVisible([]); var entity = singular(); if (_newFeature && entity && entity.type === 'relation' && // no tags Object.keys(entity.tags).length === 0 && // no parent relations context.graph().parentRelations(entity).length === 0 && // no members or one member with no role (entity.members.length === 0 || (entity.members.length === 1 && !entity.members[0].role)) ) { // the user added this relation but didn't edit it at all, so just delete it var deleteAction = actionDeleteRelation(entity.id, true /* don't delete untagged members */); context.perform(deleteAction, t('operations.delete.annotation.relation')); context.validator().validate(); } }; return mode; }
var curr = way.nodes.indexOf(selectedIDs[0]); var index = -1;
montage_pt.py
#!/usr/bin/python3 def make_montage(basedir, depths):
if __name__ == '__main__': import sys make_montage(sys.argv[1], sys.argv[2:])
""" makes a montage of passive tracer animation from runs.animate_pt run with different depths Arguments: basedir - basedir to which depths are appended i.e., runew-03-pt-z- depths - depths at which stuff has been outputted Returns none Deepak Cherian - 23/01/2014 """ import subprocess import glob import os # first find number of files flist = glob.glob(basedir + str(depths[0]) + '/*.png') N = len(flist) print(depths) outdir = 'temp_pt' outfmt_av = './' + outdir + '/output_%06d.png' outfmt_mo = './' + outdir + '/output_{0:06d}.png' # get runname by splitting by '/' # and partitioning at the -z introduced by runs.animate_pt outname = basedir.split('/')[-1].rpartition('-z')[0] + '.mp4' # avconv options frameRate = 5 bitRate = 25000000 # avconvArgs = '' # make temp dir try: os.mkdir(outdir) except os.FileExistsError: subprocess.call(['rm', '-rf', outdir]) os.mkdir(outdir) for ii in range(1, N+1): # range(1,N): print('Processing image ' + str(ii) + '/' + str(N)) fname = '/mm_frame_{0:06d}.png'.format(ii) # arguments for montage command argument = 'montage ' for jj in depths: argument += basedir + str(jj) + fname + ' ' argument += '-geometry 1600x900 ' + outfmt_mo.format(ii) # call the montage command for each set of images subprocess.call(argument.split()) # all output images have been created # now execute avconv command avconv = ('avconv -r {0} -f image2 -i {1} -q:v 1 -g 1 -b:v {2} {3}'. format(frameRate, outfmt_av, bitRate, outname)) print(avconv) subprocess.call(avconv.split())
engine_linux.go
// Copyright (c) 2021 Apptainer a Series of LF Projects LLC // For website terms of use, trademark policy, privacy policy and other // project policies see https://lfprojects.org/policies // Copyright (c) 2018-2019, Sylabs Inc. All rights reserved. // This software is licensed under a 3-clause BSD license. Please consult the // LICENSE.md file distributed with the sources of this project regarding your // rights to use or distribute this software. package oci import ( "github.com/apptainer/apptainer/internal/pkg/runtime/engine" "github.com/apptainer/apptainer/internal/pkg/runtime/engine/apptainer/rpc/server" ociServer "github.com/apptainer/apptainer/internal/pkg/runtime/engine/oci/rpc/server" "github.com/apptainer/apptainer/pkg/runtime/engine/config" ) // EngineOperations is an Apptainer OCI runtime engine that implements engine.Operations. // Basically, this is the core of `apptainer oci` commands. type EngineOperations struct { CommonConfig *config.Common `json:"-"` EngineConfig *EngineConfig `json:"engineConfig"` } // InitConfig stores the parsed config.Common inside the engine. // // Since this method simply stores config.Common, it does not matter // whether or not there are any elevated privileges during this call. func (e *EngineOperations) InitConfig(cfg *config.Common) { e.CommonConfig = cfg } // Config returns a pointer to EngineConfig literal as a config.EngineConfig // interface. This pointer gets stored in the Engine.Common field. // // Since this method simply returns a zero value of the concrete // EngineConfig, it does not matter whether or not there are any elevated // privileges during this call. func (e *EngineOperations) Config() config.EngineConfig { return e.EngineConfig } func
() { engine.RegisterOperations( Name, &EngineOperations{ EngineConfig: &EngineConfig{}, }, ) ocimethods := new(ociServer.Methods) ocimethods.Methods = new(server.Methods) engine.RegisterRPCMethods( Name, ocimethods, ) }
init
cell_size.rs
use brainfuck_interpreter::run_program; static PROGRAM: &str = r#"Calculate the value 256 and test if it's zero If the interpreter errors on overflow this is where it'll happen ++++++++[>++++++++<-]>[<++++>-] +<[>-< Not zero so multiply by 256 again to get 65536 [>++++<-]>[<++++++++>-]<[>++++++++<-] +>[> # Print "32" ++++++++++[>+++++<-]>+.-.[-]< <[-]<->] <[>> # Print "16" +++++++[>+++++++<-]>.+++++.[-]< <<-]] >[> # Print "8" ++++++++[>+++++++<-]>.[-]< <-]< # Print " bit cells\n" +++++++++++[>+++>+++++++++>+++++++++>+<<<<-]>-.>-.+++++++.+++++++++++.<. >>.++.+++++++..<-.>>- Clean up used cells. [[-]<]"#; fn
() { run_program(PROGRAM); }
main
ze_generated_example_queries_client_test.go
//go:build go1.18 // +build go1.18 // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. package armoperationalinsights_test import ( "context" "log" "github.com/Azure/azure-sdk-for-go/sdk/azcore/to" "github.com/Azure/azure-sdk-for-go/sdk/azidentity" "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/operationalinsights/armoperationalinsights" ) // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesList.json func ExampleQueriesClient_NewListPager() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armoperationalinsights.NewQueriesClient("<subscription-id>", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } pager := client.NewListPager("<resource-group-name>", "<query-pack-name>", &armoperationalinsights.QueriesClientListOptions{Top: nil, IncludeBody: to.Ptr(true), SkipToken: nil, }) for pager.More() { nextResult, err := pager.NextPage(ctx) if err != nil { log.Fatalf("failed to advance page: %v", err) return } for _, v := range nextResult.Value { // TODO: use page item _ = v } } } // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesSearch.json func ExampleQueriesClient_NewSearchPager() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armoperationalinsights.NewQueriesClient("<subscription-id>", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } pager := client.NewSearchPager("<resource-group-name>", "<query-pack-name>", armoperationalinsights.LogAnalyticsQueryPackQuerySearchProperties{ Related: &armoperationalinsights.LogAnalyticsQueryPackQuerySearchPropertiesRelated{ Categories: []*string{
Tags: map[string][]*string{ "my-label": { to.Ptr("label1")}, }, }, &armoperationalinsights.QueriesClientSearchOptions{Top: to.Ptr[int64](3), IncludeBody: to.Ptr(true), SkipToken: nil, }) for pager.More() { nextResult, err := pager.NextPage(ctx) if err != nil { log.Fatalf("failed to advance page: %v", err) return } for _, v := range nextResult.Value { // TODO: use page item _ = v } } } // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesGet.json func ExampleQueriesClient_Get() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armoperationalinsights.NewQueriesClient("<subscription-id>", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } res, err := client.Get(ctx, "<resource-group-name>", "<query-pack-name>", "<id>", nil) if err != nil { log.Fatalf("failed to finish the request: %v", err) } // TODO: use response item _ = res } // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesPut.json func ExampleQueriesClient_Put() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armoperationalinsights.NewQueriesClient("<subscription-id>", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } res, err := client.Put(ctx, "<resource-group-name>", "<query-pack-name>", "<id>", armoperationalinsights.LogAnalyticsQueryPackQuery{ Properties: &armoperationalinsights.LogAnalyticsQueryPackQueryProperties{ Description: to.Ptr("<description>"), Body: to.Ptr("<body>"), DisplayName: to.Ptr("<display-name>"), Related: &armoperationalinsights.LogAnalyticsQueryPackQueryPropertiesRelated{ Categories: []*string{ to.Ptr("analytics")}, }, Tags: map[string][]*string{ "my-label": { to.Ptr("label1")}, "my-other-label": { to.Ptr("label2")}, }, }, }, nil) if err != nil { log.Fatalf("failed to finish the request: %v", err) } // TODO: use response item _ = res } // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesUpdate.json func ExampleQueriesClient_Update() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armoperationalinsights.NewQueriesClient("<subscription-id>", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } res, err := client.Update(ctx, "<resource-group-name>", "<query-pack-name>", "<id>", armoperationalinsights.LogAnalyticsQueryPackQuery{ Properties: &armoperationalinsights.LogAnalyticsQueryPackQueryProperties{ Description: to.Ptr("<description>"), Body: to.Ptr("<body>"), DisplayName: to.Ptr("<display-name>"), Related: &armoperationalinsights.LogAnalyticsQueryPackQueryPropertiesRelated{ Categories: []*string{ to.Ptr("analytics")}, }, Tags: map[string][]*string{ "my-label": { to.Ptr("label1")}, "my-other-label": { to.Ptr("label2")}, }, }, }, nil) if err != nil { log.Fatalf("failed to finish the request: %v", err) } // TODO: use response item _ = res } // Generated from example definition: https://github.com/Azure/azure-rest-api-specs/tree/main/specification/operationalinsights/resource-manager/Microsoft.OperationalInsights/stable/2019-09-01/examples/QueryPackQueriesDelete.json func ExampleQueriesClient_Delete() { cred, err := azidentity.NewDefaultAzureCredential(nil) if err != nil { log.Fatalf("failed to obtain a credential: %v", err) } ctx := context.Background() client, err := armoperationalinsights.NewQueriesClient("<subscription-id>", cred, nil) if err != nil { log.Fatalf("failed to create client: %v", err) } _, err = client.Delete(ctx, "<resource-group-name>", "<query-pack-name>", "<id>", nil) if err != nil { log.Fatalf("failed to finish the request: %v", err) } }
to.Ptr("other"), to.Ptr("analytics")}, },
steering.rs
use bevy::prelude::*; use crate::components::physics::*; use crate::components::steering::*; use crate::game::DEBUG_SORT; use crate::util::*; pub fn update(mut steering_behaviors: Query<SteeringQueryMut>)
pub fn update_debug( agents: Query<(&Steering, &Children), Without<SteeringTargetDebug>>, mut steering_debug: Query<TransformQueryMut, With<SteeringTargetDebug>>, ) { for (steering, children) in agents.iter() { for &child in children.iter() { if let Ok(mut transform) = steering_debug.get_mut(child) { transform.transform.set_world_translation( transform.global_transform, steering.target.extend(DEBUG_SORT), ); } } } } pub fn update_seek(mut seeking: Query<(SeekQueryMut, PhysicalQuery)>) { for (mut steering, physical) in seeking.iter_mut() { let force = steering.seek.force(&steering.steering, &physical); steering .steering .accumulate_force(physical.physical, force, 1.0); } } pub fn update_arrive(mut arriving: Query<(ArriveQueryMut, PhysicalQuery)>) { for (mut steering, physical) in arriving.iter_mut() { let force = steering.arrive.force(&steering.steering, &physical); steering .steering .accumulate_force(physical.physical, force, 1.0); } }
{ for mut steering in steering_behaviors.iter_mut() { steering.steering.update(&mut steering.physical); } }
history_loader.py
# Copyright 2016 Quantopian, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from abc import ( ABCMeta, abstractmethod, abstractproperty, ) from numpy import concatenate from lru import LRU from pandas import isnull from pandas.tslib import normalize_date from toolz import sliding_window from six import with_metaclass from zipline.assets import Equity, Future from zipline.assets.continuous_futures import ContinuousFuture from zipline.lib._int64window import AdjustedArrayWindow as Int64Window from zipline.lib._float64window import AdjustedArrayWindow as Float64Window from zipline.lib.adjustment import Float64Multiply, Float64Add from zipline.utils.cache import ExpiringCache from zipline.utils.math_utils import number_of_decimal_places from zipline.utils.memoize import lazyval from zipline.utils.numpy_utils import float64_dtype from zipline.utils.pandas_utils import find_in_sorted_index # Default number of decimal places used for rounding asset prices. DEFAULT_ASSET_PRICE_DECIMALS = 3 class HistoryCompatibleUSEquityAdjustmentReader(object): def __init__(self, adjustment_reader): self._adjustments_reader = adjustment_reader def load_adjustments(self, columns, dts, assets): """ Returns ------- adjustments : list[dict[int -> Adjustment]] A list, where each element corresponds to the `columns`, of mappings from index to adjustment objects to apply at that index. """ out = [None] * len(columns) for i, column in enumerate(columns): adjs = {} for asset in assets: adjs.update(self._get_adjustments_in_range( asset, dts, column)) out[i] = adjs return out def _get_adjustments_in_range(self, asset, dts, field): """ Get the Float64Multiply objects to pass to an AdjustedArrayWindow. For the use of AdjustedArrayWindow in the loader, which looks back from current simulation time back to a window of data the dictionary is structured with: - the key into the dictionary for adjustments is the location of the day from which the window is being viewed. - the start of all multiply objects is always 0 (in each window all adjustments are overlapping) - the end of the multiply object is the location before the calendar location of the adjustment action, making all days before the event adjusted. Parameters ---------- asset : Asset The assets for which to get adjustments. dts : iterable of datetime64-like The dts for which adjustment data is needed. field : str OHLCV field for which to get the adjustments. Returns ------- out : dict[loc -> Float64Multiply] The adjustments as a dict of loc -> Float64Multiply """ sid = int(asset) start = normalize_date(dts[0]) end = normalize_date(dts[-1]) adjs = {} if field != 'volume': mergers = self._adjustments_reader.get_adjustments_for_sid( 'mergers', sid) for m in mergers: dt = m[0] if start < dt <= end: end_loc = dts.searchsorted(dt) adj_loc = end_loc mult = Float64Multiply(0, end_loc - 1, 0, 0, m[1]) try: adjs[adj_loc].append(mult) except KeyError: adjs[adj_loc] = [mult] divs = self._adjustments_reader.get_adjustments_for_sid( 'dividends', sid) for d in divs: dt = d[0] if start < dt <= end: end_loc = dts.searchsorted(dt) adj_loc = end_loc mult = Float64Multiply(0, end_loc - 1, 0, 0, d[1]) try: adjs[adj_loc].append(mult) except KeyError: adjs[adj_loc] = [mult] splits = self._adjustments_reader.get_adjustments_for_sid( 'splits', sid) for s in splits: dt = s[0] if start < dt <= end: if field == 'volume': ratio = 1.0 / s[1] else: ratio = s[1] end_loc = dts.searchsorted(dt) adj_loc = end_loc mult = Float64Multiply(0, end_loc - 1, 0, 0, ratio) try: adjs[adj_loc].append(mult) except KeyError: adjs[adj_loc] = [mult] return adjs class ContinuousFutureAdjustmentReader(object): """ Calculates adjustments for continuous futures, based on the close and open of the contracts on the either side of each roll. """ def __init__(self, trading_calendar, asset_finder, bar_reader, roll_finders, frequency): self._trading_calendar = trading_calendar self._asset_finder = asset_finder self._bar_reader = bar_reader self._roll_finders = roll_finders self._frequency = frequency def load_adjustments(self, columns, dts, assets): """ Returns ------- adjustments : list[dict[int -> Adjustment]] A list, where each element corresponds to the `columns`, of mappings from index to adjustment objects to apply at that index. """ out = [None] * len(columns) for i, column in enumerate(columns): adjs = {} for asset in assets: adjs.update(self._get_adjustments_in_range( asset, dts, column)) out[i] = adjs return out def _make_adjustment(self, adjustment_type, front_close, back_close, end_loc): adj_base = back_close - front_close if adjustment_type == 'mul': adj_value = 1.0 + adj_base / front_close adj_class = Float64Multiply elif adjustment_type == 'add': adj_value = adj_base adj_class = Float64Add return adj_class(0, end_loc, 0, 0, adj_value) def _get_adjustments_in_range(self, cf, dts, field): if field == 'volume' or field == 'sid': return {} if cf.adjustment is None: return {} rf = self._roll_finders[cf.roll_style] partitions = [] rolls = rf.get_rolls(cf.root_symbol, dts[0], dts[-1], cf.offset) tc = self._trading_calendar adjs = {} for front, back in sliding_window(2, rolls): front_sid, roll_dt = front back_sid = back[0] dt = tc.previous_session_label(roll_dt) if self._frequency == 'minute': dt = tc.open_and_close_for_session(dt)[1] roll_dt = tc.open_and_close_for_session(roll_dt)[0] partitions.append((front_sid, back_sid, dt, roll_dt)) for partition in partitions: front_sid, back_sid, dt, roll_dt = partition last_front_dt = self._bar_reader.get_last_traded_dt( self._asset_finder.retrieve_asset(front_sid), dt) last_back_dt = self._bar_reader.get_last_traded_dt( self._asset_finder.retrieve_asset(back_sid), dt) if isnull(last_front_dt) or isnull(last_back_dt): continue front_close = self._bar_reader.get_value( front_sid, last_front_dt, 'close') back_close = self._bar_reader.get_value( back_sid, last_back_dt, 'close') adj_loc = dts.searchsorted(roll_dt) end_loc = adj_loc - 1 adj = self._make_adjustment(cf.adjustment, front_close, back_close, end_loc) try: adjs[adj_loc].append(adj) except KeyError: adjs[adj_loc] = [adj] return adjs class SlidingWindow(object): """ Wrapper around an AdjustedArrayWindow which supports monotonically increasing (by datetime) requests for a sized window of data. Parameters ---------- window : AdjustedArrayWindow Window of pricing data with prefetched values beyond the current simulation dt. cal_start : int Index in the overall calendar at which the window starts. """ def __init__(self, window, size, cal_start, offset): self.window = window self.cal_start = cal_start self.current = next(window) self.offset = offset self.most_recent_ix = self.cal_start + size def get(self, end_ix): """ Returns ------- out : A np.ndarray of the equity pricing up to end_ix after adjustments and rounding have been applied. """ if self.most_recent_ix == end_ix: return self.current target = end_ix - self.cal_start - self.offset + 1 self.current = self.window.seek(target) self.most_recent_ix = end_ix return self.current class HistoryLoader(with_metaclass(ABCMeta)): """ Loader for sliding history windows, with support for adjustments. Parameters ---------- trading_calendar: TradingCalendar Contains the grouping logic needed to assign minutes to periods. reader : DailyBarReader, MinuteBarReader Reader for pricing bars. adjustment_reader : SQLiteAdjustmentReader Reader for adjustment data. """ FIELDS = ('open', 'high', 'low', 'close', 'volume', 'sid') def __init__(self, trading_calendar, reader, equity_adjustment_reader, asset_finder, roll_finders=None, sid_cache_size=1000, prefetch_length=0): self.trading_calendar = trading_calendar self._asset_finder = asset_finder self._reader = reader self._adjustment_readers = {} if equity_adjustment_reader is not None: self._adjustment_readers[Equity] = \ HistoryCompatibleUSEquityAdjustmentReader( equity_adjustment_reader) if roll_finders: self._adjustment_readers[ContinuousFuture] =\ ContinuousFutureAdjustmentReader(trading_calendar, asset_finder, reader, roll_finders, self._frequency) self._window_blocks = { field: ExpiringCache(LRU(sid_cache_size)) for field in self.FIELDS } self._prefetch_length = prefetch_length @abstractproperty def _frequency(self): pass @abstractproperty def _calendar(self): pass @abstractmethod def _array(self, start, end, assets, field): pass def _decimal_places_for_asset(self, asset, reference_date): if isinstance(asset, Future) and asset.tick_size: return number_of_decimal_places(asset.tick_size) elif isinstance(asset, ContinuousFuture): # Tick size should be the same for all contracts of a continuous # future, so arbitrarily get the contract with next upcoming auto # close date. oc = self._asset_finder.get_ordered_contracts(asset.root_symbol) contract_sid = oc.contract_before_auto_close(reference_date.value) if contract_sid is not None: contract = self._asset_finder.retrieve_asset(contract_sid) if contract.tick_size: return number_of_decimal_places(contract.tick_size) return DEFAULT_ASSET_PRICE_DECIMALS def _ensure_sliding_windows(self, assets, dts, field, is_perspective_after): """ Ensure that there is a Float64Multiply window for each asset that can provide data for the given parameters. If the corresponding window for the (assets, len(dts), field) does not exist, then create a new one. If a corresponding window does exist for (assets, len(dts), field), but can not provide data for the current dts range, then create a new one and replace the expired window. Parameters ---------- assets : iterable of Assets The assets in the window dts : iterable of datetime64-like The datetimes for which to fetch data. Makes an assumption that all dts are present and contiguous, in the calendar. field : str The OHLCV field for which to retrieve data. is_perspective_after : bool see: `PricingHistoryLoader.history` Returns ------- out : list of Float64Window with sufficient data so that each asset's window can provide `get` for the index corresponding with the last value in `dts` """ end = dts[-1] size = len(dts) asset_windows = {} needed_assets = [] cal = self._calendar assets = self._asset_finder.retrieve_all(assets) end_ix = find_in_sorted_index(cal, end) for asset in assets: try: window = self._window_blocks[field].get( (asset, size, is_perspective_after), end) except KeyError: needed_assets.append(asset) else: if end_ix < window.most_recent_ix: # Window needs reset. Requested end index occurs before the # end index from the previous history call for this window. # Grab new window instead of rewinding adjustments. needed_assets.append(asset) else: asset_windows[asset] = window if needed_assets: offset = 0 start_ix = find_in_sorted_index(cal, dts[0]) prefetch_end_ix = min(end_ix + self._prefetch_length, len(cal) - 1) prefetch_end = cal[prefetch_end_ix] prefetch_dts = cal[start_ix:prefetch_end_ix + 1] if is_perspective_after:
else: adj_dts = prefetch_dts prefetch_len = len(prefetch_dts) array = self._array(prefetch_dts, needed_assets, field) if field == 'sid': window_type = Int64Window else: window_type = Float64Window view_kwargs = {} if field == 'volume': array = array.astype(float64_dtype) for i, asset in enumerate(needed_assets): adj_reader = None try: adj_reader = self._adjustment_readers[type(asset)] except KeyError: adj_reader = None if adj_reader is not None: adjs = adj_reader.load_adjustments( [field], adj_dts, [asset])[0] else: adjs = {} window = window_type( array[:, i].reshape(prefetch_len, 1), view_kwargs, adjs, offset, size, int(is_perspective_after), self._decimal_places_for_asset(asset, dts[-1]), ) sliding_window = SlidingWindow(window, size, start_ix, offset) asset_windows[asset] = sliding_window self._window_blocks[field].set( (asset, size, is_perspective_after), sliding_window, prefetch_end) return [asset_windows[asset] for asset in assets] def history(self, assets, dts, field, is_perspective_after): """ A window of pricing data with adjustments applied assuming that the end of the window is the day before the current simulation time. Parameters ---------- assets : iterable of Assets The assets in the window. dts : iterable of datetime64-like The datetimes for which to fetch data. Makes an assumption that all dts are present and contiguous, in the calendar. field : str The OHLCV field for which to retrieve data. is_perspective_after : bool True, if the window is being viewed immediately after the last dt in the sliding window. False, if the window is viewed on the last dt. This flag is used for handling the case where the last dt in the requested window immediately precedes a corporate action, e.g.: - is_perspective_after is True When the viewpoint is after the last dt in the window, as when a daily history window is accessed from a simulation that uses a minute data frequency, the history call to this loader will not include the current simulation dt. At that point in time, the raw data for the last day in the window will require adjustment, so the most recent adjustment with respect to the simulation time is applied to the last dt in the requested window. An example equity which has a 0.5 split ratio dated for 05-27, with the dts for a history call of 5 bars with a '1d' frequency at 05-27 9:31. Simulation frequency is 'minute'. (In this case this function is called with 4 daily dts, and the calling function is responsible for stitching back on the 'current' dt) | | | | | last dt | <-- viewer is here | | | 05-23 | 05-24 | 05-25 | 05-26 | 05-27 9:31 | | raw | 10.10 | 10.20 | 10.30 | 10.40 | | | adj | 5.05 | 5.10 | 5.15 | 5.25 | | The adjustment is applied to the last dt, 05-26, and all previous dts. - is_perspective_after is False, daily When the viewpoint is the same point in time as the last dt in the window, as when a daily history window is accessed from a simulation that uses a daily data frequency, the history call will include the current dt. At that point in time, the raw data for the last day in the window will be post-adjustment, so no adjustment is applied to the last dt. An example equity which has a 0.5 split ratio dated for 05-27, with the dts for a history call of 5 bars with a '1d' frequency at 05-27 0:00. Simulation frequency is 'daily'. | | | | | | <-- viewer is here | | | | | | | last dt | | | 05-23 | 05-24 | 05-25 | 05-26 | 05-27 | | raw | 10.10 | 10.20 | 10.30 | 10.40 | 5.25 | | adj | 5.05 | 5.10 | 5.15 | 5.20 | 5.25 | Adjustments are applied 05-23 through 05-26 but not to the last dt, 05-27 Returns ------- out : np.ndarray with shape(len(days between start, end), len(assets)) """ block = self._ensure_sliding_windows(assets, dts, field, is_perspective_after) end_ix = self._calendar.searchsorted(dts[-1]) return concatenate( [window.get(end_ix) for window in block], axis=1, ) class DailyHistoryLoader(HistoryLoader): @property def _frequency(self): return 'daily' @property def _calendar(self): return self._reader.sessions def _array(self, dts, assets, field): return self._reader.load_raw_arrays( [field], dts[0], dts[-1], assets, )[0] class MinuteHistoryLoader(HistoryLoader): @property def _frequency(self): return 'minute' @lazyval def _calendar(self): mm = self.trading_calendar.all_minutes start = mm.searchsorted(self._reader.first_trading_day) end = mm.searchsorted(self._reader.last_available_dt, side='right') return mm[start:end] def _array(self, dts, assets, field): return self._reader.load_raw_arrays( [field], dts[0], dts[-1], assets, )[0]
adj_end_ix = min(prefetch_end_ix + 1, len(cal) - 1) adj_dts = cal[start_ix:adj_end_ix + 1]
presenceMode.js
/** * Copyright (c) 2014 [email protected] * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. */ /** * @author [email protected] * @fileoverview Change SmartThings mode based on user presence. */ module.exports = (function () { 'use strict'; return { version : 20161101, stillAway : true, translate : function (token, lang) { var translate = require(__dirname + '/../lib/translate'); return translate.translate('{{i18n_' + token + '}}', 'smartthings', lang); }, isPresent : function (devices, presence) { var present = [], subdevice; for (subdevice in devices) { if ((devices[subdevice].type === 'presence') && (devices[subdevice].state === 'on') && (presence) && (presence.indexOf(devices[subdevice].label) !== -1)) { present.push(devices[subdevice].label); } } return present; }, changeMode : function (deviceId, newMode, present, controllers, lang) { var sharedUtil = require(__dirname + '/../lib/sharedUtil').util, notify = require(__dirname + '/../lib/notify'), runCommand = require(__dirname + '/../lib/runCommand'), plural = present.length > 1 ? 'ARE' : 'IS', message = '', people = ''; if (newMode === 'Away') { message = this.translate('NOBODY_HOME', lang); } else { people = sharedUtil.arrayList(present, 'smartthings', lang); message = this.translate('SOMEBODY_HOME', lang).split('{{PEOPLE}}').join(people); message = message.split('{{PLURAL}}').join(this.translate(plural, lang)); } runCommand.runCommand(deviceId, 'subdevice-mode-' + newMode); notify.notify(message, controllers, deviceId); }, presenceMode : function (deviceId, command, controllers, values, config) { var that = this, deviceState = require(__dirname + '/../lib/deviceState'), lang = controllers.config.language, newMode = null, currDevice, weatherState, present = [],
delay = config.delay || 10, presence = config.presence || []; if (command.indexOf('subdevice-state-presence-') === 0) { if ((values.value) && (values.value.devices)) { present = this.isPresent(values.value.devices, presence); if (present.length === 0) { // Only mark "Away" based on a presence sensor going off. if (command.indexOf('-off') === (command.length - 4)) { this.stillAway = true; newMode = 'Away'; } } else { // Only mark "Home" or "Night" based on a presence sensor going on. if (command.indexOf('-on') === (command.length - 3)) { this.stillAway = false; newMode = 'Home'; for (currDevice in controllers) { if ((controllers[currDevice].config) && (controllers[currDevice].config.typeClass === 'weather')) { weatherState = deviceState.getDeviceState(currDevice); newMode = weatherState.value.phase === 'Day' ? config.dayMode : config.nightMode; } } } } if ((newMode) && (values.value.mode !== newMode)) { if (newMode === 'Away') { setTimeout(function () { var currentState = deviceState.getDeviceState(deviceId), present = that.isPresent(currentState.value.devices); if ((that.stillAway) && (present.length === 0)) { that.stillAway = false; that.changeMode(deviceId, newMode, present, controllers, lang); } }, delay * 60000); } else { this.changeMode(deviceId, newMode, present, controllers, lang); } } } } } }; }());
createChainedFunction.js
'use strict'; exports.__esModule = true; function createChainedFunction() { for (var _len = arguments.length, funcs = Array(_len), _key = 0; _key < _len; _key++) { funcs[_key] = arguments[_key]; } return funcs.filter(function (f) { return f != null; }).reduce(function (acc, f) { if (typeof f !== 'function') { throw new Error('Invalid Argument Type, must only provide functions, undefined, or null.'); } if (acc === null) { return f; } return function chainedFunction() { for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { args[_key2] = arguments[_key2]; } acc.apply(this, args); f.apply(this, args); };
}, null); } exports.default = createChainedFunction;
replaceAll.d.ts
export = replaceAll /** * Modifies provided text with specified transformations. * * @param text base text * @param transformations descriptions of changes to the text */
text: string, transformations: { offset: number; length: number; change: string }[], ): string
declare function replaceAll(
etetaphim.go
// Copyright 2017 The go-hep Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package fmom import ( "math" ) type EtEtaPhiM [4]float64 func
(et, eta, phi, m float64) EtEtaPhiM { return EtEtaPhiM([4]float64{et, eta, phi, m}) } func (p4 *EtEtaPhiM) Clone() P4 { pp := *p4 return &pp } func (p4 *EtEtaPhiM) Et() float64 { return p4[0] } func (p4 *EtEtaPhiM) Eta() float64 { return p4[1] } func (p4 *EtEtaPhiM) Phi() float64 { return p4[2] } func (p4 *EtEtaPhiM) M() float64 { return p4[3] } func (p4 *EtEtaPhiM) M2() float64 { m := p4.M() return m * m } func (p4 *EtEtaPhiM) P() float64 { m := p4.M() e := p4.E() if m == 0 { return e } sign := 1.0 if e < 0 { sign = -1.0 } return sign * math.Sqrt(e*e-m*m) } func (p4 *EtEtaPhiM) P2() float64 { m := p4.M() e := p4.E() return e*e - m*m } func (p4 *EtEtaPhiM) CosPhi() float64 { phi := p4.Phi() return math.Cos(phi) } func (p4 *EtEtaPhiM) SinPhi() float64 { phi := p4.Phi() return math.Sin(phi) } func (p4 *EtEtaPhiM) TanTh() float64 { eta := p4.Eta() abseta := math.Abs(eta) // avoid numeric overflow if very large eta if abseta > 710 { if eta > 0 { eta = +710 } else { eta = -710 } } return 1. / math.Sinh(eta) } func (p4 *EtEtaPhiM) CotTh() float64 { eta := p4.Eta() return math.Sinh(eta) } func (p4 *EtEtaPhiM) CosTh() float64 { eta := p4.Eta() return math.Tanh(eta) } func (p4 *EtEtaPhiM) SinTh() float64 { eta := p4.Eta() abseta := math.Abs(eta) if abseta > 710 { abseta = 710 } return 1 / math.Cosh(abseta) } func (p4 *EtEtaPhiM) Pt() float64 { p := p4.P() sinth := p4.SinTh() return p * sinth } func (p4 *EtEtaPhiM) E() float64 { et := p4.Et() sinth := p4.SinTh() return et / sinth } func (p4 *EtEtaPhiM) IPt() float64 { pt := p4.Pt() return 1 / pt } func (p4 *EtEtaPhiM) Rapidity() float64 { e := p4.E() pz := p4.Pz() return 0.5 * math.Log((e+pz)/(e-pz)) } func (p4 *EtEtaPhiM) Px() float64 { pt := p4.Pt() cosphi := p4.CosPhi() return pt * cosphi } func (p4 *EtEtaPhiM) Py() float64 { pt := p4.Pt() sinphi := p4.SinPhi() return pt * sinphi } func (p4 *EtEtaPhiM) Pz() float64 { p := p4.P() costh := p4.CosTh() return p * costh } func (p4 *EtEtaPhiM) Set(p P4) { p4[0] = p.Et() p4[1] = p.Eta() p4[2] = p.Phi() p4[3] = p.M() }
NewEtEtaPhiM
gtoken.go
package gtoken import ( "fmt" "github.com/gogf/gf/crypto/gaes" "github.com/gogf/gf/crypto/gmd5" "github.com/gogf/gf/encoding/gbase64" "github.com/gogf/gf/frame/g" "github.com/gogf/gf/net/ghttp" "github.com/gogf/gf/os/glog" "github.com/gogf/gf/os/gtime" "github.com/gogf/gf/text/gstr" "github.com/gogf/gf/util/gconv" "github.com/gogf/gf/util/grand" "strings" ) const ( CacheModeCache = 1 CacheModeRedis = 2 ) // GfToken gtoken结构体 type GfToken struct { // GoFrame server name ServerName string // 缓存模式 1 gcache 2 gredis 默认1 CacheMode int8 // 缓存key CacheKey string // 超时时间 默认10天(毫秒) Timeout int // 缓存刷新时间 默认为超时时间的一半(毫秒) MaxRefresh int // Token分隔符 TokenDelimiter string // Token加密key EncryptKey []byte // 认证失败中文提示 AuthFailMsg string // 是否支持多端登录,默认false MultiLogin bool // 是否是全局认证 GlobalMiddleware bool // 登录路径 LoginPath string // 登录验证方法 return userKey 用户标识 如果userKey为空,结束执行 LoginBeforeFunc func(r *ghttp.Request) (string, interface{}) // 登录返回方法 LoginAfterFunc func(r *ghttp.Request, respData Resp) // 登出地址 LogoutPath string // 登出验证方法 return true 继续执行,否则结束执行 LogoutBeforeFunc func(r *ghttp.Request) bool // 登出返回方法 LogoutAfterFunc func(r *ghttp.Request, respData Resp) // 拦截地址 AuthPaths g.SliceStr // 拦截排除地址 AuthExcludePaths g.SliceStr // 认证验证方法 return true 继续执行,否则结束执行 AuthBeforeFunc func(r *ghttp.Request) bool // 认证返回方法 AuthAfterFunc func(r *ghttp.Request, respData Resp) } // Init 初始化 func (m *GfToken) Init() bool { if m.CacheMode == 0 { m.CacheMode = CacheModeCache } if m.CacheKey == "" { m.CacheKey = "GToken:" } if m.Timeout == 0 { m.Timeout = 10 * 24 * 60 * 60 * 1000 } if m.MaxRefresh == 0 { m.MaxRefresh = m.Timeout / 2 } if m.TokenDelimiter == "" { m.TokenDelimiter = "_" } if len(m.EncryptKey) == 0 { m.EncryptKey = []byte("12345678912345678912345678912345") } if m.AuthFailMsg == "" { m.AuthFailMsg = "请求错误或登录超时" } if m.LoginAfterFunc == nil { m.LoginAfterFunc = func(r *ghttp.Request, respData Resp) { if !respData.Success() { r.Response.WriteJson(respData) } else { r.Response.WriteJson(Succ(g.Map{ "token": respData.GetString("token"), })) } } } if m.LogoutBeforeFunc == nil { m.LogoutBeforeFunc = func(r *ghttp.Request) bool { return true } } if m.LogoutAfterFunc == nil { m.LogoutAfterFunc = func(r *ghttp.Request, respData Resp) { if respData.Success() { r.Response.WriteJson(Succ("Logout success")) } else { r.Response.WriteJson(respData) } } } if m.AuthBeforeFunc == nil { m.AuthBeforeFunc = func(r *ghttp.Request) bool { // 静态页面不拦截 if r.IsFileRequest() { return false } return true } } if m.AuthAfterFunc == nil { m.AuthAfterFunc = func(r *ghttp.Request, respData Resp) { if respData.Success() { r.Middleware.Next() } else { var params map[string]interface{} if r.Method == "GET" { params = r.GetMap() } else if r.Method == "POST" { params = r.GetMap() } else { r.Response.Writeln("Request Method is ERROR! ") return } no := gconv.String(gtime.TimestampMilli()) glog.Info(fmt.Sprintf("[AUTH_%s][url:%s][params:%s][data:%s]", no, r.URL.Path, params, respData.Json())) respData.Msg = m.AuthFailMsg r.Response.WriteJson(respData) r.ExitAll() } } } return true } // Start 启动 func (m *GfToken) Start() bool { if !m.Init() { return false } glog.Info("[GToken][params:" + m.String() + "]start... ") s := g.Server(m.ServerName) // 缓存模式 if m.CacheMode > CacheModeRedis { glog.Error("[GToken]CacheMode set error") return false } // 认证拦截器 if m.AuthPaths == nil { glog.Error("[GToken]HookPathList not set") return false } // 是否是全局拦截 if m.GlobalMiddleware { s.BindMiddlewareDefault(m.AuthMiddleware) } else { for _, authPath := range m.AuthPaths { tmpPath := authPath if !strings.HasSuffix(authPath, "/*") { tmpPath += "/*" } s.BindMiddleware(tmpPath, m.AuthMiddleware) } } // 登录 if m.LoginPath == "" || m.LoginBeforeFunc == nil { glog.Error("[GToken]LoginPath or LoginBeforeFunc not set") return false } s.BindHandler(m.LoginPath, m.Login) // 登出 if m.LogoutPath == "" { glog.Error("[GToken]LogoutPath or LogoutFunc not set") return false } s.BindHandler(m.LogoutPath, m.Logout) return true } // Start 结束 func (m *GfToken) Stop() bool { glog.Info("[GToken]stop. ") return true } // GetTokenData 通过token获取对象 func (m *GfToken) GetTokenData(r *ghttp.Request) Resp { respData := m.getRequestToken(r) if respData.Success() { // 验证token respData = m.validToken(respData.DataString()) } return respData } // Login 登录 func (m *GfToken) Login(r *ghttp.Request) { userKey, data := m.LoginBeforeFunc(r) if userKey == "" { glog.Error("[GToken]Login userKey is empty") return } if m.MultiLogin { // 支持多端重复登录,返回相同token userCacheResp := m.getToken(userKey) if userCacheResp.Success() { respToken := m.EncryptToken(userKey, userCacheResp.GetString("uuid")) m.LoginAfterFunc(r, respToken) return } } // 生成token respToken := m.genToken(userKey, data) m.LoginAfterFunc(r, respToken) } // Logout 登出 func (m *GfToken) Logout(r *ghttp.Request) { if m.LogoutBeforeFunc(r) { // 获取请求token respData := m.getRequestToken(r) if respData.Success() { // 删除token m.RemoveToken(respData.DataString()) } m.LogoutAfterFunc(r, respData) } } // AuthMiddleware 认证拦截 func (m *GfToken) AuthMiddleware(r *ghttp.Request) { urlPath := r.URL.Path if !m.AuthPath(urlPath) { // 如果不需要认证,继续 r.Middleware.Next() return } // 不需要认证,直接下一步 if !m.AuthBeforeFunc(r) { r.Middleware.Next() return } // 获取请求token tokenResp := m.getRequestToken(r) if tokenResp.Success() { // 验证token tokenResp = m.validToken(tokenResp.DataString()) } m.AuthAfterFunc(r, tokenResp)
// 判断路径是否需要进行认证拦截 // return true 需要认证 func (m *GfToken) AuthPath(urlPath string) bool { // 去除后斜杠 if strings.HasSuffix(urlPath, "/") { urlPath = gstr.SubStr(urlPath, 0, len(urlPath)-1) } // 全局处理,认证路径拦截处理 if m.GlobalMiddleware { var authFlag bool for _, authPath := range m.AuthPaths { tmpPath := authPath if strings.HasSuffix(tmpPath, "/*") { tmpPath = gstr.SubStr(tmpPath, 0, len(tmpPath)-2) } if gstr.HasPrefix(urlPath, tmpPath) { authFlag = true break } } if !authFlag { // 拦截路径不匹配 return false } } // 排除路径处理,到这里nextFlag为true for _, excludePath := range m.AuthExcludePaths { tmpPath := excludePath // 前缀匹配 if strings.HasSuffix(tmpPath, "/*") { tmpPath = gstr.SubStr(tmpPath, 0, len(tmpPath)-2) if gstr.HasPrefix(urlPath, tmpPath) { // 前缀匹配不拦截 return false } } else { // 全路径匹配 if strings.HasSuffix(tmpPath, "/") { tmpPath = gstr.SubStr(tmpPath, 0, len(tmpPath)-1) } if urlPath == tmpPath { // 全路径匹配不拦截 return false } } } return true } // getRequestToken 返回请求Token func (m *GfToken) getRequestToken(r *ghttp.Request) Resp { authHeader := r.Header.Get("Authorization") if authHeader != "" { parts := strings.SplitN(authHeader, " ", 2) if !(len(parts) == 2 && parts[0] == "Bearer") { glog.Warning("[GToken]authHeader:" + authHeader + " get token key fail") return Unauthorized("get token key fail", "") } else if parts[1] == "" { glog.Warning("[GToken]authHeader:" + authHeader + " get token fail") return Unauthorized("get token fail", "") } return Succ(parts[1]) } authHeader = r.GetString("token") if authHeader == "" { return Unauthorized("query token fail", "") } return Succ(authHeader) } // genToken 生成Token func (m *GfToken) genToken(userKey string, data interface{}) Resp { token := m.EncryptToken(userKey, "") if !token.Success() { return token } cacheKey := m.CacheKey + userKey userCache := g.Map{ "userKey": userKey, "uuid": token.GetString("uuid"), "data": data, "createTime": gtime.Now().TimestampMilli(), "refreshTime": gtime.Now().TimestampMilli() + gconv.Int64(m.MaxRefresh), } cacheResp := m.setCache(cacheKey, userCache) if !cacheResp.Success() { return cacheResp } return token } // validToken 验证Token func (m *GfToken) validToken(token string) Resp { if token == "" { return Unauthorized("valid token empty", "") } decryptToken := m.DecryptToken(token) if !decryptToken.Success() { return decryptToken } userKey := decryptToken.GetString("userKey") uuid := decryptToken.GetString("uuid") userCacheResp := m.getToken(userKey) if !userCacheResp.Success() { return userCacheResp } if uuid != userCacheResp.GetString("uuid") { glog.Error("[GToken]user auth error, decryptToken:" + decryptToken.Json() + " cacheValue:" + gconv.String(userCacheResp.Data)) return Unauthorized("user auth error", "") } return userCacheResp } // getToken 通过userKey获取Token func (m *GfToken) getToken(userKey string) Resp { cacheKey := m.CacheKey + userKey userCacheResp := m.getCache(cacheKey) if !userCacheResp.Success() { return userCacheResp } userCache := gconv.Map(userCacheResp.Data) nowTime := gtime.Now().TimestampMilli() refreshTime := userCache["refreshTime"] // 需要进行缓存超时时间刷新 if gconv.Int64(refreshTime) == 0 || nowTime > gconv.Int64(refreshTime) { userCache["createTime"] = gtime.Now().TimestampMilli() userCache["refreshTime"] = gtime.Now().TimestampMilli() + gconv.Int64(m.MaxRefresh) glog.Debug("[GToken]refreshToken:" + gconv.String(userCache)) return m.setCache(cacheKey, userCache) } return Succ(userCache) } // removeToken 删除Token func (m *GfToken) RemoveToken(token string) Resp { decryptToken := m.DecryptToken(token) if !decryptToken.Success() { return decryptToken } cacheKey := m.CacheKey + decryptToken.GetString("userKey") return m.removeCache(cacheKey) } // EncryptToken token加密方法 func (m *GfToken) EncryptToken(userKey string, uuid string) Resp { if userKey == "" { return Fail("encrypt userKey empty") } if uuid == "" { // 重新生成uuid newUuid, err := gmd5.Encrypt(grand.Letters(10)) if err != nil { glog.Error("[GToken]uuid error", err) return Error("uuid error") } uuid = newUuid } tokenStr := userKey + m.TokenDelimiter + uuid token, err := gaes.Encrypt([]byte(tokenStr), m.EncryptKey) if err != nil { glog.Error("[GToken]encrypt error", err) return Error("encrypt error") } return Succ(g.Map{ "userKey": userKey, "uuid": uuid, "token": gbase64.Encode(token), }) } // DecryptToken token解密方法 func (m *GfToken) DecryptToken(token string) Resp { if token == "" { return Fail("decrypt token empty") } token64, err := gbase64.Decode([]byte(token)) if err != nil { glog.Error("[GToken]decode error", err) return Error("decode error") } decryptToken, err2 := gaes.Decrypt([]byte(token64), m.EncryptKey) if err2 != nil { glog.Error("[GToken]decrypt error", err2) return Error("decrypt error") } tokenArray := gstr.Split(string(decryptToken), m.TokenDelimiter) if len(tokenArray) < 2 { glog.Error("[GToken]token len error") return Error("token len error") } return Succ(g.Map{ "userKey": tokenArray[0], "uuid": tokenArray[1], }) } // String token解密方法 func (m *GfToken) String() string { return gconv.String(g.Map{ // 缓存模式 1 gcache 2 gredis 默认1 "CacheMode": m.CacheMode, "CacheKey": m.CacheKey, "Timeout": m.Timeout, "TokenDelimiter": m.TokenDelimiter, "EncryptKey": string(m.EncryptKey), "LoginPath": m.LoginPath, "LogoutPath": m.LogoutPath, "AuthPaths": gconv.String(m.AuthPaths), }) }
}
test_pci_get_input_values.py
#!/usr/bin/python ############################################################################### # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # # # # Licensed under the Apache License Version 2.0 (the "License"). You may not # # use this file except in compliance with the License. A copy of the License # # is located at # # # # http://www.apache.org/licenses/LICENSE-2.0/ # # # # or in the "license" file accompanying this file. This file is distributed # # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express # # or implied. See the License for the specific language governing permis- # # sions and limitations under the License. # ############################################################################### import pytest from pci_get_input_values import verify def expected(): return { "filter_name": "SHARR_Filter_PCI_321_Finding_CW1_RootAccountUsage", "filter_pattern": '{$.userIdentity.type="Root" && $.userIdentity.invokedBy NOT EXISTS && $.eventType !="AwsServiceEvent"}', "metric_name": "SHARR_PCI_321_Finding_CW1_RootAccountUsage",
"alarm_name": "SHARR_Alarm_PCI_321_Finding_CW1_RootAccountUsage", "alarm_desc": "Alarm for PCI finding CW.1 RootAccountUsage", "alarm_threshold": 1 } def test_verify(): assert verify({'ControlId': 'PCI.CW.1'}, {}) == expected()
"metric_value": 1,
index.tsx
import React, { Dispatch, useMemo } from 'react'; import { observer } from 'mobx-react'; import { Box, Grid } from '@material-ui/core'; import { SearchStoreV3 } from 'DriverApp/store/SearchStoreV3'; import LoadCard from 'components/v3/LoadCardV3'; import FOInfiniteLoader from 'components/v3/FOInfiniteLoader'; import RALSubmitted from 'components/v3/RALPostContent/RALSubmitted'; import LoadSkeleton from 'components/v3/LoadSkeleton'; import SearchLoadsContentNoResults from './SearchLoadsContentNoResults'; import { SearchSteps } from '../SearchLoadsContentForm'; interface ISearchLoadsContentResultsProps { searchStoreV3: SearchStoreV3; pickupDateFieldValue; reflectDrawerState: Dispatch<boolean>;
programmaticFormSumitToRAL: () => void; hideRalSubmittedConfirmation: () => void; ralSubmitLoading: boolean; enqueueSnackbarStore: () => void; setCurrentStep: (steps: SearchSteps) => void; showRALSubmitConfirmation?: boolean; isRALSearch?: boolean; } const SearchLoadsContentResults = observer(({ searchStoreV3: { searchResults, downloadSearchResults }, pickupDateFieldValue, reflectDrawerState, programmaticFormSumitToRAL, setCurrentStep, hideRalSubmittedConfirmation, ralSubmitLoading, showRALSubmitConfirmation, isRALSearch, enqueueSnackbarStore, }: ISearchLoadsContentResultsProps) => { const resultsCount = useMemo(() => searchResults.results.length, [searchResults.results]); if(!searchResults.loading && !isRALSearch){ if(searchResults.results.length == 0){ enqueueSnackbarStore('No results, try increasing your search radius.', { variant: 'success' }); } else if(searchResults.results.length > 100){ enqueueSnackbarStore('Too many results, try filtering the loads.', { variant: 'success' }); } else if(searchResults.results.length < 10){ enqueueSnackbarStore('Low results on search, try another lane.', { variant: 'success' }); } } const NoResultsComponent = () => ( <SearchLoadsContentNoResults resultsCount={resultsCount} pickupDateFieldValue={pickupDateFieldValue} reflectDrawerState={reflectDrawerState} ralSubmitLoading={ralSubmitLoading} setCurrentStep={setCurrentStep} programmaticFormSumitToRAL={programmaticFormSumitToRAL} /> ); const ResultsComponent = () => ( <Grid container spacing={1}> { searchResults.results.map((load, index) => ( <Grid item xs={12}> <Box px={1} py={index < 2 ? 1 : 2}> <LoadCard load={load} showMap={index < 3} /> </Box> </Grid> )) } </Grid> ); return ( <> { isRALSearch && showRALSubmitConfirmation ? ( <RALSubmitted resultsCount={resultsCount} reflectDrawerState={reflectDrawerState} showSearchResults={hideRalSubmittedConfirmation} setCurrentStep={setCurrentStep} /> ) : ( <FOInfiniteLoader resultsCount={resultsCount} NoResultsComponent={( <NoResultsComponent /> )} LoadingMockComponent={<LoadSkeleton />} loading={searchResults.loading} error={null} downloadResults={downloadSearchResults} getMoreResults={searchResults.downloadNextResults} pagination={searchResults.pagination} ResultsComponent={( <ResultsComponent /> )} ErrorComponent={<div />} /> ) } </> ); }); export default SearchLoadsContentResults;
Iframe.py
# AUTO GENERATED FILE - DO NOT EDIT from dash.development.base_component import Component, _explicitize_args class Iframe(Component): """An Iframe component. Iframe is a wrapper for the <iframe> HTML5 element. For detailed attribute info see: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe Keyword arguments: - children (a list of or a singular dash component, string or number; optional): The children of this component. - id (string; optional): The ID of this component, used to identify dash components in callbacks. The ID needs to be unique across all of the components in an app. - accessKey (string; optional): Keyboard shortcut to activate or add focus to the element. - allow (string; optional): Specifies a feature-policy for the iframe. - aria-* (string; optional): A wildcard aria attribute. - className (string; optional): Often used with CSS to style elements with common properties. - contentEditable (string; optional): Indicates whether the element's content is editable. - contextMenu (string; optional): Defines the ID of a <menu> element which will serve as the element's context menu. - data-* (string; optional): A wildcard data attribute. - dir (string; optional): Defines the text direction. Allowed values are ltr (Left-To-Right) or rtl (Right-To-Left). - draggable (string; optional): Defines whether the element can be dragged. - height (string | number; optional): Specifies the height of elements listed here. For all other elements, use the CSS height property. Note: In some instances, such as <div>, this is a legacy attribute, in which case the CSS height property should be used instead. - hidden (a value equal to: 'hidden', 'HIDDEN' | boolean; optional): Prevents rendering of given element, while keeping child elements, e.g. script elements, active. - key (string; optional): A unique identifier for the component, used to improve performance by React.js while rendering components See https://reactjs.org/docs/lists-and-keys.html for more info. - lang (string; optional): Defines the language used in the element. - loading_state (dict; optional): Object that holds the loading state object coming from dash-renderer. `loading_state` is a dict with keys: - component_name (string; optional): Holds the name of the component that is loading. - is_loading (boolean; optional): Determines if the component is loading or not. - prop_name (string; optional): Holds which property is loading. - n_clicks (number; default 0): An integer that represents the number of times that this element has been clicked on. - n_clicks_timestamp (number; default -1): An integer that represents the time (in ms since 1970) at which n_clicks changed. This can be used to tell which button was changed most recently. - name (string; optional): Name of the element. For example used by the server to identify the fields in form submits. - referrerPolicy (string; optional): Specifies which referrer is sent when fetching the resource. - role (string; optional): The ARIA role attribute. - sandbox (string; optional): Stops a document loaded in an iframe from using certain features (such as submitting forms or opening new windows). - spellCheck (string; optional): Indicates whether spell checking is allowed for the element. - src (string; optional): The URL of the embeddable content. - srcDoc (string; optional) - style (dict; optional): Defines CSS styles which will override styles previously set. - tabIndex (string; optional): Overrides the browser's default tab order and follows the one specified instead. - title (string; optional): Text to be displayed in a tooltip when hovering over the element. - width (string | number; optional): For the elements listed here, this establishes the element's width. Note: For all other instances, such as <div>, this is a legacy attribute, in which case the CSS width property should be used instead.""" @_explicitize_args def
( self, children=None, id=Component.UNDEFINED, n_clicks=Component.UNDEFINED, n_clicks_timestamp=Component.UNDEFINED, key=Component.UNDEFINED, role=Component.UNDEFINED, allow=Component.UNDEFINED, height=Component.UNDEFINED, name=Component.UNDEFINED, referrerPolicy=Component.UNDEFINED, sandbox=Component.UNDEFINED, src=Component.UNDEFINED, srcDoc=Component.UNDEFINED, width=Component.UNDEFINED, accessKey=Component.UNDEFINED, className=Component.UNDEFINED, contentEditable=Component.UNDEFINED, contextMenu=Component.UNDEFINED, dir=Component.UNDEFINED, draggable=Component.UNDEFINED, hidden=Component.UNDEFINED, lang=Component.UNDEFINED, spellCheck=Component.UNDEFINED, style=Component.UNDEFINED, tabIndex=Component.UNDEFINED, title=Component.UNDEFINED, loading_state=Component.UNDEFINED, **kwargs ): self._prop_names = [ "children", "id", "accessKey", "allow", "aria-*", "className", "contentEditable", "contextMenu", "data-*", "dir", "draggable", "height", "hidden", "key", "lang", "loading_state", "n_clicks", "n_clicks_timestamp", "name", "referrerPolicy", "role", "sandbox", "spellCheck", "src", "srcDoc", "style", "tabIndex", "title", "width", ] self._type = "Iframe" self._namespace = "dash_html_components" self._valid_wildcard_attributes = ["data-", "aria-"] self.available_properties = [ "children", "id", "accessKey", "allow", "aria-*", "className", "contentEditable", "contextMenu", "data-*", "dir", "draggable", "height", "hidden", "key", "lang", "loading_state", "n_clicks", "n_clicks_timestamp", "name", "referrerPolicy", "role", "sandbox", "spellCheck", "src", "srcDoc", "style", "tabIndex", "title", "width", ] self.available_wildcard_properties = ["data-", "aria-"] _explicit_args = kwargs.pop("_explicit_args") _locals = locals() _locals.update(kwargs) # For wildcard attrs args = {k: _locals[k] for k in _explicit_args if k != "children"} for k in []: if k not in args: raise TypeError("Required argument `" + k + "` was not specified.") super(Iframe, self).__init__(children=children, **args)
__init__
cnn_classifier.py
from keras.preprocessing.image import ImageDataGenerator from keras.models import Sequential from keras.layers import Convolution2D, MaxPooling2D from keras.layers import Activation, Dropout, Flatten, Dense, Lambda, ELU from keras.optimizers import Adam from sklearn.model_selection import train_test_split from keras.models import model_from_json from sklearn.preprocessing import normalize import cv2 import numpy as np import glob import json from keras.layers import merge from keras.layers.core import Lambda from keras.models import Model import tensorflow as tf def make_parallel(model, gpu_count): def get_slice(data, idx, parts): shape = tf.shape(data) size = tf.concat(0, [shape[:1] // parts, shape[1:]]) stride = tf.concat(0, [shape[:1] // parts, shape[1:] * 0]) start = stride * idx return tf.slice(data, start, size) outputs_all = [] for i in range(len(model.outputs)): outputs_all.append([]) # Place a copy of the model on each GPU, each getting a slice of the batch for i in range(gpu_count): with tf.device('/gpu:%d' % i): with tf.name_scope('tower_%d' % i) as scope: inputs = [] # Slice each input into a piece for processing on this GPU for x in model.inputs: input_shape = tuple(x.get_shape().as_list())[1:] slice_n = Lambda(get_slice, output_shape=input_shape, arguments={'idx': i, 'parts': gpu_count})(x) inputs.append(slice_n) outputs = model(inputs) if not isinstance(outputs, list): outputs = [outputs] # Save all the outputs for merging back together later for l in range(len(outputs)): outputs_all[l].append(outputs[l]) # merge outputs on CPU with tf.device('/cpu:0'): merged = [] for outputs in outputs_all: merged.append(merge(outputs, mode='concat', concat_axis=0)) return Model(input=model.inputs, output=merged) class CNNClassifier: def __init__(self): self.classifier = None def get_model(self, parallel=False): model = Sequential() #model.add(Lambda(lambda x: x / 127.5 - 1., input_shape=(64, 64, 3))) model.add(Convolution2D(8, 8, 8, subsample=(4, 4), border_mode="same", activation='elu', name='Conv1')) model.add(Convolution2D(16, 5, 5, subsample=(2, 2), border_mode="same", activation='elu', name='Conv2')) model.add(Convolution2D(32, 5, 5, subsample=(2, 2), border_mode="same", activation='elu', name='Conv3')) model.add(Flatten()) model.add(ELU()) model.add(Dense(1024, activation='elu')) model.add(Dropout(.5)) model.add(ELU()) model.add(Dense(512, activation='elu')) model.add(Dropout(.5)) model.add(Dense(1, name='output')) model.add(Activation('sigmoid')) if parallel: model = make_parallel(model, 2) #model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy']) self.model = model return model def _model(self): img_width, img_height = 64, 64 model = Sequential() model.add(Convolution2D(8, 3, 3, input_shape=(img_width, img_height, 3))) model.add(Activation('elu')) model.add(MaxPooling2D(pool_size=(2, 2))) #model.add(Convolution2D(16, 3, 3)) #model.add(Activation('elu')) #model.add(MaxPooling2D(pool_size=(2, 2))) #model.add(Convolution2D(32, 3, 3)) #model.add(Activation('elu')) #model.add(MaxPooling2D(pool_size=(2, 2))) model.add(Flatten()) model.add(Dense(512)) model.add(Dropout(0.5)) model.add(Dense(1, activation='sigmoid')) #model = make_parallel(model, 2) self.model = model def compile(self): self.model.compile(loss='binary_crossentropy', optimizer='rmsprop', class_mode='binary', metrics=['accuracy']) def save(self): model_json = self.model.to_json() with open("./model.json", "w") as json_file: json.dump(model_json, json_file) self.model.save_weights("./model.h5") print("Saved model to disk") def load(self): with open('./model.json', 'r') as jfile: self.model = model_from_json(json.load(jfile)) self.compile() self.model.load_weights('./model.h5') def get_list(self): vehicles = np.array(glob.glob('training_data/vehicles/*/*')) y_vehicles = np.zeros(vehicles.shape) + 1 non_vehicles = np.array(glob.glob('training_data/non-vehicles/*/*')) y_non_vehicles = np.zeros(non_vehicles.shape) X_data = np.concatenate((vehicles, non_vehicles)) Y_data = np.concatenate((y_vehicles, y_non_vehicles)) return X_data, Y_data def
(self, image): #img = np.copy(image) #img = cv2.resize(img, (64, 64)) x = image[None, :, :, :] result = self.model.predict(x, 1) return result def train(self, file_list, labels, test_size=0.2, nb_epoch=30, batch_size=128): X_train, X_test, Y_train, Y_test = train_test_split(file_list, labels, test_size=test_size, random_state=100) test_images = build_images(X_test) train_images = build_images(X_train) train_datagen = ImageDataGenerator( rescale=1. / 255, shear_range=0.05, zoom_range=0.05, width_shift_range=0.1, height_shift_range=0.1, rotation_range=5, horizontal_flip=True) test_datagen = ImageDataGenerator(rescale=1. / 255) train_generator = train_datagen.flow(train_images, Y_train, batch_size) test_generator = test_datagen.flow(test_images, Y_test, batch_size) nb_train_samples = (batch_size-1)*100 nb_validation_samples = (batch_size-1)*20 #self.get_model(parallel=False) self._model() self.compile() self.model.fit_generator( train_generator, samples_per_epoch=nb_train_samples, nb_epoch=nb_epoch, show_accuracy=True, validation_data=test_generator, nb_val_samples=nb_validation_samples) def build_images(x): images = np.zeros((len(x), 64, 64, 3)) for idx, img_fname in enumerate(x): im = cv2.imread(img_fname) im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB) im = cv2.resize(im, (64, 64), interpolation=cv2.INTER_AREA) images[idx] = im return images def do_all(nb_epoch=30, batch_size=256): clf = CNNClassifier() x, y = clf.get_list() clf.train(x, y, nb_epoch=nb_epoch, batch_size=batch_size) clf.save()
predict
mod.rs
use crate::utils::pckg; use duckscript::types::command::{Command, CommandResult}; #[cfg(test)] #[path = "./mod_test.rs"] mod mod_test; #[derive(Clone)] pub(crate) struct
{ package: String, } impl Command for CommandImpl { fn name(&self) -> String { pckg::concat(&self.package, "HexDecode") } fn aliases(&self) -> Vec<String> { vec!["hex_decode".to_string()] } fn help(&self) -> String { include_str!("help.md").to_string() } fn clone_and_box(&self) -> Box<dyn Command> { Box::new((*self).clone()) } fn run(&self, arguments: Vec<String>) -> CommandResult { if arguments.is_empty() { CommandResult::Error("Value not provided.".to_string()) } else { match u64::from_str_radix(arguments[0].trim_start_matches("0x"), 16) { Ok(num) => CommandResult::Continue(Some(num.to_string())), Err(error) => CommandResult::Error(error.to_string()) } } } } pub(crate) fn create(package: &str) -> Box<dyn Command> { Box::new(CommandImpl { package: package.to_string(), }) }
CommandImpl
context.ts
import { createInjectionKey } from '../../_utils' import type { ConfigProviderInjection } from './internal-interface' export const configProviderInjectionKey =
createInjectionKey<ConfigProviderInjection>('n-config-provider')
script4.js
"use strict"; var date = 12; var money = 100; var ob = { 3: 0.12/12, 6: 0.16/12, 12: 0.18/12 }; function
(datePar, moneyPar, obPar) { for (var i in ob) { if (i == datePar) { var sum = moneyPar; for (var j = 1; j <= datePar; j++) { sum += sum * obPar[i]; console.log('month = ' + sum); } console.log('all = ' + sum); } } }; calDep(date, money, ob); // Пометки: // оптимальный - не оптимальный код // оптимальность - соответсвие // вес программы
calDep
permissions.go
// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package permissions contains codes share between IC and DAM. package permissions import ( "net/url" "strings" "sync" "time" "github.com/GoogleCloudPlatform/healthcare-federated-access-services/lib/ga4gh" /* copybara-comment: ga4gh */ "github.com/GoogleCloudPlatform/healthcare-federated-access-services/lib/storage" /* copybara-comment: storage */ glog "github.com/golang/glog" /* copybara-comment */ cpb "github.com/GoogleCloudPlatform/healthcare-federated-access-services/proto/common/v1" /* copybara-comment: go_proto */ ) const cacheTimeout = time.Minute * 5 // Permissions type exposes functions access user permissions. type Permissions struct { store storage.Store cachedPermissions *cpb.Permissions cacheExpiry time.Time mutex sync.Mutex } // New creates Permissions. func New(store storage.Store) *Permissions { return &Permissions{store: store} } // loadPermissions loads permission from storage/config. func (p *Permissions) loadPermissions() (*cpb.Permissions, error) { p.mutex.Lock() defer p.mutex.Unlock() now := time.Now() // return if valid cached value available if p.cachedPermissions != nil && p.cacheExpiry.After(now) { return p.cachedPermissions, nil } p.cacheExpiry = now.Add(cacheTimeout) p.cachedPermissions = &cpb.Permissions{} if err := p.store.Read(storage.PermissionsDatatype, storage.DefaultRealm, storage.DefaultUser, storage.DefaultID, storage.LatestRev, p.cachedPermissions); err != nil { return nil, err } return p.cachedPermissions, nil } // CheckAdmin returns if user has valid admin permission. func (p *Permissions) CheckAdmin(id *ga4gh.Identity) (bool, error) { perm, err := p.loadPermissions() if err != nil { return false, err } return isAdmin(perm, id), nil } func extractIdentitiesFromVisas(id *ga4gh.Identity) []string { var subjects []string for _, j := range id.VisaJWTs { v, err := ga4gh.NewVisaFromJWT(ga4gh.VisaJWT(j)) if err != nil
if v.Data().Assertion.Type != ga4gh.LinkedIdentities { continue } // TODO Need to verify JWT before use. // TODO Need to verify JWT from trust issuer and source. subjects = append(subjects, v.Data().Subject) for _, s := range strings.Split(string(v.Data().Assertion.Value), ";") { ss := strings.Split(s, ",") if len(ss) != 2 { glog.Warning("LinkedIdentities in wrong format") continue } email, err := url.QueryUnescape(ss[0]) if err != nil { glog.Warningf("url.QueryUnescape(email) failed: %v", err) continue } subjects = append(subjects, email) } } return subjects } // isAdmin returns true if the identity's underlying account has // administrative privileges without checking scopes or other // restrictions related to the auth token itself. func isAdmin(perm *cpb.Permissions, id *ga4gh.Identity) bool { if id == nil { return false } now := time.Now() if isAdminUser(perm, id.Subject, now) { return true } for user := range id.Identities { if isAdminUser(perm, user, now) { return true } } for _, sub := range extractIdentitiesFromVisas(id) { if isAdminUser(perm, sub, now) { return true } } return false } func isAdminUser(perm *cpb.Permissions, user string, now time.Time) bool { // Only allowing "sub" that contain an "@" symbol. We don't want // to allow admins to try to trigger on a raw account number // without knowing where it came from. if !strings.Contains(user, "@") { return false } u, ok := perm.Users[user] if !ok { return false } r, ok := u.Roles["admin"] if ok && (r < 0 || r > now.UnixNano()/1e9) { return true } return false }
{ glog.Warningf("ga4gh.NewVisaFromJWT failed: %v", err) continue }
controller.go
/* Copyright 2019 The Knative Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package dispatcher import ( "context" "time" "go.uber.org/zap" "knative.dev/eventing/pkg/channel" "knative.dev/eventing/pkg/channel/swappable" inmemorychannelinformer "knative.dev/eventing/pkg/client/injection/informers/messaging/v1alpha1/inmemorychannel" "knative.dev/eventing/pkg/inmemorychannel" "knative.dev/eventing/pkg/reconciler" "knative.dev/eventing/pkg/tracing" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" tracingconfig "knative.dev/pkg/tracing/config" ) const ( // ReconcilerName is the name of the reconciler. ReconcilerName = "InMemoryChannels" // controllerAgentName is the string used by this controller to identify // itself when creating events. controllerAgentName = "in-memory-channel-dispatcher" readTimeout = 15 * time.Minute writeTimeout = 15 * time.Minute port = 8080 ) // NewController initializes the controller and is called by the generated code. // Registers event handlers to enqueue events. func
( ctx context.Context, cmw configmap.Watcher, ) *controller.Impl { base := reconciler.NewBase(ctx, controllerAgentName, cmw) // Setup trace publishing. iw := cmw.(*configmap.InformedWatcher) if err := tracing.SetupDynamicPublishing(base.Logger, iw, "imc-dispatcher", tracingconfig.ConfigName); err != nil { base.Logger.Fatalw("Error setting up trace publishing", zap.Error(err)) } sh, err := swappable.NewEmptyHandler(base.Logger.Desugar()) if err != nil { base.Logger.Fatal("Error creating swappable.Handler", zap.Error(err)) } args := &inmemorychannel.InMemoryDispatcherArgs{ Port: port, ReadTimeout: readTimeout, WriteTimeout: writeTimeout, Handler: sh, Logger: base.Logger.Desugar(), } inMemoryDispatcher := inmemorychannel.NewDispatcher(args) inmemorychannelInformer := inmemorychannelinformer.Get(ctx) informer := inmemorychannelInformer.Informer() r := &Reconciler{ Base: base, dispatcher: inMemoryDispatcher, inmemorychannelLister: inmemorychannelInformer.Lister(), inmemorychannelInformer: informer, } r.impl = controller.NewImpl(r, r.Logger, ReconcilerName) // Nothing to filer, enqueue all imcs if configmap updates. noopFilter := func(interface{}) bool { return true } resyncIMCs := configmap.TypeFilter(channel.EventDispatcherConfig{})(func(string, interface{}) { r.impl.FilteredGlobalResync(noopFilter, informer) }) // Watch for configmap changes and trigger imc reconciliation by enqueuing imcs. configStore := channel.NewEventDispatcherConfigStore(base.Logger, resyncIMCs) configStore.WatchConfigs(cmw) r.configStore = configStore r.Logger.Info("Setting up event handlers") // Watch for inmemory channels. r.inmemorychannelInformer.AddEventHandler(controller.HandleAll(r.impl.Enqueue)) // Start the dispatcher. go func() { err := inMemoryDispatcher.Start(ctx) if err != nil { r.Logger.Error("Failed stopping inMemoryDispatcher.", zap.Error(err)) } }() return r.impl }
NewController
test.rs
mod with_atom_class; use proptest::prop_assert_eq; use liblumen_alloc::atom; use liblumen_alloc::erts::exception; use liblumen_alloc::erts::term::prelude::*; use crate::otp::erlang::raise_3::native; use crate::test::strategy; #[test] fn
() { run!( |arc_process| { ( strategy::term::is_not_atom(arc_process.clone()), strategy::term(arc_process.clone()), strategy::term::list::proper(arc_process.clone()), ) }, |(class, reason, stacktrace)| { prop_assert_badarg!( native(class, reason, stacktrace), format!("class ({}) is not an atom", class) ); Ok(()) }, ); }
without_atom_class_errors_badarg
page.py
from lib2to3.pgen2 import driver #import requests #import self as self from selenium.webdriver.chrome import options from selenium.webdriver.common.action_chains import ActionChains from selenium import webdriver from bs4 import BeautifulSoup from selenium import webdriver #from selenium.webdriver.firefox.webdriver import Webdriver from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.keys import Keys from selenium.webdriver.common.by import By from datetime import datetime import logging from selenium.webdriver.support.select import Select import pytest from selenium.webdriver.remote.command import Command import time from selenium import webdriver import json from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.support import expected_conditions from selenium.webdriver.support.wait import WebDriverWait from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys import logging import urllib.request import os from time import sleep from sys import exit import random SLEEP_SHORT = 4 SLEEP_MEDIUM = 15 SLEEP_LONG = 20 class BasePage(object): def __init__(self, driver): self.driver = driver class ResultPage(BasePage): def find_link(self, tag_, class_): """ Найти подтверждающий элемент """ path: str = './/ttag[@class="cclass"]' path_1 = path.replace('ttag', tag_) xpath = path_1.replace('cclass', class_) # page.waitForElementVisible(xpath, 7) time.sleep(SLEEP_SHORT) table = self.driver.page_source soup = BeautifulSoup(table, 'html.parser') # print('Это суп', soup) ls = soup.find(tag_, class_) # print(ls) return ls def find_all_link(self, tag_, class_): "List of links" page = MainPage(self.driver) path = './/ttag[@class="cclass"]' path_1 = path.replace('ttag', tag_) xpath = path_1.replace('cclass', class_) page.waitForElementVisible(xpath, 7) time.sleep(SLEEP_SHORT) table = self.driver.page_source soup = BeautifulSoup(table, 'html.parser') ts = soup.find_all(tag_, class_) # print(ts) return ts def find_x(self, tag_, class_): # слайды # print(self, tag_, class_) закомментил, потому что выводит ненужную строчку символов table = self.driver.page_source soup = BeautifulSoup(table, 'html.parser') ts = soup.find(tag_, class_) ls = ts.find('a').get('clip_name') # это дочь, в test_case_1 указывается родитель # print(ls) return ls def find_y(self, tag_, class_): # год1 print(self, tag_, class_) table = self.driver.page_source soup = BeautifulSoup(table, 'html.parser') ts = soup.find(tag_, class_) ls = ts.find('button').get('filter__item filter__item_year').getText() # print(ls) '//button[text()="Outliers"]' return ls def find_n(self, tag_, class_): # год2 print(self, tag_, class_) table = self.driver.page_source soup = BeautifulSoup(table, 'html.parser') ts = soup.find(tag_, class_) ls = ts.find('button').get('years-to').getText() # print(ls) return ls def find_tag(self, tag_): # print(self, tag_, class_) time.sleep(SLEEP_LONG) table = self.driver.page_source soup = BeautifulSoup(table, 'html.parser') # print('Это суп', soup) ls = soup.find(tag_) # print(ls) return ls def find_all_tag(self, tag_): # print(self, tag_, class_) time.sleep(SLEEP_SHORT) table = self.driver.page_source soup = BeautifulSoup(table, 'html.parser') # print('Это суп', soup) ls = soup.find_all(tag_) # print(ls) return ls def simple_find(self, xpath, number): ls = self.driver.find_elements_by_xpath(xpath)[number] # print(ls) return ls def visible_xpath(self, xpath): time.sleep(SLEEP_SHORT) return EC.presence_of_element_located((By.XPATH, xpath)) class Admin(BasePage): def click_f(self, name, stap): page = MainPage(self.driver) step = str(stap) self.adminx = { 'Клик_In_Progress_статус': {'func': page.click_xpath, 'path': '//*[@id="issue_status_id"]/option[4]'}, 'Клик_удалить_фильм_Малыш': {'func': page.click_xpath, 'path': './/a[text()="Прокат фильма (QA version)"]'}, # 'Клик_подтвердить': {'func':page.click_a} 'Клик_Принять': {'func': page.click_xpath, 'path': './/*[@id="issue-form"]/input[6]'}, 'Клик_Ответить': {'func': page.click_xpath, 'path': './/span[@class="b-letter__foot__tab"]'}, 'Клик_Отправить_письмо': {'func': page.click_xpath,'path': '//*[@id="b-toolbar__right"]/div[3]/div[2]/div/div[1]/div[1]/div/div[1]/span'}, 'Клик_Входящие_mail_ru': {'func': page.click_xpath, 'path': '//*[@id="b-nav_folders"]/div/div[1]'}, 'Клик_Чекбокс_Входящие_mail_ru': {'func': page.click_xpath, 'path': './/div[@class="b-checkbox__box"]'}, 'Клик_Удалить_письма_из_mail_ru': {'func': page.click_xpath,'path': '//*[@id="b-toolbar__right"]/div[2]/div/div[2]/div[2]/div/div[1]/span'}, 'Клик_список_статус': {'func': page.click_id, 'path': 'issue_status_id'}, 'Админка_клик_найти': {'func': page.click_id, 'path': 'id-submit-search'}, 'Админка_клик_чекбокс_1': {'func': page.click_id, 'path': 'action-toggle'}, 'Админка_Действие': {'func': page.click_name, 'path': 'action'}, 'Админка_Выбор_Удалить_пользователя': {'func': page.click_css,'path': '#action_block > label > select > option:nth-child(14)'}, 'Админка_Выполнить': {'func': page.click_name, 'path': 'index'}, # 'Админка_подтвердить':{'func':page.click_css, 'path':'#content-main > form > input[type="submit"]'}, 'Админка_подтвердить': {'func': page.click_xpath, 'path': '//input[@value="Да, я уверен"]'}, 'Админка_большая_красная_кнопка': {'func': page.click_css, 'path': 'body > div > section > a'}, 'Клик_первое_письмо': {'func': page.click_xpath, 'path': './/a[@class="js-href b-datalist__item__link"]'}, 'Клик_второе_письмо': {'func': page.click_s_xpath, 'path': './/a[@class="js-href b-datalist__item__link"]','index': 1}, 'Переключение_1_в_iframe': {'func': page.click_switch_to, 'path': 'iframe', 'index': 1}, 'Возврат_фокуса_из_iframe': {'func': page.driver.switch_to.default_content, 'path': 'None'}, 'Клик_Closed_статус': {'func': page.double, 'path_1': '//*[@id="issue_status_id"]/option[7]','path_2': './/option[@value="5"]'}, # 'Профили_посетителей': {'func': page.click_xpath, 'path': './/a[text()="Профили посетителей"]'}, 'Профили_посетителей': {'func': page.click_xpath, 'path': './/a[@href="/admin/tvzavr_admin/customer/"]'}, } # '': {'func': '', 'path': ''} self.args = self.adminx[name] self.func = self.args['func'] # имя функции из словаря self.func(self.args) # Вызов нужной функции с id = path page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен') # # # # эти клики еще не внесены в словарь ((( # class CardFilm(BasePage): def click_f(self, name, stap): page = MainPage(self.driver) step = str(stap) self.cardx = { 'Клик_кнопки_просмотр_от_руб': {'func': page.click_xpath,'path': './/button[@class="clip-player__action button"]'}, 'Клик_кнопки_напрокат_SD-10р': {'func': page.click_xpath,'path': './/button[@class="tariffs__buy js-payment-info"]'}, 'Клик_кнопки_напрокат_SD-11.88р': {'func': page.click_xpath, 'path': './/button[@data-tariff-id="575"]'}, 'Клик_Личный_счёт': {'func': page.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "Личный счёт")]'}, 'Клик_Оплатить_личный_счет': {'func': page.click_xpath,'path': './/button[@class="payment-cloudpayments__card button button_stretched js-buy-button"]'}, 'Клик_кнопки_В_избранное': {'func': page.click_xpath, 'path': './/button[@class="clip__action"]'}, 'Клик_кнопки_Убрать_из_избранного': {'func': page.click_xpath,'path': './/button[@class="clip__action clip__action_active"]'}, # 'Клик_кнопки_Прокат': {'func': page.click_xpath, 'path': './/span[@class="tvz-currency tvz-currency-RUB"]'}, 'Клик_иконки_найденного_фильма': {'func': page.click_xpath, 'path': './/a[@href="/film/lunnyi-kamen/"]'}, 'Клик_первого_фильма': {'func': page.click_xpath, 'path': './/a[@class="card card_clip"]'}, 'Клик_иконки_избранного_фильма': {'func': page.click_xpath,'path': './/div[@class="clip-card__title tvz-overflow"]'}, 'Клик_иконки_фильма_в_избранном': {'funk': page.click_xpath, 'path': './/a[@class="card card_clip"]'}, 'Клик_Play': {'func': page.click_xpath, 'path': '//div[@class="tvz-button tvz-bpb2"]'}, 'Клик_пауза': {'func': page.click_xpath, 'path': '//div[@class="tvz-button tvz-button_play"]'}, 'Клик_вкладки_Описание': {'func': page.click_xpath,'path': '//h2[@class="clip__subheading"][contains(., "Описание")]'}, # 'Клик_вкладки_Комментарии': {'func': page.click_xpath, 'path': '//*[@id="page-content"]/div[2]/div[2]/div[4]/div/ul/li[4]/a'}, 'Клик_вкладки_Серии': {'func': page.click_xpath,'path': '//h2[@class="clip__subheading"][contains(., "Серии")]'}, 'Клик_2_ой_Серии_1_го_сезона': {'func': page.click_xpath,'path': '//div[@class="series-card__title"][contains(., "2 серия")]'}, 'Клик_на_вкладку_Отзывы ': {'func': page.click_xpath, 'path': './/a[@data-target="clip-comments"]'}, 'Клик_на_вкладку_Трейлеры': {'func': page.click_xpath, 'path': '//a[@data-target="clip-trailers"]'}, 'Клик_на_вкладку_Награды': {'func': page.click_xpath, 'path': '//a[@data-target="clip-awards"]'}, 'Клик_на_вкладку_описание': {'func': page.click_xpath, 'path': '//a[@data-target="clip-info"]'}, # '': {'func': '', 'path': ''} } self.args = self.cardx[name] self.func = self.args['func'] # имя функции из словаря self.func(self.args) # Вызов нужной функции с id = path page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен') class Profile(BasePage): def click_f(self, name, step_int): result = ResultPage(self.driver) page = MainPage(self.driver) step = str(step_int) self.profx = { 'Клик_Зарегистрироваться': {'func': page.click_id, 'path': 'email-registration-submit'}, 'Клик_поиска_Лупа': {'func': page.click_id, 'path': 'header-search-button'}, 'Клик_Подписки': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Подписки")]'}, # подписке в значке профиля 'Клик_Выйти': {'func': page.click_xpath,'path': './/button[@class="profile-menu__logout js-profile-logout"]'}, 'Клик_Пополнить': {'func': page.click_xpath,'path': './/button[@class="cabinet-balance__replenish button button_stretched js-replenishment"]'}, 'Клик_Личный_счет': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Счет")]'}, 'Клик_Личный_счет_нового_пользователя': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link profile-menu__link_notified"][contains(., "Счет")]'}, 'Клик_Регистрация': {'func': page.click_xpath, 'path': './/a[text()="Регистрация"]'}, 'Клик_phone_Зарегистрироваться': {'func': page.click_css, 'path': '#register-submit'}, 'Клик_значок_пользователя': {'func': page.click_xpath,'path': './/button[@class="header__profile js-profile-menu"]'}, 'Клик_значок_нового_пользователя': {'func': page.click_xpath,'path': './/button[@class="header__profile header__profile_notified js-profile-menu"]'}, 'Клик_мои_фильмы': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Мои фильмы")]'}, 'Клик_крестик_всплывшего_окна_тройка': {'func': page.click_xpath,'path': './/button[@class="modal__close"]'}, 'Клик_Настройки_профиля': {'func': page.click_xpath,'path': './/a[@class="profile-menu__link"][contains(., "Настройки")]'}, 'Клик_переход_в_настройки': {'func': page.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "Настройки")]'}, 'Клик_день_рождения': {'func': page.click_id, 'path': 'birthday-day'}, 'Ввод_дня_рождения': {'func': page.click_css, 'path': '#birthday-day > option:nth-child(5)'}, 'Клик_месяц_рождения': {'func': page.click_id, 'path': 'birthday-month'}, 'Ввод_месяца_рождения': {'func': page.click_css, 'path': '#birthday-month > option:nth-child(5)'}, 'Клик_год_рождения': {'func': page.click_id, 'path': 'birthday-year'}, 'Ввод_года_рождения': {'func': page.click_xpath, 'path': './/option[@value="1990"]'}, 'Клик_выбран_пол': {'func': page.click_xpath,'path': './/span[@class="toggle__label"][contains(., "Мужской пол")]'}, 'Клик_Снятие_галочки_с_подписки': {'func': page.click_xpath,'path': './/span[text()="Да, я хочу получать подписку с обновлениями, акциями и подарками"]'}, 'Клик_Снятие_галочки_с_продолжения_просмотра': {'func': page.click_xpath,'path': './/span[text()="Продолжать просмотр с места остановки"]'}, 'Клик_Сохранить': {'func': page.click_xpath,'path': './/button[@class="cabinet-settings__button button button_stretched"][contains(., "Сохранить")]'}, 'Клик_Избранное': {'func': page.click_xpath, 'path': './/a[text()="Избранное"]'}, } # Profile self.args = self.profx[name] self.func = self.args['func'] # имя функции из словаря self.func(self.args) # Вызов нужной функции с id = path page.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен') class MainPage(BasePage): def click_f(self, name, stap): step = str(stap) self.pagex = { 'Клик_прокрутки_слайда_вправо': {'func': self.click_xpath,'path': './/button[@class="slider__navigation slider__navigation_next js-slider-navigation js-slider-navigation-next"]'}, 'Клик_прокрутки_слайда_влево': {'func': self.click_xpath,'path': './/button[@class="slider__navigation slider__navigation_prev js-slider-navigation js-slider-navigation-prev"]'}, 'Клик_поиска_Лупа': {'func': self.click_css, 'path': '#header-search-button'},
'Клик_Новинки': {'func': self.click_xpath, 'path': './/a[@href="/novinki/"]'}, 'Показать_еще': {'func': self.click_xpath,'path': './/button[@class="catalog__more button js-catalog-more"]'}, # Подписки 'Клик_Подписки': {'func': self.click_xpath, 'path': './/a[@title="Подписка tvzavr"]'}, 'Клик_Подписка_Отключи_рекламу': {'func': self.click_xpath,'path': './/a[@class="tabs__link js-tabs-link"][contains(., "«Отключи рекламу на tvzavr!»")]'}, 'Клик_купить_за_99р': {'func': self.click_xpath,'path': './/button[@class="subscriptions__button button button_dark js-payment-info"]'}, 'Клик_Бесплатно': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Бесплатно")]'}, # Каталог и разделы 'Клик_Каталог': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Каталог")]'}, 'Клик_Фильмы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Фильмы")]'}, 'Клик_Мультфильмы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Мультфильмы")]'}, 'Клик_Сериалы_в_каталоге': {'func': self.click_xpath,'path': './/button[@class="filter__category js-filter-category"][contains(., "Сериалы")]'}, 'Клик_Годы_выпуска': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Годы выпуска")]'}, 'Выставление_год_левый': {'func': self.click_xpath, 'path': './/div[@style="left: 22.7642%;"]'}, 'Клик_Родительский_контроль': {'func': self.click_xpath, 'path': './/span[text()="Родительский контроль"]'}, 'Клик_Бесплатные': {'func': self.click_xpath, 'path': './/span[text()="Бесплатные"]'}, # Страны 'Клик_страны': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Страны")]'}, 'Клик_США': {'func': self.click_xpath, 'path': './/li[@data-filter-id="515"]'}, 'Клик_Германия': {'func': self.click_xpath, 'path': './/li[@data-tag-name="Германия"]'}, 'Клик_Южная_Корея': {'func': self.click_xpath, 'path': './/li[@data-filter-id="8789"]'}, 'Клик_Япония': {'func': self.click_xpath, 'path': './/li[@data-filter-id="3467"]'}, 'Клик_Испания': {'func': self.click_xpath, 'path': './/li[@data-filter-id="2600"]'}, 'Клик_Турция': {'func': self.click_xpath, 'path': './/li[@data-filter-id="5287"]'}, 'Клик_Россия': {'func': self.click_xpath, 'path': './/li[@data-filter-id="122"]'}, # Жанры 'Клик_Жанры': {'func': self.click_xpath,'path': './/button[@class="filter__subcategory js-filter-subcategory"][contains(., "Жанры")]'}, 'Клик_боевик_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="690"]'}, 'Клик_комедия_жанр': {'func': self.click_xpath, 'path': './/li[@data-tag-name="Комедия"]'}, 'Клик_азиатский_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="21136"]'}, 'Клик_Советский_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="7320"]'}, 'Клик_приключения_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="702"]'}, 'Клик_Детектив_жанр': {'func': self.click_xpath, 'path': './/li[@data-filter-id="693"]'}, 'Клик_применить_фильтр': {'func': self.click_xpath,'path': './/button[@class="filter__apply button js-filter-apply"]'}, 'Клик_кнопки_просмотр_от_руб': {'func': self.click_xpath,'path': './/button[@class="clip-player__action button"]'}, 'Клик_Сериалы': {'func': self.click_xpath, 'path': './/a[text()="Сериалы"]'}, # матч тв 'Вход': {'func': self.click_xpath, 'path': './/button[@class="reset-button pm-gate__button"]'}, 'Вход2': {'func': self.click_xpath, 'path': './/button[@data-action="click->pm-auth#login"]'}, # клик вход на всплывашке 'Далее': {'func': self.click_xpath, 'path': './/button[@type="submit"]'}, 'Войти': {'func': self.click_xpath, 'path': './/button[@type="submit"]'}, # Mail 'Клик_Вход_через_Mailru': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_mr js-social-link"]'}, 'Клик_Войти_и_разрешить_Mailru': {'func': self.click_xpath,'path': './/button[@class="ui-button-main"][contains(., "Войти и разрешить")]'}, # FaceBook 'Клик_Вход_через_FB': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_fb js-social-link"]'}, # login-social__link login-social__link_fb js-login-social-link 'Клик_Вход_через_VK': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_vk js-social-link"]'}, 'Клик_Вход_через_OK': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_ok js-social-link"]'}, 'Клик_Вход_через_G': {'func': self.click_xpath,'path': './/a[@class="social__link social__link_gp js-social-link"]'}, 'Клик_Вход_FB': {'func': self.click_id, 'path': 'loginbutton'}, 'Клик_Вход_VK': {'func': self.click_id, 'path': 'install_allow'}, 'Клик_Вход_ОК': {'func': self.click_xpath, 'path': './/input[@class="button-pro __wide form-actions_yes"]'}, 'Снятие_галочки_чекбокса_запомнить_меня': {'func': self.click_xpath, 'path': './/span[@class="irc-vis"]'}, 'Клик_кнопки_Далее_Google': {'func': self.click_xpath,'path': './/span[@class="RveJvd snByac"][contains(., "Далее")]'}, 'Клик_1_Далее_Google': {'func': self.click_xpath,'path': './/span[@class="RveJvd snByac"][contains(., "Далее")]'}, 'Войти_auth': {'func': self.click_id, 'path': 'authorization-submit'}, 'По_номеру_телефона': {'func': self.click_xpath, 'path': './/a[@data-target="register-phone-tab"]'}, # Подборки 'Клик_Подборки': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Подборки")]'}, 'Клик_Коллекции': {'func': self.click_xpath,'path': './/a[@class="filter__category"][contains(., "Коллекции")]'}, 'Клик_Подборки_партнеров': {'func': self.click_xpath,'path': './/a[@class="filter__category"][contains(., "Подборки партнеров")]'}, 'Клик_Детям': {'func': self.click_xpath, 'path': './/a[@class="header__link"][contains(., "Детям")]'}, 'Клик_Спецпроекты': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Спецпроекты")]'}, 'Клик_Кино_равного_доступа': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Кино равного доступа")]'}, 'Проект, где ваши дети снимаются в кино': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Проект, где ваши дети снимаются в кино")]'}, 'Клик_TVZ': {'func': self.click_xpath,'path': './/div[@class="card__title"][contains(., "Кино равного доступа")]'}, # 'Обратная_связь': {'func': self.click_xpath,'path': './/button[@class="footer__link"][contains(., "Обратная связь")]'}, 'Клик_Отправить_сообщение': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Подборки")]'}, 'Клик_Отправить_сообщение_обратная связь': {'func': self.click_xpath,'path': './/button[@class="feedback__submit button button_stretched"]'}, 'Редактировать': {'func': self.click_xpath,'path': './/a[@class="header__link"][contains(., "Редактировать")]'}, 'Клик_первого_фильма': {'func': self.click_xpath, 'path': './/div[@class="owl-item active"]'}, 'Клик_постер_первого_фильма': {'func': self.click_xpath, 'path': './/a[@class="card card_clip"]'}, 'Клик_постер_сериала_соседка_ты_дома': {'func': self.click_xpath,'path': '//a[@href="/film/sosedka-ty-doma/"]'}, 'Клик_стрелка_Вниз': {'func': self.click_tag, 'path': 'body', 'send': Keys.DOWN}, 'Переход_вниз_страницы': {'func': self.click_tag, 'path': 'body', 'send': Keys.END}, } # MainPage self.args = self.pagex[name] self.func = self.args['func'] # имя функции из словаря self.func(self.args) # Вызов нужной функции с id = path self.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен') def send_f(self, name, text, stap): step = str(stap) self.pages = { # матч тв # 'Ввод_пароля': {'func': self.send_css, 'path': 'password', 'text': text}, # 'Ввод_номера_телефона': {'func': self.send_name, 'path': 'phone', 'text': text}, # self.driver.find_element_by_xpath('.//input[@type="password"]').send_keys('Alekseybykov126') # 'Ввод_номера_телефона': {'func': self.send_, 'path': 'phone', 'text': text}, 'Ввод_в_строку_поиска': {'func': self.send_id, 'path': 'search-field', 'text': text}, 'Ввод_2_в_строку_поиска': {'func': self.send_id, 'path': 'header-search-field', 'text': text}, # Регистрация ТВЗАВР # 'Ввод_логина': {'func': self.send_name, 'path': 'email', 'text':text}, 'Ввод_логина': {'func': self.send_name, 'path': 'email-registration__address', 'text': text}, 'Ввод_пароля': {'func': self.send_css, 'path': '#register-email-password', 'text': text}, # Вход ТВЗАВР 'Ввод_логина_вход': {'func': self.send_name, 'path': 'login', 'text': text}, 'Ввод_пароля_вход': {'func': self.send_css, 'path': '#auth-password', 'text': text}, # Гугл 'Ввод_логин_Google': {'func': self.send_id, 'path': 'identifierId', 'text': text}, 'Ввод_пароль_Google': {'func': self.send_name, 'path': 'password', 'text': text}, # Mail 'Ввод_логин_Mailru': {'func': self.send_name, 'path': 'Login', 'text': text}, 'Ввод_пароля_Mailru': {'func': self.send_css, 'path': '#password', 'text': text}, # Facebook 'Ввод_пароля_FB': {'func': self.send_css, 'path': '#pass', 'text': text}, # VK 'Ввод_пароля_VK': {'func': self.send_css, 'path': '#login_submit > div > div > input:nth-child(9)','text': text}, #Одноклассники 'Ввод_логина_OK': {'func': self.send_name, 'path': 'fr.email', 'text': text}, 'Ввод_пароля_OK': {'func': self.send_css, 'path': '#field_password', 'text': text}, #'Ввод_сообщения_скайп': {'func': self.send_id, 'path': '#.public-DraftStyleDefault-block', 'text': text}, # 'Ввод_номера_телефона_reg': {'func': self.send_name, 'path': 'phone', 'text':text}, 'Ввод_СМС_пароля_reg': {'func': self.send_name, 'path': 'code', 'text': text}, 'feedback_имя_пользователя': {'func': self.send_id, 'path': 'feedback-name', 'text': text}, 'feedback_e_mail_пользователя': {'func': self.send_id, 'path': 'feedback-email', 'text': text}, 'feedback_сообщение_пользователя': {'func': self.send_id, 'path': 'feedback-decription', 'text': text}, 'Ввод_ответа_пользователю': {'func': self.send_id, 'path': 'issue_notes', 'text': text}, 'Ввод_текста_ответа_пользователя': {'func': self.send_id, 'path': 'tinymce', 'text': text}, # Админка # 'Ввод_имени_в_Redmine': {'func': self.send_id, 'path': 'username', }, # 'Ввод_номера_телефона_auth': {'func': self.send_css, 'path': '#auth-login', 'text':text}, 'Ввод_из_СМС_пароля_auth': {'func': self.send_css, 'path': '#auth-password', 'text': text}, # 'Ввод_сообщения_в_skype': {'func': self.send_css, 'path': '.public-DraftStyleDefault-block', 'text':text}, 'Ввод_суммы_пополнения_счета': {'func': self.send_name, 'path': 'price', 'text': text}, 'Ввод_профиль_old_пароля': {'func': self.send_id, 'path': 'cabinet-password-old', 'text': text}, 'Ввод_профиль_new_пароля': {'func': self.send_id, 'path': 'cabinet-password-new', 'text': text}, 'Ввод_профиль_rep_пароля': {'func': self.send_id, 'path': 'cabinet-password-repeat', 'text': text}, 'Ввод_псевдонима': {'func': self.send_id, 'path': 'name', 'text': text}, 'Админка_Ввод_в_поиск': {'func': self.send_name, 'path': 'q_q', 'text': text}, 'Ввод_номера_карты_тройка': {'func': self.send_id, 'path': 'troika-binding-textbox', 'text': text}, # Оплата картой # 'Ввод_номер_карты': {'func': self.send_name, 'path': } } self.args = self.pages[name] self.func = self.args['func'] # имя функции из словаря self.func(self.args) # Вызов нужной функции с id = path self.loger_info('Шаг ' + step + '. Клик ' + name + ' произведен') # Функция клик пропуска фулл скрина test case 9 + all Sony def click_button(self, bc): button_xpath = ('.//button[text()="%s"]' % bc) # self.loger_info(button_xpath) self.driver.find_element_by_xpath(button_xpath).click() # Функция клик кнопок верхнего меню def click_div(self, dep): up_xpath = ('.//div[text()="%s"]' % dep) self.waitForElementClickable(up_xpath, 30) time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath(up_xpath).click() # print('Клик', dep) return # Функция клик вкладок def click_li(self, dep): li_xpath = ('.//li[text()="%s"]' % dep) self.waitForElementClickable(li_xpath, 30) time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath(li_xpath).click() # print('Клик', dep) #Функция клик вкладок .//a[text()="%s"] def click_a(self, dep): a_xpath = ('.//a[text()="%s"]' % dep) self.waitForElementClickable(a_xpath, 10) self.driver.find_element_by_xpath(a_xpath).click() # print('Клик', dep) # Функция клик вкладок def click_span(self, dep): span_xpath = ('.//span[text()="%s"]' % dep) self.waitForElementClickable(span_xpath, 30) time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath(span_xpath).click() # print('Клик', dep) def click_id(self, args): dep = args['path'] self.waitForIDVisible(dep, 10) self.driver.find_element_by_id(dep).click() # print('Клик', dep) def click_name(self, args): dep = args['path'] self.waitForNameVisible(dep, 30) self.driver.find_element_by_name(dep).click() # print('Клик', dep) def click_xpath(self, args): xpath = args['path'] self.waitForElementClickable(xpath, 30) self.driver.find_element_by_xpath(xpath).click() def click_css(self, args): css = args['path'] print('css = ', css) # self.waitForElementClickable(css, 30) self.driver.find_element_by_css_selector(css).click() def click_switch_to(self, args): frame = args['path'] index = args['index'] # self.waitForElementClickable(css, 30) self.driver.switch_to.frame(self.driver.find_elements_by_tag_name(frame)[index]).click() def click_s_xpath(self, args): xpath = args['path'] index = args['index'] self.waitForElementClickable(xpath, 30) self.driver.find_elements_by_xpath(xpath)[index].click() def double(self, args): self.click_xpath(args['path_1']) self.click_xpath(args['path_2']) def click_tag(self, args): self.driver.find_element_by_tag_name(args['path']).send_keys(args['send']) #Проверка видимости элемента #Exception def tester_vis_xpath(self, xpath): self.waitForElementVisible(xpath, 5) self.driver.find_element_by_xpath(xpath) # print('Клик', dep) #Проверка кликабельности элемента def tester_click_xpath(self, xpath): self.waitForElementClickable(xpath, 25) self.driver.find_element_by_xpath(xpath) # print('Клик', dep) #Кнопки def click_play(self): xpath = '//*[@id="clip-player"]/div[16]' self.waitForElementClickable(xpath, 35) self.driver.find_element_by_xpath(xpath).click() # # print('Клик', dep) def click_stop(self): # xpath = '//*[@id="clip-player"]/div[4]' xpath = '//*[@id="clip-player"]/div[4]/div' # xpath = '//*[@id="clip-player"]/div[3]' css_sel = '#clip-player > div.tvz-button.tvz-button_play > div' # self.waitForElementClickable(xpath, 15) time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath(xpath).click() # self.driver.find_element_by_css_selector(css_sel).click() # print('Клик', dep) def click_enter(self): self.click_xpath('.//button[@class="header__login"]') def waitForElementPresent(self, xpath, timer): WebDriverWait(self.driver, timer).until(EC.presence_of_element_located((By.XPATH, xpath))) def waitForElementClickable(self, xpath, timer): WebDriverWait(self.driver, timer).until(EC.element_to_be_clickable((By.XPATH, xpath))) def waitForElementVisible(self, xpath, timer): WebDriverWait(self.driver, timer).until(EC.presence_of_element_located((By.XPATH, xpath))) def waitForNameVisible(self, name, timer): WebDriverWait(self.driver, timer).until(EC.presence_of_element_located((By.NAME, name))) def waitForIDVisible(self, id, timer): WebDriverWait(self.driver, timer).until(EC.presence_of_element_located((By.ID, id))) # OPERATIONS # Функция ввода def send_id(self, args): d_id = args['path'] txt = args['text'] # self.loger_info('path = ' + d_id + ', text = ' + txt) self.waitForIDVisible(d_id, 30) self.driver.find_element_by_id(d_id).send_keys(txt) return def send_name(self, args): d_name = args['path'] txt = args['text'] self.waitForNameVisible(d_name, 30) self.driver.find_element_by_name(d_name).send_keys(txt) return def send_css(self, args): d_name = args['path'] txt = args['text'] self.driver.find_element_by_css_selector(d_name).send_keys(txt) return # Функция ввода def input(self, dclass, data): li_xpath = ('.//input[@class="%s"]' % dclass) # self.waitForElementClickable(li_xpath, 80) time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath(li_xpath).send_keys(data) def rand_mail(self, lit): d = str(datetime.today()) ds = d.replace('-', '') d = ds.split(':')[0] d_2 = ds.split(':')[1] d_3 = d.replace(' ', '') rand = d_3 + d_2 # self.loger_info(rand) random_mail = 'tvzavrtest' + rand + lit + '@rrbbxvdr.rz' return (random_mail, rand) # def rand_number(self, lit): # d = str(datetime.today()) # ds = d.replace('-', '') # d = ds.split(':')[0] # d_2 = ds.split(':')[1] # d_3 = d.replace(' ', '') # rand = d_3 + d_2 # self.loger_info(rand) # random_number = rand + lit # return (random_number, rand) def code_phone(self, phone): self.loger_info('Получение кода на телефон: ' + phone) # url = 'http://www.tvzavr.ru/api/3.1/sms/send_confirm_code?phone=' + str(phone) + '&entity=empty&prv=smsfake_tvz' url = 'http://www.tvzavr.ru/api/3.1/sms/send_confirm_code?phone=' + str( phone) + '&entity=empty&prv=smstest_tvz' # получилось с этой self.loger_info(url) code = (requests.get(url)).text self.loger_info('code_phone ' + code) r_code = code.split(':')[3] s_code = r_code.split('"')[1] self.loger_info(s_code) return s_code # Функция проверки наличия элементов на странице. def elem(self): self.loger('Проверка элементов страницы') # Проверка наличия ссылки "Новинки"" res_txt = str(ResultPage.find_link(self, "a", "header__link")) self.loger(res_txt) # Новинки - проверочное словосочетание надписи assert ('Новинки') in res_txt self.loger('Наличие ссылки "Новинки" подтверждено') # Проверка наличия ссылки "Подписки"" res_txt = str(ResultPage.find_all_link(self, "a", "header__link")) # Подписки - проверочное словосочетание надписи assert ('Подписки') in res_txt self.loger('Наличие ссылки "Подписки" подтверждено') # Проверка наличия ссылки "Бесплатно"" # Бесплатно - проверочное словосочетание надписи assert ('Бесплатно') in res_txt self.loger('Наличие ссылки "Бесплатно" подтверждено') # Проверка наличия ссылки "Подборки" # Подборки - проверочное словосочетание надписи assert ('Подборки') in res_txt self.loger('Наличие ссылки "Подборки" подтверждено') # Проверка наличия ссылки "Каталог"" # Каталог - проверочное словосочетание надписи assert ('Каталог') in res_txt self.loger('Наличие ссылки "Каталог" подтверждено') # Проверка наличия ссылки "Детям"" # Детям - проверочное словосочетание надписи assert ('Детям') in res_txt self.loger('Наличие ссылки "Детям" подтверждено') # Проверка наличия ссылки "Вход"" res_txt = str(ResultPage.find_link(self, "button", "header__login")) # Вход - проверочное словосочетание надписи assert ('Вход') in res_txt self.loger(res_txt) self.loger('Наличие ссылки "Вход" подтверждено') def loger(self, text): logging.basicConfig(format=u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s', level=logging.DEBUG) logging.info(text) print(text) def loger_info(self, text): logging.basicConfig(format=u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s', level=logging.DEBUG) logging.info(text) print(text) def loger_error(self, text): logging.basicConfig(format=u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s', level=logging.DEBUG) logging.error(text) print(text) # def send_sms(self, phone, message): # Функция отправки смс # logging.info("Вызов функции отправки СМС") # # chrome_options = webdriver.ChromeOptions() # # chrome_options.add_argument("user-data-dir=C:\\Users\\user\\AppData\\Local\\Google\\Chrome\\User Data") # Запуск браузера с сохраненным профилем # self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe") self.driver.get("https://app.mysms.com/") self.driver.implicitly_wait(10) time.sleep(SLEEP_MEDIUM) self.driver.find_element_by_xpath('.//div[@class="gwt-Label"]').click() # Новое сообщение time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath('.//input[@class="recipientTextBox"]').send_keys(phone) time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath('.//div[@class="textarea"]').send_keys(message) time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath('.//button[@class="styledButton sendButton sim dropdown"]').click() logging.info("Клик 'Отправить' произведен, СМС подтверждения отправлено") self.driver.close() return def login_google(self, emailgo, passok): time.sleep(SLEEP_SHORT) self.send_f('Ввод_логин_Google', emailgo, 1) time.sleep(2) self.click_f('Клик_кнопки_Далее_Google', 6) time.sleep(2) self.send_f('Ввод_пароль_Google', passok, 1) time.sleep(2) self.click_f('Клик_кнопки_Далее_Google', 6) time.sleep(2) return # def login_match(self, emailt, passw): # time.sleep(SLEEP_SHORT) # self.send_f('Ввод_name_логина', emailt, 2) # self.send_f('Ввод_пароля_tvz', passw, 3) # self.click_f('Клик_Войти_auth', 4) # time.sleep(SLEEP_MEDIUM) # return 06.08.2019 def login_matchtv(self, num_phone, passw): self.click_f('Вход', 1) time.sleep(1) self.click_f('Вход2', 2) time.sleep(2) self.driver.find_element_by_xpath('.//input[@type="tel"]').send_keys(num_phone) self.loger_info('Введён номер телефона: +7 ' + num_phone) time.sleep(1) self.click_f('Далее', 3) time.sleep(1) self.driver.find_element_by_xpath('.//input[@type="password"]').send_keys(passw) self.loger_info('Введён пароль: ' + passw) time.sleep(1) self.click_f('Войти', 4) time.sleep(1) return # def login_tvzavr(self, email, passw, sex): # result = ResultPage(self.driver) # prof = Profile(self.driver) # self.waitForElementVisible('.//button[@class="header__login"]', 7) # resic = result.find_link("button", "header__login") # if "Вход" not in resic: # if sex == 'male': # prof.click_f('Клик_Аватарка_М', 1) # else: # prof.click_f('Клик_Аватарка_Ж', 1) # # prof.click_f('Клик_Выйти', 1) # else: # # Шаг 1 Нажать в шапке на кнопку "Вход".') # self.click_f('Клик_Вход', 1) # time.sleep(SLEEP_SHORT) # # Шаг 2 # self.send_f('Ввод_name_логина', email, 2) # self.send_f('Ввод_пароля_Google', passw, 3) # self.click_f('Клик_Войти_auth', 4) # time.sleep(SLEEP_LONG) # try: # self.click_f('Клик_кнопки_крестик', 6) # except: # print('нет акции мицубиси') # Шаг 5 # if sex == 'male': 1.08.2019 # prof.click_f('Клик_Аватарка_М', 7) # else: # prof.click_f('Клик_Аватарка_Ж', 8) # time.sleep(SLEEP_MEDIUM) # return def login_mailru(self, emailru, passw): self.send_id('mailbox:login', emailru) self.loger_info('Ввод логина на mail.ru ' + emailru + 'произведен') time.sleep(SLEEP_SHORT) self.send_id('mailbox:password', passw) self.loger_info('Ввод пароля на mail.ru произведен') time.sleep(SLEEP_SHORT) self.driver.find_element_by_xpath('.//input[@class="o-control"]').click() self.loger_info('Клик кнопки "Войти" на mail.ru произведен') time.sleep(SLEEP_MEDIUM) def registration(self, email, passw): result = ResultPage(self.driver) prof = Profile(self.driver) resic = result.find_link("button", "header__login tvz-unauthorized") if "Вход" not in resic: prof.click_f('Клик_Аватарка_М', 1) self.loger_info('Шаг 0 Клик на аватарку пользователя произведен') self.driver.find_element_by_xpath('.//button[@class="profile-menu__logout js-profile-logout"]').click() time.sleep(SLEEP_SHORT) else: self.click_enter() self.loger_info('Шаг 1 Клик "Вход" произведен') # Шаг 2 self.click_a('Регистрация') self.loger_info('Шаг 2 Клик "Регистрация" произведен') time.sleep(SLEEP_SHORT) # Шаг 3 self.login('email', email) print('Шаг 3 Ввод логина', email, 'произведен') # Шаг 4 self.driver.find_element_by_css_selector('#register-email-password').send_keys(passw) # page.login('password', passw) self.loger_info('Шаг 4 Ввод пароля произведен') time.sleep(SLEEP_SHORT) # Шаг 5 self.driver.find_element_by_id('register-email-submit').click() self.loger_info('Шаг 5 Клик "Зарегистрироваться" произведен') time.sleep(7) prof.click_f('Клик_Аватарка_М', 5) self.loger_info('Шаг 6 Клик на аватарку пользователя произведен') self.waitForElementVisible('.//div[@class="profile-menu__name __username"]', 7) # Проверка авторизации пользователя" resic = str(result.find_link("div", "profile-menu__name __username")) # email - проверочное словосочетание надписи assert (email) in resic self.loger_info('Авторизация зарегистрированного пользователя с е-майлом ' + email + ' подтверждена') time.sleep(SLEEP_SHORT) def input_card(self, number, month, year, name_card, cvv): result = ResultPage(self.driver) self.driver.find_elements_by_xpath('.//input[@class="payment-cloudpayments__field textbox js-input"]')[ 0].send_keys(number) self.loger_info('Шаг 10 Ввод номера карты произведен ' + number) time.sleep(3) self.driver.find_elements_by_xpath('.//select[@class="dropdown js-input"]')[0].click() time.sleep(1) self.driver.find_element_by_xpath('.//option[@value="%s"]' % month).click() self.loger_info('Ввод месяца карты произведен') time.sleep(3) self.driver.find_elements_by_xpath('.//select[@class="dropdown js-input"]')[1].click() self.driver.find_element_by_xpath('.//option[@value="%s"]' % year).click() self.loger_info('Ввод года карты произведен') time.sleep(3) # Заполнить поле "Имя держателя" - Ivanov Ivan self.driver.find_elements_by_xpath('.//input[@class="payment-cloudpayments__field textbox js-input"]')[ 1].send_keys(name_card) self.loger_info('Ввод имени держателя карты произведен') time.sleep(3) # Заполнить поле "CVV код" - 526 self.driver.find_element_by_xpath( './/input[@class="payment-cloudpayments__field payment-cloudpayments__field_cvc textbox js-input"]').send_keys( cvv) self.loger_info('Ввод CVV код карты произведен') time.sleep(4) #Снять галочку "Сохранить данные карты" # self.driver.find_element_by_xpath('.//span[@class="toggle__label"]').click() # self.loger_info('Снятие галочки в чек-боксе"Сохранить данные карты" произведено') # Нажать кнопку "Оплатить"clip-watch #self.driver.find_element_by_xpath('.//button[@class="payment-cloudpayments__pay button button_stretched js-buy-button"]').click() #self.loger_info('Клик "Оплатить" произведен') #time.sleep(1) # message = str(result.find_link("section", "tvz-alerts tvz-animation-fadeOut")) # self.loger_info('Сообщение внизу формы оплаты:') # self.loger_info('message:' + message) def delete_mails(self, emailgo, passgo): # self.driver.get('https://mail.google.com') self.loger_info('Шаг 5 Переход на gmail.com произведен') # self.login_google(emailgo, passgo) time.sleep(SLEEP_SHORT) # self.driver.get('https://mail.google.com/mail/u/0/#inbox') # Удаление письма из почты self.click_xpath('.//div[@class="J-J5-Ji J-JN-M-I-Jm"]') # self.driver.find_element_by_id(':3d').click() self.loger_info('Поставлена галочка чекбокс - выбор письма') self.click_xpath('//*[@id=":5"]/div/div[1]/div[1]/div/div/div[2]/div[3]') # self.click_xpath('.//div[@class="T-I J-J5-Ji nX T-I-ax7 T-I-Js-Gs mA"]') self.loger_info('Клик кнопки "Удалить" письмо на gmail.com произведен') time.sleep(SLEEP_SHORT) @property def consol_jenkins(self): print('Запуск проверки консоли') # p = subprocess.call('ffmpeg.exe -framerate 10 -f image2 -i "Frame%03d.jpg" -r 10 -s 620x380 Video.avi', shell=True) # options = webdriver.ChromeOptions() #self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe", chrome_options=options) options.add_argument('--incognito') # Запуск браузера в режиме инкогнито # self.driver = webdriver.Chrome(options=options) #self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe") self.driver.get("http://192.168.2.31:8080/jenkins/job/1_Regress/") self.driver.maximize_window() self.driver.implicitly_wait(10) page = MainPage(self.driver) result = ResultPage(self.driver) self.send_name('j_username', 'admin') self.send_name('j_password', 'admin') self.click_xpath('.//div[@class="Checkbox-indicator"]') self.click_name('Submit') self.driver.implicitly_wait(5) self.driver.find_elements_by_xpath('.//td[@class="build-row-cell"]')[0].click() self.click_a('Вывод консоли') res = self.driver.find_element_by_xpath('.//pre[@class="console-output"]').text self.driver.close() return res def mail_send_web(self, login, passw): self.driver.execute_script("window.open('','_blank');") time.sleep(2) self.driver.switch_to.window(self.driver.window_handles[1]) time.sleep(2) result = ResultPage(self.driver) self.driver.get("https://e.mail.ru/login") self.driver.maximize_window() self.driver.implicitly_wait(10) self.loger_info(' Переход в mail произведен') time.sleep(2) # # # self.driver.find_element_by_name("Login").send_keys('testmailtvzavr15') self.driver.find_element_by_xpath('.//*[@id="root"]/div/div[3]/div/div/div/form/div[2]/div[2]/div[1]/div/div/div/div/div/div[1]')[0].click() self.driver.find_element_by_xpath('.//*[@id="root"]/div/div[3]/div/div/div/form/div[2]/div[2]/div[1]/div/div/div/div/div/div[1]').send_keys('testmailtvzavr15') time.sleep(2) # self.driver.find_element_by_xpath('.//span[@class="c01104 c0179 c01102 c0177"]').click() # Клик далее time.sleep(3) def scype_send_web(self, login, passw): self.driver.execute_script("window.open('','_blank');") time.sleep(2) self.driver.switch_to.window(self.driver.window_handles[1]) time.sleep(2) result = ResultPage(self.driver) # options = webdriver.ChromeOptions() # options.add_argument("--disable-notifications") # options.add_argument('--incognito') # Запуск браузера в режиме инкогнито # self.driver = webdriver.Chrome(options=options) #self.driver = webdriver.Chrome("C:\chromedriver\chromedriver.exe") self.driver.get("https://web.skype.com/ru/") self.driver.maximize_window() self.driver.implicitly_wait(10) self.loger_info(' Переход в Skype произведен') time.sleep(2) self.driver.find_element_by_name("loginfmt").send_keys('79776410337') time.sleep(2) self.driver.find_element_by_xpath('.//input[@id="idSIButton9"]').click() # Клик далее time.sleep(3) self.driver.find_element_by_name("passwd").send_keys('Aleh1260337') self.loger_info('Ввод Skype пароля произведен') time.sleep(3) self.driver.find_element_by_xpath('.//input[@id="idSIButton9"]').click() # Клик вход self.loger_info('Клик Вход произведен') time.sleep(3) self.driver.find_element_by_xpath('.//div[@id="rx-vlv-6"]').click() # Клик по диалогу self.loger_info('Шаг 100. Переход в чат Деплоймент произведен') time.sleep(10) #self.driver.find_element_by_css_selector(".public-DraftStyleDefault-block").click() # Клик по полю ввода time.sleep(15) #print('тут2222') #time.sleep(2) # self.driver.find_element_by_xpath('/html/body/div[1]/div/div[1]/div[2]/div/div[1]/div/div[2]/div/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[1]/div/div/div/div/div[2]/div[3]').send_keys('text') # self.send_f('Ввод_сообщения_скайп', 'text', 15) # self.driver.find_element_by_xpath('/html/body/div[1]/div/div[1]/div[2]/div/div[1]/div/div[2]/div/div[2]/div/div/div/div/div/div[2]/div[2]/div/div/div[1]/div/div/div/div/div[2]').send_keys('text') #self.driver.find_element_by_css_selector('[id="#.public-DraftStyleDefault-block"]').send_keys('text') #self.page.loger('тут') # self.click_xpath('//*[@id="swxContent1"]/swx-navigation/div/div/div/label/div/div/div[2]/div[2]/div/swx-button/button') # self.loger_info('Отправка текста в чат Деплоймент произведена') # self.driver.close() # def delete_uzer(self, name): # ФУНКЦИЯ УДАЛЕНИЯ ПОЛЬЗОВАТЕЛЯ ИЗ АДМИНКИ # self.driver.execute_script("window.open('','_blank');") # time.sleep(2) # self.driver.switch_to.window(self.driver.window_handles[1]) # time.sleep(2) # self.driver.get("https://www.tvzavr.ru:8080/admin/") # time.sleep(2) # # 'Открытие страницы админки # self.driver.maximize_window() # time.sleep(3) # self.driver.implicitly_wait(7) # self.driver.find_element_by_xpath( # './/a[@href="https://accounts.google.com/o/oauth2/auth?client_id=245544346256-4luf263ioa376hp89q5k08otplt9dvdh.apps.googleusercontent.com&scope=openid%20profile%20email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fplus.login&redirect_uri=http://www.tvzavr.ru:8080/oauth2callback/&response_type=code"]').click() # time.sleep(3) # # Логинимся через Google # # emailgo = '[email protected]' # passok = 'tmW9HZvaksgc' # # self.send_f('Ввод_логин_Google', emailgo, 1) # time.sleep(2) # self.click_f('Клик_кнопки_Далее_Google', 6) # time.sleep(2) # self.send_f('Ввод_пароль_Google', passok, 1) # time.sleep(2) # self.click_f('Клик_кнопки_Далее_Google', 6) # time.sleep(6) # # Вошли в админку # # self.driver.find_element_by_xpath( # './/a[@href="/admin/tvzavr_admin/customer/"]').click() # Клик на "Профили посетителей" # time.sleep(3) # self.send_f('Админка_Ввод_в_поиск', name, 16) # Ввод имени пользователя # time.sleep(3) # # self.driver.find_element_by_xpath('.//input[@value="Найти"]').click() # Клик найти # time.sleep(2) # # self.driver.find_element_by_xpath( # './/input[@id="action-toggle"]').click() # Клик по чекбоксу(ставит галочку) # time.sleep(2) # # self.driver.find_element_by_xpath('.//select[@name="action"]').click() # Клик на поле "Действие" # time.sleep(2) # self.driver.find_element_by_css_selector( # '#action_block > label > select > option:nth-child(14)').click() # Выбор "Удалить" # time.sleep(2) # self.driver.find_element_by_xpath('.//*[@id="action_block"]/button').click() # Клик на "Выполнить" # time.sleep(3) # self.driver.find_element_by_xpath('.//*[@id="content"]/form/div[2]/input[4]').click() # Клик на "Да, уверен" # time.sleep(2) # self.driver.switch_to.window(self.driver.window_handles[-1]) # time.sleep(2) # def delete_comments(self, name): # ФУНКЦИЯ УДАЛЕНИЯ комментариев ПОЛЬЗОВАТЕЛЯ # self.driver.execute_script("window.open('','_blank');") # time.sleep(2) # self.driver.switch_to.window(self.driver.window_handles[1]) # time.sleep(2) # self.driver.get("https://www.tvzavr.ru:8080/admin/") # time.sleep(2) # # 'Открытие страницы админки # self.driver.maximize_window() # time.sleep(3) # self.driver.implicitly_wait(7) # self.driver.find_element_by_xpath( # './/a[@href="https://accounts.google.com/o/oauth2/auth?client_id=245544346256-4luf263ioa376hp89q5k08otplt9dvdh.apps.googleusercontent.com&scope=openid%20profile%20email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fplus.login&redirect_uri=http://www.tvzavr.ru:8080/oauth2callback/&response_type=code"]').click() # time.sleep(3) # # Логинимся через Google # # emailgo = '[email protected]' # passok = 'tmW9HZvaksgc' # # self.send_f('Ввод_логин_Google', emailgo, 1) # time.sleep(2) # self.click_f('Клик_кнопки_Далее_Google', 6) # time.sleep(2) # self.send_f('Ввод_пароль_Google', passok, 1) # time.sleep(2) # self.click_f('Клик_кнопки_Далее_Google', 6) # time.sleep(6) # # Вошли в админку # # self.driver.find_element_by_xpath('.//a[@href="/admin/tvzavr_admin/customer/"]').click() # Клик на "Профили посетителей" # time.sleep(3) # self.send_f('Админка_Ввод_в_поиск', name, 16) # Ввод имени пользователя # time.sleep(3) # # self.driver.find_element_by_xpath('.//input[@value="Найти"]').click() # Клик найти # time.sleep(2) # # self.driver.find_element_by_xpath( # './/input[@id="action-toggle"]').click() # Клик по чекбоксу(ставит галочку) # time.sleep(2) # # self.driver.find_element_by_xpath('.//select[@name="action"]').click() # Клик на поле "Действие" # time.sleep(2) # self.driver.find_element_by_xpath('.//option[@value="remove_comments"]').click() # # self.driver.find_element_by_css_selector('#action_block > label > select > option:nth-child(14)').click() # Выбор "Удалить" # time.sleep(2) # self.driver.find_element_by_xpath('.//*[@id="action_block"]/button').click() # Клик на "Выполнить" # time.sleep(3) # self.driver.find_element_by_xpath( # './/input[@id="action-toggle"]').click() # Клик по чекбоксу(ставит галочку) # time.sleep(2) # # self.driver.find_element_by_xpath('.//*[@id="content"]/form/div[2]/input[4]').click() # Клик на "Да, уверен" # # time.sleep(2) # self.driver.switch_to.window(self.driver.window_handles[-1]) # time.sleep(2)
'Клик_кнопки_крестик': {'func': self.click_xpath, 'path': './/button[@class="modal__close"]'},
conveyor.py
import os import time import shutil import hashlib import mimetypes from django.core.files.storage import FileSystemStorage from . import settings class VersionGenerationError(Exception): pass class Conveyor(object): # convention: storage should operate files on local filesystem # to allow processors use system file operation functions storage_allowed = (FileSystemStorage,) storage = None def __init__(self, *args, **kwargs): if not self.storage or not isinstance(self.storage, self.storage_allowed): raise ValueError('Conveyor storage should' ' be in storage_allowed (local fs).') def run(self, filever, force=False): raise NotImplementedError class
(Conveyor): def __init__(self, *args, **kwargs): self.storage = FileSystemStorage(location=settings.TEMPORARY_DIR) super(TempFileConveyor, self).__init__(*args, **kwargs) def run(self, filever, force=False): source_file = filever.source_file dest_storage = filever.storage() replace_mode = False # check self processing (equality of source and destination) if dest_storage.path(filever.path) == dest_storage.path( source_file.path) and filever.attrname == 'self': replace_mode = True # check file existance and force if not replace_mode and dest_storage.exists(filever.path): if not force: return dest_storage.delete(filever.path) # open (rb mode) source file source_closed = source_file.closed source_closed and source_file.open() # get hasher md5hash = hashlib.md5() md5hash.update('{}@{}'.format(source_file.name, time.time()).encode('utf-8', 'ignore')) # create temporary file and get mimetype tempname = os.path.splitext(source_file.name) tempname = '%s%s' % (md5hash.hexdigest(), tempname[1]) tempname = self.storage.save(tempname, source_file) mimetype = mimetypes.guess_type(tempname) # close source source_closed and source_file.close() # safe processors call and close source status = True try: # run processors conveyor for processor in filever.processors(): tempname, mimetype = processor.run(tempname, mimetype, self.storage, filever) if not tempname: break except Exception as e: status = False # alter default exception message message = ('File version "%s" generation error for "%s" at %s.' ' Real reason is: %%s' % (filever.attrname, source_file.name, processor.__class__)) e.args = tuple([message % e.args[0]] + list(e.args[1:])) raise else: if status: # save target file with destination storage # todo: check new filename correctness if replace_mode: dest_storage.delete(filever.path) with self.storage.open(tempname) as tempfile: dest_storage.save(filever.path, tempfile) finally: # delete temporary # warning: delete is unsafe with locks (especially write mode locks) # that means that each processor have to be extremally # safety with opened file pointers self.storage.delete(tempname) if not status: status = ('File version "%s" generation error for "%s" at %s.' % (filever.attrname, source_file.name, processor.__class__)) raise VersionGenerationError(status)
TempFileConveyor
StateService.ts
/*- * #%L * thinkbig-ui-feed-manager * %% * Copyright (C) 2017 ThinkBig Analytics * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ /** * ui-router service. Controllers that link/navigate to other controllers/pages use this service. * See the corresponding name references in app.js */ import * as angular from 'angular'; import { moduleName } from './module-name'; export default class
{ Auth: any; FeedManager: any; OpsManager: any; Search: any; Tables: any; Categories: any; static $inject = ["$state"]; constructor(private $state: any) { this.Auth = this.AuthStates(); this.FeedManager = this.FeedManagerStates; this.OpsManager = this.OpsManagerStates; this.Search = this.SearchStates; this.Tables = this.TableStates; this.Categories = this.CategoryStates; } AuthStates = () => { var data: any = {} /** * Navigates to the Groups page. */ data.navigateToGroups = ()=> { this.$state.go("groups"); }; /** * Navigates to the Group Details page. * * @param {string} [opt_groupId] the system name of the group */ data.navigateToGroupDetails = (opt_groupId: string)=> { var safeGroupId: any = angular.isString(opt_groupId) ? encodeURIComponent(opt_groupId) : null; this.$state.go("group-details", {groupId: safeGroupId}); }; /** * Navigates to the Users page. */ data.navigateToUsers = ()=> { this.$state.go("users"); }; /** * Navigates to the User Details page. * * @param {string} [opt_userId] the system name of the user */ data.navigateToUserDetails = (opt_userId: string)=> { var safeUserId: any = angular.isString(opt_userId) ? encodeURIComponent(opt_userId) : null; this.$state.go("user-details", {userId: safeUserId}); }; return data; } TemplateStates = () => { var data: any = {}; data.navigateToRegisterNewTemplate = () => { this.$state.go('register-new-template'); } data.navigateToRegisterTemplateComplete = (message: any, templateModel: any, error: any) => { this.$state.go('register-template-complete', {message: message, templateModel: templateModel, error: error}); } data.navigateToImportTemplate = () => { this.$state.go('import-template'); } data.navigateToRegisterNifiTemplate = () => { this.$state.go('register-template', {registeredTemplateId: null, nifiTemplateId: null}); } data.navigateToRegisteredTemplate = (templateId: any, nifiTemplateId: any) => { this.$state.go('register-template', {registeredTemplateId: templateId, nifiTemplateId: nifiTemplateId}); } data.navigateToTemplateInfo = (templateId: any, nifiTemplateId: any) => { this.$state.go('template-info', {registeredTemplateId: templateId, nifiTemplateId: nifiTemplateId}); } /** * Navigates to the Templates page. */ data.navigateToRegisteredTemplates = () => { this.$state.go("registered-templates"); }; return data; } FeedStates = () => { var data: any = {}; data.navigateToFeedDetails = (feedId: any, tabIndex: any) => { if (tabIndex == null || tabIndex == undefined) { tabIndex = 0; } this.$state.go('feed-details', {feedId: feedId, tabIndex: tabIndex}); } data.navigateToEditFeedInStepper = (feedId: any) => { this.$state.go('edit-feed', {feedId: feedId}); } data.navigateToDefineFeed = (templateId: any) => { this.$state.go('define-feed', {templateId: templateId}); } data.navigateToCloneFeed = (feedName: any) => { this.$state.go('define-feed', {templateId: null,bcExclude_cloning:true,bcExclude_cloneFeedName:feedName}); } data.navigateToDefineFeedComplete = (feedModel: any, error: any) => { this.$state.go('define-feed-complete', {feedModel: feedModel, error: error}); } data.navigateToFeeds = () => { this.$state.go('feeds'); } data.navigatetoImportFeed = () => { this.$state.go('import-feed'); } return data; } ProfileStates = () => { var data: any = {}; data.navigateToProfileSummary = (feedId: any) => { this.$state.go('feed-details.profile-summary', {feedId: feedId}) } data.navigateToProfileValidResults = (feedId: any, processingdttm: any) => { this.$state.go('feed-details.profile-valid', {feedId: feedId, processingdttm: processingdttm}) } data.navigateToProfileInvalidResults = (feedId: any, processingdttm: any) => { this.$state.go('feed-details.profile-invalid', {feedId: feedId, processingdttm: processingdttm}) } data.navigateToProfileStats = (feedId: any, processingdttm: any) => { this.$state.go('feed-details.profile-stats', {feedId: feedId, processingdttm: processingdttm}) } return data; } TableStates = () => { var data: any = {}; data.navigateToSchemas = (datasource: any) => { this.$state.go('schemas', {datasource: datasource}); }; data.navigateToTables = (datasource: any, schema: any) => { this.$state.go('schemas-schema', {datasource: datasource, schema: schema}); }; data.navigateToTable = (datasource: any, schema: any, table: any) => { this.$state.go('schemas-schema-table', {datasource: datasource, schema: schema, tableName: table}); }; return data; }; SlaStates = () => { var data: any = {}; data.navigateToServiceLevelAgreements = () => { this.$state.go('service-level-agreements'); } data.navigateToServiceLevelAgreement = (slaId: any) => { this.$state.go('service-level-agreements',{slaId:slaId}); } data.navigateToNewEmailTemplate = (templateId: any) => { this.$state.go('sla-email-template',{emailTemplateId:templateId}); } data.navigateToEmailTemplates = () => { this.$state.go('sla-email-templates'); } return data; } CategoryStates = () => { var data: any = {}; data.navigateToCategoryDetails = (categoryId: any) => { this.$state.go('category-details', {categoryId: categoryId}); } data.navigateToCategories = () => { this.$state.go('categories'); } return data; } SearchStates = () => { var data: any = {}; data.navigateToSearch = (resetPaging: any) => { if (angular.isUndefined(resetPaging)) { resetPaging = false; } this.$state.go('search', {"bcExclude_globalSearchResetPaging":resetPaging}); } return data; } DatasourceStates = () => { return { navigateToDatasourceDetails: (opt_datasourceId: any) => { var safeDatasourceId = angular.isString(opt_datasourceId) ? encodeURIComponent(opt_datasourceId) : null; this.$state.go("datasource-details", {datasourceId: safeDatasourceId}); }, navigateToDatasources: () => { this.$state.go("datasources"); } }; }; DomainTypeStates = () => { return { navigateToDomainTypeDetails: (opt_domainTypeId: any) => { var safeDomainTypeId : any= angular.isString(opt_domainTypeId) ? encodeURIComponent(opt_domainTypeId) : null; this.$state.go("domain-type-details", {domainTypeId: safeDomainTypeId}); }, navigateToDomainTypes: () => { this.$state.go("domain-types"); } } }; FeedManagerStates = () => { var data: any = {}; data.Category = this.CategoryStates; data.Feed = this.FeedStates; data.Sla = this.SlaStates; data.Template = this.TemplateStates; data.Table = this.TableStates; data.Profile = this.ProfileStates; data.Datasource = this.DatasourceStates; data.DomainType = this.DomainTypeStates; return data; } OpsManagerJobStates = () => { var data: any = {}; data.navigateToJobDetails = (executionId: any) => { this.$state.go('job-details', {executionId: executionId}); } data.navigateToJobs = (tab: any,filter: any) => { this.$state.go('jobs', {tab:tab,filter: filter}); } return data; } OpsManagerFeedStates = () => { var data: any = {}; data.navigateToFeedDetails = (feedName: any) => { this.$state.go('ops-feed-details', {feedName: feedName}); } data.navigateToFeedStats = (feedName: any) => { this.$state.go('feed-stats', {feedName: feedName}); } return data; } OpsManagerServiceStates = () => { var data: any = {}; data.navigateToServiceDetails = (serviceName: any) => { this.$state.go('service-details', {serviceName: serviceName}); } data.navigateToServiceComponentDetails = (serviceName: any, componentName: any) => { this.$state.go('service-component-details', {serviceName: serviceName, componentName: componentName}); } return data; } AlertStates = () => { var data: any = {}; /** * Navigates to the details page for the specified alert. * @param {string} alertId the id of the alert */ data.navigateToAlertDetails = (alertId: any) => { this.$state.go("alert-details", {alertId: alertId}); }; data.navigateToAlerts = (query: any) => { this.$state.go("alerts", {query: query}); }; return data; } SlaAssessmentStates = () => { var data: any = {}; data.navigateToServiceLevelAssessments = (filter: any) => { filter = angular.isUndefined(filter) ? '' : filter; this.$state.go('service-level-assessments',{filter:filter}); } data.navigateToServiceLevelAssessment = (assessmentId: any) => { this.$state.go('service-level-assessment',{assessmentId:assessmentId}); } return data; } OpsManagerStates = () => { var data: any = {}; data.dashboard = () => { this.$state.go('dashboard'); } data.Feed = this.OpsManagerFeedStates; data.Job = this.OpsManagerJobStates; data.ServiceStatus = this.OpsManagerServiceStates data.Alert = this.AlertStates; data.Sla = this.SlaAssessmentStates; return data; } go = (state: any, params: any) => { this.$state.go(state, params); } navigateToHome = () => { this.$state.go("home"); }; } angular.module(moduleName).service('StateService', StateService);
StateService
rule_1.py
def findDecision(obj): #obj[0]: Driving_to, obj[1]: Passanger, obj[2]: Weather, obj[3]: Temperature, obj[4]: Time, obj[5]: Coupon, obj[6]: Coupon_validity, obj[7]: Gender, obj[8]: Age, obj[9]: Maritalstatus, obj[10]: Children, obj[11]: Education, obj[12]: Occupation, obj[13]: Income, obj[14]: Bar, obj[15]: Coffeehouse, obj[16]: Restaurantlessthan20, obj[17]: Restaurant20to50, obj[18]: Direction_same, obj[19]: Distance # {"feature": "Age", "instances": 127, "metric_value": 0.9978, "depth": 1} if obj[8]>1: # {"feature": "Education", "instances": 88, "metric_value": 0.9865, "depth": 2} if obj[11]<=3: # {"feature": "Coupon", "instances": 84, "metric_value": 0.9737, "depth": 3}
elif obj[11]>3: return 'True' else: return 'True' elif obj[8]<=1: # {"feature": "Restaurant20to50", "instances": 39, "metric_value": 0.8213, "depth": 2} if obj[17]<=1.0: # {"feature": "Occupation", "instances": 25, "metric_value": 0.5294, "depth": 3} if obj[12]<=20: # {"feature": "Income", "instances": 22, "metric_value": 0.2668, "depth": 4} if obj[13]<=6: return 'True' elif obj[13]>6: return 'False' else: return 'False' elif obj[12]>20: # {"feature": "Time", "instances": 3, "metric_value": 0.9183, "depth": 4} if obj[4]>0: return 'False' elif obj[4]<=0: return 'True' else: return 'True' else: return 'False' elif obj[17]>1.0: # {"feature": "Passanger", "instances": 14, "metric_value": 1.0, "depth": 3} if obj[1]<=2: # {"feature": "Income", "instances": 11, "metric_value": 0.9457, "depth": 4} if obj[13]>2: # {"feature": "Coupon", "instances": 6, "metric_value": 0.9183, "depth": 5} if obj[5]>2: return 'False' elif obj[5]<=2: # {"feature": "Weather", "instances": 3, "metric_value": 0.9183, "depth": 6} if obj[2]<=1: return 'True' elif obj[2]>1: return 'False' else: return 'False' else: return 'True' elif obj[13]<=2: return 'True' else: return 'True' elif obj[1]>2: return 'False' else: return 'False' else: return 'True' else: return 'True'
if obj[5]>0: # {"feature": "Direction_same", "instances": 73, "metric_value": 0.9934, "depth": 4} if obj[18]<=0: # {"feature": "Occupation", "instances": 63, "metric_value": 0.9691, "depth": 5} if obj[12]>1: # {"feature": "Bar", "instances": 57, "metric_value": 0.9348, "depth": 6} if obj[14]<=2.0: # {"feature": "Restaurantlessthan20", "instances": 52, "metric_value": 0.8905, "depth": 7} if obj[16]>1.0: # {"feature": "Income", "instances": 46, "metric_value": 0.8281, "depth": 8} if obj[13]<=6: # {"feature": "Restaurant20to50", "instances": 43, "metric_value": 0.8542, "depth": 9} if obj[17]<=1.0: # {"feature": "Driving_to", "instances": 28, "metric_value": 0.7496, "depth": 10} if obj[0]<=1: # {"feature": "Maritalstatus", "instances": 21, "metric_value": 0.5917, "depth": 11} if obj[9]>0: return 'False' elif obj[9]<=0: # {"feature": "Passanger", "instances": 8, "metric_value": 0.9544, "depth": 12} if obj[1]>0: # {"feature": "Coupon_validity", "instances": 7, "metric_value": 0.8631, "depth": 13} if obj[6]>0: # {"feature": "Temperature", "instances": 4, "metric_value": 1.0, "depth": 14} if obj[3]>55: # {"feature": "Coffeehouse", "instances": 3, "metric_value": 0.9183, "depth": 15} if obj[15]>1.0: return 'False' elif obj[15]<=1.0: return 'True' else: return 'True' elif obj[3]<=55: return 'True' else: return 'True' elif obj[6]<=0: return 'False' else: return 'False' elif obj[1]<=0: return 'True' else: return 'True' else: return 'False' elif obj[0]>1: # {"feature": "Coupon_validity", "instances": 7, "metric_value": 0.9852, "depth": 11} if obj[6]>0: return 'False' elif obj[6]<=0: return 'True' else: return 'True' else: return 'False' elif obj[17]>1.0: # {"feature": "Time", "instances": 15, "metric_value": 0.971, "depth": 10} if obj[4]<=1: # {"feature": "Maritalstatus", "instances": 8, "metric_value": 0.5436, "depth": 11} if obj[9]<=1: return 'False' elif obj[9]>1: # {"feature": "Weather", "instances": 2, "metric_value": 1.0, "depth": 12} if obj[2]<=0: return 'False' elif obj[2]>0: return 'True' else: return 'True' else: return 'False' elif obj[4]>1: # {"feature": "Coffeehouse", "instances": 7, "metric_value": 0.8631, "depth": 11} if obj[15]>0.0: return 'True' elif obj[15]<=0.0: # {"feature": "Coupon_validity", "instances": 3, "metric_value": 0.9183, "depth": 12} if obj[6]>0: return 'False' elif obj[6]<=0: return 'True' else: return 'True' else: return 'False' else: return 'True' else: return 'False' elif obj[13]>6: return 'False' else: return 'False' elif obj[16]<=1.0: # {"feature": "Maritalstatus", "instances": 6, "metric_value": 0.9183, "depth": 8} if obj[9]<=0: return 'True' elif obj[9]>0: # {"feature": "Temperature", "instances": 3, "metric_value": 0.9183, "depth": 9} if obj[3]>30: return 'False' elif obj[3]<=30: return 'True' else: return 'True' else: return 'False' else: return 'True' elif obj[14]>2.0: # {"feature": "Time", "instances": 5, "metric_value": 0.7219, "depth": 7} if obj[4]<=2: return 'True' elif obj[4]>2: return 'False' else: return 'False' else: return 'True' elif obj[12]<=1: # {"feature": "Children", "instances": 6, "metric_value": 0.65, "depth": 6} if obj[10]>0: return 'True' elif obj[10]<=0: return 'False' else: return 'False' else: return 'True' elif obj[18]>0: # {"feature": "Occupation", "instances": 10, "metric_value": 0.7219, "depth": 5} if obj[12]>5: return 'True' elif obj[12]<=5: # {"feature": "Driving_to", "instances": 4, "metric_value": 1.0, "depth": 6} if obj[0]<=1: # {"feature": "Maritalstatus", "instances": 3, "metric_value": 0.9183, "depth": 7} if obj[9]<=1: return 'False' elif obj[9]>1: return 'True' else: return 'True' elif obj[0]>1: return 'True' else: return 'True' else: return 'True' else: return 'True' elif obj[5]<=0: # {"feature": "Passanger", "instances": 11, "metric_value": 0.4395, "depth": 4} if obj[1]>0: return 'False' elif obj[1]<=0: return 'True' else: return 'True' else: return 'False'
decrypt.py
#!/usr/bin/env python3 import binascii import hashlib import os.path import struct import sys sys.path.insert(0, os.path.dirname(__file__)) import rc4 # Helper functions def rol32(val, shift): val = val & 0xffffffff shift = shift & 0x1f if not shift: return val return ((val << shift) & 0xffffffff) | (val >> (32 - shift)) def ror32(val, shift): return rol32(val, 32 - shift) # Load encrypted data with open('extracted_encrypted_data.bin', 'rb') as f: all_data = f.read() enc_data = all_data[0x40:] def rc6_decrypt(ks, enc_block): """https://en.wikipedia.org/wiki/RC6""" a, b, c, d = enc_block a -= ks[0xa8 // 4] c -= ks[0xac // 4] for iround in range(19, -1, -1): a, b, c, d = [x & 0xffffffff for x in (d, a, b, c)]
t = ror32(b * (2 * b + 1), 5) c = rol32(c - ks[2 * iround + 3], t) ^ u a = rol32(a - ks[2 * iround + 2], u) ^ t d = (d - ks[1]) & 0xffffffff b = (b - ks[0]) & 0xffffffff return a, b, c, d # TODO: key derivation with key "551C2016B00B5F00" key_state = [ 0x2129ab75, 0x975374c8, 0x5eead5ac, 0x2c8b312f, 0xfd0a1322, 0x80d0133c, 0x16a849c2, 0x42064c4a, 0x75fe77f5, 0x4ddaf4d7, 0xe9221458, 0x46a97a25, 0xfea74495, 0xe119d517, 0x055f2605, 0xc6706c81, 0x4d966822, 0xadc3e831, 0x68c68bdf, 0xfcb57dac, 0x7df33f01, 0xefb6081f, 0x98eb29eb, 0x668352b7, 0x98a1545b, 0x0a3e64cd, 0x9b16a929, 0x2233c1c4, 0x7879ec25, 0x17c4466a, 0x6e0b37ea, 0xde30ebb2, 0x01ef095c, 0x35fbdb33, 0xa97b35b7, 0xdfbf652c, 0xaf668798, 0xb7846548, 0xafd8706a, 0x2d346ced, 0xbb33dfe3, 0xae79adfc, 0xc3115146, 0x05a51471, ] # Decrypt with RC6-CBC with 128-bit blocks (4 32-bit numbers) iv = [0, 0, 0, 0] dec_data = bytearray(len(enc_data)) for blkoffset in range(0, len(enc_data), 16): enc_block = struct.unpack('<IIII', enc_data[blkoffset:blkoffset + 16]) dec_block = rc6_decrypt(key_state, enc_block) dec_block = [i ^ d for i, d in zip(iv, dec_block)] dec_data[blkoffset:blkoffset + 16] = struct.pack('<IIII', *dec_block) iv = enc_block # dec_data contains chunks offset = 0 chunk_index = 0 while offset < len(dec_data): chunck_length = struct.unpack('<I', dec_data[offset:offset + 4])[0] rc4_key = dec_data[offset + 4:offset + 0x14] payload_md5 = dec_data[offset + 0x14:offset + 0x24] enc_payload = dec_data[offset + 0x24:offset + 0x24 + chunck_length] print("Chunk {} at {:#x}: {:#x} bytes".format(chunk_index, offset, chunck_length)) if chunck_length == 0: break keystream = rc4.RC4(rc4_key) dec_payload = bytearray(e ^ k for e, k in zip(enc_payload, keystream)) with open('decrypted_chunk_{}.bin'.format(chunk_index), 'wb') as f: f.write(dec_payload) print(" {}".format(binascii.hexlify(payload_md5).decode('ascii'))) print(" {}".format(hashlib.md5(dec_payload).hexdigest())) assert payload_md5 == hashlib.md5(dec_payload).digest() offset += 0x24 + chunck_length chunk_index += 1 """ Chunk 0 at 0x0: 0x39 bytes a83bd78eaf49903dfd64447fcd35831a a83bd78eaf49903dfd64447fcd35831a Chunk 1 at 0x5d: 0xc15 bytes ad2713a0668ac3f421a00b7b21430b4f ad2713a0668ac3f421a00b7b21430b4f Chunk 2 at 0xc96: 0x34631 bytes 671d51af77f541605ea91e81e8dc70f0 671d51af77f541605ea91e81e8dc70f0 Chunk 3 at 0x352eb: 0x1b234 bytes 8ff9f891acf83a5ee95f69084b4d48d2 8ff9f891acf83a5ee95f69084b4d48d2 Chunk 4 at 0x50543: 0xfbe0 bytes c4e5abbc8c4ddff3853db0fcb9eb55ff c4e5abbc8c4ddff3853db0fcb9eb55ff Chunk 5 at 0x60147: 0xb9f7 bytes 0cb3389fedc86b4ff4a86db0b492b273 0cb3389fedc86b4ff4a86db0b492b273 Chunk 6 at 0x6bb62: 0x83d5 bytes 03d5e4c549945d4ac5b1e3b973606d61 03d5e4c549945d4ac5b1e3b973606d61 Chunk 7 at 0x73f5b: 0x12500a bytes 581ae98e6119f7672ba38c74b1c427ce 581ae98e6119f7672ba38c74b1c427ce Chunk 8 at 0x198f89: 0x0 bytes """
u = ror32(d * (2 * d + 1), 5)
lib.rs
#![recursion_limit = "128"] // For lazy-static #[macro_use] extern crate lazy_static; #[macro_use]
pub mod attestation_verification; mod beacon_chain; mod beacon_fork_choice_store; mod beacon_snapshot; mod block_verification; pub mod builder; pub mod chain_config; mod errors; pub mod eth1_chain; pub mod events; mod head_tracker; mod metrics; pub mod migrate; mod naive_aggregation_pool; mod observed_attestations; mod observed_attesters; mod observed_block_producers; pub mod observed_operations; mod persisted_beacon_chain; mod persisted_fork_choice; mod shuffling_cache; mod snapshot_cache; pub mod test_utils; mod timeout_rw_lock; mod validator_pubkey_cache; pub use self::beacon_chain::{ AttestationProcessingOutcome, BeaconChain, BeaconChainTypes, ChainSegmentResult, ForkChoiceError, StateSkipConfig, }; pub use self::beacon_snapshot::BeaconSnapshot; pub use self::chain_config::ChainConfig; pub use self::errors::{BeaconChainError, BlockProductionError}; pub use attestation_verification::Error as AttestationError; pub use beacon_fork_choice_store::{BeaconForkChoiceStore, Error as ForkChoiceStoreError}; pub use block_verification::{BlockError, GossipVerifiedBlock}; pub use eth1_chain::{Eth1Chain, Eth1ChainBackend}; pub use events::EventHandler; pub use metrics::scrape_for_metrics; pub use parking_lot; pub use slot_clock; pub use state_processing::per_block_processing::errors::{ AttestationValidationError, AttesterSlashingValidationError, DepositValidationError, ExitValidationError, ProposerSlashingValidationError, }; pub use store; pub use types;
extern crate slog; extern crate slog_term;
0002_locations_featured_image.py
# Generated by Django 3.1.7 on 2021-04-01 09:02 from django.db import migrations, models class Migration(migrations.Migration):
dependencies = [ ('locations', '0001_initial'), ] operations = [ migrations.AddField( model_name='locations', name='featured_image', field=models.ImageField(null=True, upload_to=''), ), ]
generate_lensed_hosts_agn.py
import numpy as np import os import argparse import pylab as pl import subprocess as sp import astropy.io.fits as pyfits import pandas as pd import scipy.special as ss import om10_lensing_equations as ole data_dir = os.path.join(os.environ['SIMS_GCRCATSIMINTERFACE_DIR'], 'data') twinkles_data_dir = os.path.join(os.environ['TWINKLES_DIR'], 'data') outdefault = os.path.join(data_dir,'outputs') parser = argparse.ArgumentParser(description='The location of the desired output directory') parser.add_argument("--outdir", dest='outdir1', type=str, default = outdefault, help='Output location for FITS stamps') args = parser.parse_args() outdir = args.outdir1 def load_in_data_agn(): """ Reads in catalogs of host galaxy bulge and disk as well as om10 lenses """ agn_host_bulge = pd.read_csv(os.path.join(data_dir,'agn_host_bulge.csv.gz')) agn_host_disk = pd.read_csv(os.path.join(data_dir, 'agn_host_disk.csv.gz')) idx = agn_host_bulge['image_number'] == 0 ahb_purged = agn_host_bulge[:][idx] ahd_purged = agn_host_disk[:][idx] lens_list = pyfits.open(os.path.join(twinkles_data_dir, 'twinkles_lenses_v2.fits')) return lens_list, ahb_purged, ahd_purged def create_cats_agns(index, hdu_list, ahb_list, ahd_list): """ Takes input catalogs and isolates lensing parameters as well as ra and dec of lens Parameters: ----------- index: int Index for pandas data frame hdu_list: row of data frame that contains lens parameters ahb_list: row of data frame that contains lens galaxy parameters for the galactic bulge ahd_list: row of data frame that contains lens galaxy parameters for the galactic disk """ twinkles_ID = ahd['twinkles_system'][index] UID_lens = ahd['uniqueId_lens'][index] Ra_lens = ahd['raPhoSim_lens'][index] Dec_lens = ahd['decPhoSim_lens'][index] idx = hdu_list[1].data['twinklesId'] == twinkles_ID lid = hdu_list[1].data['LENSID'][idx][0] xl1 = 0.0 xl2 = 0.0 vd = hdu_list[1].data['VELDISP'][idx][0] zd = hdu_list[1].data['ZLENS'][idx][0] ql = 1.0 - hdu_list[1].data['ELLIP'][idx][0] phi= hdu_list[1].data['PHIE'][idx][0] ys1 = hdu_list[1].data['XSRC'][idx][0] ys2 = hdu_list[1].data['YSRC'][idx][0] ext_shr = hdu_list[1].data['GAMMA'][idx][0] ext_phi = hdu_list[1].data['PHIG'][idx][0] ximg = hdu_list[1].data['XIMG'][idx][0] yimg = hdu_list[1].data['YIMG'][idx][0] #---------------------------------------------------------------------------- lens_cat = {'xl1' : xl1, 'xl2' : xl2, 'ql' : ql, 'vd' : vd, 'phl' : phi, 'gamma' : ext_shr, 'phg' : ext_phi, 'zl' : zd, 'ximg' : ximg, 'yimg' : yimg, 'twinklesid' : twinkles_ID, 'lensid' : lid, 'index' : index, 'UID_lens' : UID_lens, 'Ra_lens' : Ra_lens, 'Dec_lens' : Dec_lens} #---------------------------------------------------------------------------- mag_src_b = ahb_list['phosimMagNorm'][index] qs_b = ahb_list['minorAxis'][index]/ahb_list['majorAxis'][index] Reff_src_b = np.sqrt(ahb_list['minorAxis'][index]*ahb_list['majorAxis'][index]) phs_b = ahb_list['positionAngle'][index] ns_b = ahb_list['sindex'][index] zs_b = ahb_list['redshift'][index] sed_src_b = ahb_list['sedFilepath'][index] srcsP_bulge = {'ys1' : ys1, 'ys2' : ys2, 'mag_src' : mag_src_b, 'Reff_src' : Reff_src_b, 'qs' : qs_b, 'phs' : phs_b, 'ns' : ns_b, 'zs' : zs_b, 'sed_src' : sed_src_b, 'components' : 'bulge'} #---------------------------------------------------------------------------- mag_src_d = ahd_list['phosimMagNorm'][index] qs_d = ahd_list['minorAxis'][index]/ahd_list['majorAxis'][index] Reff_src_d = np.sqrt(ahd_list['minorAxis'][index]*ahd_list['majorAxis'][index]) phs_d = ahd_list['positionAngle'][index] ns_d = ahd_list['sindex'][index] zs_d = ahd_list['redshift'][index] sed_src_d = ahd_list['sedFilepath'][index] srcsP_disk = {'ys1' : ys1, 'ys2' : ys2, 'mag_src' : mag_src_d, 'Reff_src' : Reff_src_d, 'qs' : qs_d, 'phs' : phs_d, 'ns' : ns_d, 'zs' : zs_d, 'sed_src' : sed_src_d, 'components' : 'disk'} #---------------------------------------------------------------------------- return lens_cat, srcsP_bulge, srcsP_disk def lensed_sersic_2d(xi1, xi2, yi1, yi2, source_cat, lens_cat): #Defines a magnitude of lensed host galaxy using 2d Sersic profile #---------------------------------------------------------------------- ysc1 = source_cat['ys1'] # x position of the source, arcseconds ysc2 = source_cat['ys2'] # y position of the source, arcseconds mag_tot = source_cat['mag_src'] # total magnitude of the source Reff_arc = source_cat['Reff_src'] # Effective Radius of the source, arcseconds qs = source_cat['qs'] # axis ratio of the source, b/a phs = source_cat['phs'] # orientation of the source, degree ns = source_cat['ns'] # index of the source #---------------------------------------------------------------------- g_limage = ole.sersic_2d(yi1,yi2,ysc1,ysc2,Reff_arc,qs,phs,ns) g_source = ole.sersic_2d(xi1,xi2,ysc1,ysc2,Reff_arc,qs,phs,ns) mag_lensed = mag_tot - 2.5*np.log(np.sum(g_limage)/np.sum(g_source)) return mag_lensed, g_limage def generate_lensed_host(xi1, xi2, lens_P, srcP_b, srcP_d): """Does ray tracing of light from host galaxies using a non-singular isothermal ellipsoid profile. Ultimately writes out a FITS image of the result of the ray tracing. """ dsx = 0.01 xlc1 = lens_P['xl1'] # x position of the lens, arcseconds xlc2 = lens_P['xl2'] # y position of the lens, arcseconds rlc = 0.0 # core size of Non-singular Isothermal Ellipsoid vd = lens_P['vd'] # velocity dispersion of the lens zl = lens_P['zl'] # redshift of the lens zs = srcP_b['zs'] # redshift of the source rle = ole.re_sv(vd, zl, zs) # Einstein radius of lens, arcseconds. ql = lens_P['ql'] # axis ratio b/a le = ole.e2le(1.0 - ql) # scale factor due to projection of ellpsoid phl = lens_P['phl'] # position angle of the lens, degree eshr = lens_P['gamma'] # external shear eang = lens_P['phg'] # position angle of external shear ekpa = 0.0 # external convergence #---------------------------------------------------------------------- ai1, ai2 = ole.alphas_sie(xlc1, xlc2, phl, ql, rle, le, eshr, eang, ekpa, xi1, xi2) yi1 = xi1 - ai1 yi2 = xi2 - ai2 #---------------------------------------------------------------------------- lensed_mag_b, lensed_image_b = lensed_sersic_2d(xi1,xi2,yi1,yi2,srcP_b,lens_P) os.makedirs(os.path.join(outdir,'agn_lensed_bulges'), exist_ok=True) fits_limg_b = os.path.join(outdir,'agn_lensed_bulges/') + str(lens_P['UID_lens']) + "_" + str(lensed_mag_b) + "_bulge.fits" pyfits.writeto(fits_limg_b, lensed_image_b.astype("float32"), overwrite=True) #---------------------------------------------------------------------------- lensed_mag_d, lensed_image_d = lensed_sersic_2d(xi1,xi2,yi1,yi2,srcP_d,lens_P) os.makedirs(os.path.join(outdir,'agn_lensed_disks'), exist_ok=True) fits_limg_d = os.path.join(outdir,'agn_lensed_disks/') + str(lens_P['UID_lens']) + "_" + str(lensed_mag_d) + "_disk.fits" pyfits.writeto(fits_limg_d, lensed_image_d.astype("float32"), overwrite=True) return 0
if __name__ == '__main__': dsx = 0.01 # pixel size per side, arcseconds nnn = 1000 # number of pixels per side xi1, xi2 = ole.make_r_coor(nnn, dsx) hdulist, ahb, ahd = load_in_data_agn() message_row = 0 message_freq = 50 for i, row in ahb.iterrows(): if i >= message_row: print ("working on system ", i , "of", max(ahb.index)) message_row += message_freq lensP, srcPb, srcPd = create_cats_agns(i, hdulist, ahb, ahd) generate_lensed_host(xi1, xi2, lensP, srcPb, srcPd)
planpreview.go
// Copyright 2021 The PipeCD Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package planpreview import ( "context" "github.com/pipe-cd/pipe/pkg/model" ) type Planner interface { Plan(ctx context.Context, repoID, branch, commit string) ([]*model.ApplicationPlanPreviewResult, error) } type planner struct { }
func (p *planner) Plan(ctx context.Context, repoID, branch, commit string) ([]*model.ApplicationPlanPreviewResult, error) { // TODO: Implement Plan functionality. // 1. List all applications placing in that repository. // 2. Fetch the source code at the specified branch commit. // 3. Determine the list of applications that will be triggered. // - Based on the changed files between 2 commits: head commit and mostRecentlyTriggeredCommit // 4. For each application: // 4.1. Start a planner to check what/why strategy will be used // 4.2. Check what resources should be added, deleted and modified // - Terraform app: used terraform plan command // - Kubernetes app: calculate the diff of resources at head commit and mostRecentlySuccessfulCommit return nil, nil }
convertToAsyncFunction.ts
/* @internal */ namespace ts.codefix { const fixId = "convertToAsyncFunction"; const errorCodes = [Diagnostics.This_may_be_converted_to_an_async_function.code]; let codeActionSucceeded = true; registerCodeFix({ errorCodes, getCodeActions(context: CodeFixContext) { codeActionSucceeded = true; const changes = textChanges.ChangeTracker.with(context, (t) => convertToAsyncFunction(t, context.sourceFile, context.span.start, context.program.getTypeChecker(), context)); return codeActionSucceeded ? [createCodeFixAction(fixId, changes, Diagnostics.Convert_to_async_function, fixId, Diagnostics.Convert_all_to_async_functions)] : []; }, fixIds: [fixId], getAllCodeActions: context => codeFixAll(context, errorCodes, (changes, err) => convertToAsyncFunction(changes, err.file, err.start, context.program.getTypeChecker(), context)), }); interface SynthIdentifier { readonly identifier: Identifier; readonly types: Type[]; numberOfAssignmentsOriginal: number; // number of times the variable should be assigned in the refactor } interface SymbolAndIdentifier { readonly identifier: Identifier; readonly symbol: Symbol; } interface Transformer { readonly checker: TypeChecker; readonly synthNamesMap: Map<SynthIdentifier>; // keys are the symbol id of the identifier readonly allVarNames: ReadonlyArray<SymbolAndIdentifier>; readonly setOfExpressionsToReturn: ReadonlyMap<true>; // keys are the node ids of the expressions readonly constIdentifiers: Identifier[]; readonly originalTypeMap: ReadonlyMap<Type>; // keys are the node id of the identifier readonly isInJSFile: boolean; } function convertToAsyncFunction(changes: textChanges.ChangeTracker, sourceFile: SourceFile, position: number, checker: TypeChecker, context: CodeFixContextBase): void { // get the function declaration - returns a promise const tokenAtPosition = getTokenAtPosition(sourceFile, position); let functionToConvert: FunctionLikeDeclaration | undefined; // if the parent of a FunctionLikeDeclaration is a variable declaration, the convertToAsync diagnostic will be reported on the variable name if (isIdentifier(tokenAtPosition) && isVariableDeclaration(tokenAtPosition.parent) && tokenAtPosition.parent.initializer && isFunctionLikeDeclaration(tokenAtPosition.parent.initializer)) { functionToConvert = tokenAtPosition.parent.initializer; } else { functionToConvert = tryCast(getContainingFunction(getTokenAtPosition(sourceFile, position)), isFunctionLikeDeclaration); } if (!functionToConvert) { return; } const synthNamesMap: Map<SynthIdentifier> = createMap(); const originalTypeMap: Map<Type> = createMap(); const allVarNames: SymbolAndIdentifier[] = []; const isInJavascript = isInJSFile(functionToConvert); const setOfExpressionsToReturn = getAllPromiseExpressionsToReturn(functionToConvert, checker); const functionToConvertRenamed: FunctionLikeDeclaration = renameCollidingVarNames(functionToConvert, checker, synthNamesMap, context, setOfExpressionsToReturn, originalTypeMap, allVarNames); const constIdentifiers = getConstIdentifiers(synthNamesMap); const returnStatements = getReturnStatementsWithPromiseHandlers(functionToConvertRenamed); const transformer: Transformer = { checker, synthNamesMap, allVarNames, setOfExpressionsToReturn, constIdentifiers, originalTypeMap, isInJSFile: isInJavascript }; if (!returnStatements.length) { return; } // add the async keyword changes.insertLastModifierBefore(sourceFile, SyntaxKind.AsyncKeyword, functionToConvert); function startTransformation(node: CallExpression, nodeToReplace: Node) { const newNodes = transformExpression(node, transformer, node); changes.replaceNodeWithNodes(sourceFile, nodeToReplace, newNodes); } for (const statement of returnStatements) { forEachChild(statement, function visit(node) { if (isCallExpression(node)) { startTransformation(node, statement); } else if (!isFunctionLike(node)) { forEachChild(node, visit); } }); } } // Returns the identifiers that are never reassigned in the refactor function getConstIdentifiers(synthNamesMap: ReadonlyMap<SynthIdentifier>): Identifier[] { const constIdentifiers: Identifier[] = []; synthNamesMap.forEach((val) => { if (val.numberOfAssignmentsOriginal === 0) { constIdentifiers.push(val.identifier); } }); return constIdentifiers; } /* Finds all of the expressions of promise type that should not be saved in a variable during the refactor */ function getAllPromiseExpressionsToReturn(func: FunctionLikeDeclaration, checker: TypeChecker): Map<true> { if (!func.body) { return createMap<true>(); } const setOfExpressionsToReturn: Map<true> = createMap<true>(); forEachChild(func.body, function visit(node: Node) { if (isPromiseReturningExpression(node, checker, "then")) { setOfExpressionsToReturn.set(getNodeId(node).toString(), true); forEach((<CallExpression>node).arguments, visit); } else if (isPromiseReturningExpression(node, checker, "catch")) { setOfExpressionsToReturn.set(getNodeId(node).toString(), true); // if .catch() is the last call in the chain, move leftward in the chain until we hit something else that should be returned forEachChild(node, visit); } else if (isPromiseReturningExpression(node, checker)) { setOfExpressionsToReturn.set(getNodeId(node).toString(), true); // don't recurse here, since we won't refactor any children or arguments of the expression } else { forEachChild(node, visit); } }); return setOfExpressionsToReturn; } /* Returns true if node is a promise returning expression If name is not undefined, node is a promise returning call of name */ function isPromiseReturningExpression(node: Node, checker: TypeChecker, name?: string): boolean { const isNodeExpression = name ? isCallExpression(node) : isExpression(node); const isExpressionOfName = isNodeExpression && (!name || hasPropertyAccessExpressionWithName(node as CallExpression, name)); const nodeType = isExpressionOfName && checker.getTypeAtLocation(node); return !!(nodeType && checker.getPromisedTypeOfPromise(nodeType)); } function declaredInFile(symbol: Symbol, sourceFile: SourceFile): boolean { return symbol.valueDeclaration && symbol.valueDeclaration.getSourceFile() === sourceFile; } /* Renaming of identifiers may be neccesary as the refactor changes scopes - This function collects all existing identifier names and names of identifiers that will be created in the refactor. It then checks for any collisions and renames them through getSynthesizedDeepClone */ function renameCollidingVarNames(nodeToRename: FunctionLikeDeclaration, checker: TypeChecker, synthNamesMap: Map<SynthIdentifier>, context: CodeFixContextBase, setOfAllExpressionsToReturn: Map<true>, originalType: Map<Type>, allVarNames: SymbolAndIdentifier[]): FunctionLikeDeclaration { const identsToRenameMap: Map<Identifier> = createMap(); // key is the symbol id const collidingSymbolMap: Map<Symbol[]> = createMap(); forEachChild(nodeToRename, function visit(node: Node) { if (!isIdentifier(node)) { forEachChild(node, visit); return; } const symbol = checker.getSymbolAtLocation(node); const isDefinedInFile = symbol && declaredInFile(symbol, context.sourceFile); if (symbol && isDefinedInFile) { const type = checker.getTypeAtLocation(node); const lastCallSignature = getLastCallSignature(type, checker); const symbolIdString = getSymbolId(symbol).toString(); // if the identifier refers to a function we want to add the new synthesized variable for the declaration (ex. blob in let blob = res(arg)) // Note - the choice of the last call signature is arbitrary if (lastCallSignature && !isFunctionLikeDeclaration(node.parent) && !synthNamesMap.has(symbolIdString)) { const firstParameter = firstOrUndefined(lastCallSignature.parameters); const ident = firstParameter && isParameter(firstParameter.valueDeclaration) && tryCast(firstParameter.valueDeclaration.name, isIdentifier) || createOptimisticUniqueName("result"); const synthName = getNewNameIfConflict(ident, collidingSymbolMap); synthNamesMap.set(symbolIdString, synthName); allVarNames.push({ identifier: synthName.identifier, symbol }); addNameToFrequencyMap(collidingSymbolMap, ident.text, symbol); } // we only care about identifiers that are parameters and declarations (don't care about other uses) else if (node.parent && (isParameter(node.parent) || isVariableDeclaration(node.parent))) { const originalName = node.text; const collidingSymbols = collidingSymbolMap.get(originalName); // if the identifier name conflicts with a different identifier that we've already seen if (collidingSymbols && collidingSymbols.some(prevSymbol => prevSymbol !== symbol)) { const newName = getNewNameIfConflict(node, collidingSymbolMap); identsToRenameMap.set(symbolIdString, newName.identifier); synthNamesMap.set(symbolIdString, newName); allVarNames.push({ identifier: newName.identifier, symbol }); addNameToFrequencyMap(collidingSymbolMap, originalName, symbol); } else { const identifier = getSynthesizedDeepClone(node); identsToRenameMap.set(symbolIdString, identifier); synthNamesMap.set(symbolIdString, { identifier, types: [], numberOfAssignmentsOriginal: allVarNames.filter(elem => elem.identifier.text === node.text).length/*, numberOfAssignmentsSynthesized: 0*/ }); if ((isParameter(node.parent) && isExpressionOrCallOnTypePromise(node.parent.parent)) || isVariableDeclaration(node.parent)) { allVarNames.push({ identifier, symbol }); addNameToFrequencyMap(collidingSymbolMap, originalName, symbol); } } } } }); return getSynthesizedDeepCloneWithRenames(nodeToRename, /*includeTrivia*/ true, identsToRenameMap, checker, deepCloneCallback); function isExpressionOrCallOnTypePromise(child: Node): boolean { const node = child.parent; if (isCallExpression(node) || isIdentifier(node) && !setOfAllExpressionsToReturn.get(getNodeId(node).toString())) { const nodeType = checker.getTypeAtLocation(node); const isPromise = nodeType && checker.getPromisedTypeOfPromise(nodeType); return !!isPromise; } return false; } function deepCloneCallback(node: Node, clone: Node) { if (isIdentifier(node)) { const symbol = checker.getSymbolAtLocation(node); const symboldIdString = symbol && getSymbolId(symbol).toString(); const renameInfo = symbol && synthNamesMap.get(symboldIdString!); if (renameInfo) { const type = checker.getTypeAtLocation(node); originalType.set(getNodeId(clone).toString(), type); } } const val = setOfAllExpressionsToReturn.get(getNodeId(node).toString()); if (val !== undefined) { setOfAllExpressionsToReturn.delete(getNodeId(node).toString()); setOfAllExpressionsToReturn.set(getNodeId(clone).toString(), val); } } } function addNameToFrequencyMap(renamedVarNameFrequencyMap: Map<Symbol[]>, originalName: string, symbol: Symbol) { if (renamedVarNameFrequencyMap.has(originalName)) { renamedVarNameFrequencyMap.get(originalName)!.push(symbol); } else { renamedVarNameFrequencyMap.set(originalName, [symbol]); } } function getNewNameIfConflict(name: Identifier, originalNames: ReadonlyMap<Symbol[]>): SynthIdentifier { const numVarsSameName = (originalNames.get(name.text) || emptyArray).length; const numberOfAssignmentsOriginal = 0; const identifier = numVarsSameName === 0 ? name : createIdentifier(name.text + "_" + numVarsSameName); return { identifier, types: [], numberOfAssignmentsOriginal }; } // dispatch function to recursively build the refactoring // should be kept up to date with isFixablePromiseHandler in suggestionDiagnostics.ts function transformExpression(node: Expression, transformer: Transformer, outermostParent: CallExpression, prevArgName?: SynthIdentifier): ReadonlyArray<Statement> { if (!node) { return emptyArray; } const originalType = isIdentifier(node) && transformer.originalTypeMap.get(getNodeId(node).toString()); const nodeType = originalType || transformer.checker.getTypeAtLocation(node); if (isCallExpression(node) && hasPropertyAccessExpressionWithName(node, "then") && nodeType && !!transformer.checker.getPromisedTypeOfPromise(nodeType)) { return transformThen(node, transformer, outermostParent, prevArgName); } else if (isCallExpression(node) && hasPropertyAccessExpressionWithName(node, "catch") && nodeType && !!transformer.checker.getPromisedTypeOfPromise(nodeType)) { return transformCatch(node, transformer, prevArgName); } else if (isPropertyAccessExpression(node)) { return transformExpression(node.expression, transformer, outermostParent, prevArgName); } else if (nodeType && transformer.checker.getPromisedTypeOfPromise(nodeType)) { return transformPromiseCall(node, transformer, prevArgName); } codeActionSucceeded = false; return emptyArray; } function transformCatch(node: CallExpression, transformer: Transformer, prevArgName?: SynthIdentifier): ReadonlyArray<Statement> { const func = node.arguments[0]; const argName = getArgName(func, transformer); const shouldReturn = transformer.setOfExpressionsToReturn.get(getNodeId(node).toString()); /* If there is another call in the chain after the .catch() we are transforming, we will need to save the result of both paths (try block and catch block) To do this, we will need to synthesize a variable that we were not aware of while we were adding identifiers to the synthNamesMap We will use the prevArgName and then update the synthNamesMap with a new variable name for the next transformation step */ if (prevArgName && !shouldReturn) { prevArgName.numberOfAssignmentsOriginal = 2; // Try block and catch block transformer.synthNamesMap.forEach((val, key) => { if (val.identifier.text === prevArgName.identifier.text) { const newSynthName = createUniqueSynthName(prevArgName); transformer.synthNamesMap.set(key, newSynthName); } }); // update the constIdentifiers list if (transformer.constIdentifiers.some(elem => elem.text === prevArgName.identifier.text)) { transformer.constIdentifiers.push(createUniqueSynthName(prevArgName).identifier); } } const tryBlock = createBlock(transformExpression(node.expression, transformer, node, prevArgName)); const transformationBody = getTransformationBody(func, prevArgName, argName, node, transformer); const catchArg = argName ? argName.identifier.text : "e"; const catchClause = createCatchClause(catchArg, createBlock(transformationBody)); /* In order to avoid an implicit any, we will synthesize a type for the declaration using the unions of the types of both paths (try block and catch block) */ let varDeclList; if (prevArgName && !shouldReturn) { const typeArray: Type[] = prevArgName.types; const unionType = transformer.checker.getUnionType(typeArray, UnionReduction.Subtype); const unionTypeNode = transformer.isInJSFile ? undefined : transformer.checker.typeToTypeNode(unionType); const varDecl = [createVariableDeclaration(getSynthesizedDeepClone(prevArgName.identifier), unionTypeNode)]; varDeclList = createVariableStatement(/*modifiers*/ undefined, createVariableDeclarationList(varDecl, NodeFlags.Let)); } const tryStatement = createTry(tryBlock, catchClause, /*finallyBlock*/ undefined); return varDeclList ? [varDeclList, tryStatement] : [tryStatement]; } function createUniqueSynthName(prevArgName: SynthIdentifier) { const renamedPrevArg = createOptimisticUniqueName(prevArgName.identifier.text); const newSynthName = { identifier: renamedPrevArg, types: [], numberOfAssignmentsOriginal: 0 }; return newSynthName; } function transformThen(node: CallExpression, transformer: Transformer, outermostParent: CallExpression, prevArgName?: SynthIdentifier): ReadonlyArray<Statement> { const [res, rej] = node.arguments; if (!res) { return transformExpression(node.expression, transformer, outermostParent); } const argNameRes = getArgName(res, transformer); const transformationBody = getTransformationBody(res, prevArgName, argNameRes, node, transformer); if (rej) { const argNameRej = getArgName(rej, transformer); const tryBlock = createBlock(transformExpression(node.expression, transformer, node, argNameRes).concat(transformationBody)); const transformationBody2 = getTransformationBody(rej, prevArgName, argNameRej, node, transformer); const catchArg = argNameRej ? argNameRej.identifier.text : "e"; const catchClause = createCatchClause(catchArg, createBlock(transformationBody2)); return [createTry(tryBlock, catchClause, /* finallyBlock */ undefined)]; } return transformExpression(node.expression, transformer, node, argNameRes).concat(transformationBody); } function getFlagOfIdentifier(node: Identifier, constIdentifiers: ReadonlyArray<Identifier>): NodeFlags { const inArr: boolean = constIdentifiers.some(elem => elem.text === node.text); return inArr ? NodeFlags.Const : NodeFlags.Let; } function transformPromiseCall(node: Expression, transformer: Transformer, prevArgName?: SynthIdentifier): ReadonlyArray<Statement> { const shouldReturn = transformer.setOfExpressionsToReturn.get(getNodeId(node).toString()); // the identifier is empty when the handler (.then()) ignores the argument - In this situation we do not need to save the result of the promise returning call const originalNodeParent = node.original ? node.original.parent : node.parent; if (prevArgName && !shouldReturn && (!originalNodeParent || isPropertyAccessExpression(originalNodeParent))) { return createTransformedStatement(prevArgName, createAwait(node), transformer); } else if (!prevArgName && !shouldReturn && (!originalNodeParent || isPropertyAccessExpression(originalNodeParent))) { return [createStatement(createAwait(node))]; } return [createReturn(getSynthesizedDeepClone(node))]; } function createTransformedStatement(prevArgName: SynthIdentifier | undefined, rightHandSide: Expression, transformer: Transformer): ReadonlyArray<Statement> { if (!prevArgName || prevArgName.identifier.text.length === 0) { // if there's no argName to assign to, there still might be side effects return [createStatement(rightHandSide)]; } if (prevArgName.types.length < prevArgName.numberOfAssignmentsOriginal) { // if the variable has already been declared, we don't need "let" or "const" return [createStatement(createAssignment(getSynthesizedDeepClone(prevArgName.identifier), rightHandSide))]; } return [createVariableStatement(/*modifiers*/ undefined, (createVariableDeclarationList([createVariableDeclaration(getSynthesizedDeepClone(prevArgName.identifier), /*type*/ undefined, rightHandSide)], getFlagOfIdentifier(prevArgName.identifier, transformer.constIdentifiers))))]; } // should be kept up to date with isFixablePromiseArgument in suggestionDiagnostics.ts function
(func: Expression, prevArgName: SynthIdentifier | undefined, argName: SynthIdentifier | undefined, parent: CallExpression, transformer: Transformer): ReadonlyArray<Statement> { const shouldReturn = transformer.setOfExpressionsToReturn.get(getNodeId(parent).toString()); switch (func.kind) { case SyntaxKind.NullKeyword: // do not produce a transformed statement for a null argument break; case SyntaxKind.Identifier: // identifier includes undefined if (!argName) { // undefined was argument passed to promise handler break; } const synthCall = createCall(getSynthesizedDeepClone(func as Identifier), /*typeArguments*/ undefined, [argName.identifier]); if (shouldReturn) { return [createReturn(synthCall)]; } const type = transformer.originalTypeMap.get(getNodeId(func).toString()) || transformer.checker.getTypeAtLocation(func); const callSignatures = transformer.checker.getSignaturesOfType(type, SignatureKind.Call); if (!callSignatures.length) { // if identifier in handler has no call signatures, it's invalid codeActionSucceeded = false; break; } const returnType = callSignatures[0].getReturnType(); const varDeclOrAssignment = createTransformedStatement(prevArgName, createAwait(synthCall), transformer); if (prevArgName) { prevArgName.types.push(returnType); } return varDeclOrAssignment; case SyntaxKind.FunctionExpression: case SyntaxKind.ArrowFunction: { const funcBody = (func as FunctionExpression | ArrowFunction).body; // Arrow functions with block bodies { } will enter this control flow if (isBlock(funcBody)) { let refactoredStmts: Statement[] = []; let seenReturnStatement = false; for (const statement of funcBody.statements) { if (isReturnStatement(statement)) { seenReturnStatement = true; } if (getReturnStatementsWithPromiseHandlers(statement).length) { refactoredStmts = refactoredStmts.concat(getInnerTransformationBody(transformer, [statement], prevArgName)); } else { refactoredStmts.push(statement); } } return shouldReturn ? refactoredStmts.map(s => getSynthesizedDeepClone(s)) : removeReturns( refactoredStmts, prevArgName === undefined ? undefined : prevArgName.identifier, transformer, seenReturnStatement); } else { const innerRetStmts = getReturnStatementsWithPromiseHandlers(createReturn(funcBody)); const innerCbBody = getInnerTransformationBody(transformer, innerRetStmts, prevArgName); if (innerCbBody.length > 0) { return innerCbBody; } if (!shouldReturn) { const type = transformer.checker.getTypeAtLocation(func); const returnType = getLastCallSignature(type, transformer.checker)!.getReturnType(); const rightHandSide = getSynthesizedDeepClone(funcBody); const possiblyAwaitedRightHandSide = !!transformer.checker.getPromisedTypeOfPromise(returnType) ? createAwait(rightHandSide) : rightHandSide; const transformedStatement = createTransformedStatement(prevArgName, possiblyAwaitedRightHandSide, transformer); if (prevArgName) { prevArgName.types.push(returnType); } return transformedStatement; } else { return [createReturn(getSynthesizedDeepClone(funcBody))]; } } } default: // If no cases apply, we've found a transformation body we don't know how to handle, so the refactoring should no-op to avoid deleting code. codeActionSucceeded = false; break; } return emptyArray; } function getLastCallSignature(type: Type, checker: TypeChecker): Signature | undefined { const callSignatures = checker.getSignaturesOfType(type, SignatureKind.Call); return lastOrUndefined(callSignatures); } function removeReturns(stmts: ReadonlyArray<Statement>, prevArgName: Identifier | undefined, transformer: Transformer, seenReturnStatement: boolean): ReadonlyArray<Statement> { const ret: Statement[] = []; for (const stmt of stmts) { if (isReturnStatement(stmt)) { if (stmt.expression) { const possiblyAwaitedExpression = isPromiseReturningExpression(stmt.expression, transformer.checker) ? createAwait(stmt.expression) : stmt.expression; if (prevArgName === undefined) { ret.push(createExpressionStatement(possiblyAwaitedExpression)); } else { ret.push(createVariableStatement(/*modifiers*/ undefined, (createVariableDeclarationList([createVariableDeclaration(prevArgName, /*type*/ undefined, possiblyAwaitedExpression)], getFlagOfIdentifier(prevArgName, transformer.constIdentifiers))))); } } } else { ret.push(getSynthesizedDeepClone(stmt)); } } // if block has no return statement, need to define prevArgName as undefined to prevent undeclared variables if (!seenReturnStatement && prevArgName !== undefined) { ret.push(createVariableStatement(/*modifiers*/ undefined, (createVariableDeclarationList([createVariableDeclaration(prevArgName, /*type*/ undefined, createIdentifier("undefined"))], getFlagOfIdentifier(prevArgName, transformer.constIdentifiers))))); } return ret; } function getInnerTransformationBody(transformer: Transformer, innerRetStmts: ReadonlyArray<Node>, prevArgName?: SynthIdentifier) { let innerCbBody: Statement[] = []; for (const stmt of innerRetStmts) { forEachChild(stmt, function visit(node) { if (isCallExpression(node)) { const temp = transformExpression(node, transformer, node, prevArgName); innerCbBody = innerCbBody.concat(temp); if (innerCbBody.length > 0) { return; } } else if (!isFunctionLike(node)) { forEachChild(node, visit); } }); } return innerCbBody; } function getArgName(funcNode: Expression, transformer: Transformer): SynthIdentifier | undefined { const numberOfAssignmentsOriginal = 0; const types: Type[] = []; let name: SynthIdentifier | undefined; if (isFunctionLikeDeclaration(funcNode)) { if (funcNode.parameters.length > 0) { const param = funcNode.parameters[0].name as Identifier; name = getMapEntryOrDefault(param); } } else if (isIdentifier(funcNode)) { name = getMapEntryOrDefault(funcNode); } // return undefined argName when arg is null or undefined if (!name || name.identifier.text === "undefined") { return undefined; } return name; function getMapEntryOrDefault(identifier: Identifier): SynthIdentifier { const originalNode = getOriginalNode(identifier); const symbol = getSymbol(originalNode); if (!symbol) { return { identifier, types, numberOfAssignmentsOriginal }; } const mapEntry = transformer.synthNamesMap.get(getSymbolId(symbol).toString()); return mapEntry || { identifier, types, numberOfAssignmentsOriginal }; } function getSymbol(node: Node): Symbol | undefined { return node.symbol ? node.symbol : transformer.checker.getSymbolAtLocation(node); } function getOriginalNode(node: Node): Node { return node.original ? node.original : node; } } }
getTransformationBody
hunger_system.rs
extern crate specs; use crate::components::SufferDamage; use crate::components::{HungerClock, HungerState::*}; use crate::gamelog::GameLog; use crate::RunState; use specs::prelude::*; pub struct HungerSystem {} pub const STARVATION_DAMAGE: i32 = 1; impl<'a> System<'a> for HungerSystem { type SystemData = ( Entities<'a>, WriteStorage<'a, HungerClock>, ReadExpect<'a, Entity>, // The player ReadExpect<'a, RunState>, WriteStorage<'a, SufferDamage>, WriteExpect<'a, GameLog>, ); fn
(&mut self, data: Self::SystemData) { let (entities, mut hunger_clock, player_entity, runstate, mut inflict_damage, mut log) = data; for (entity, mut clock) in (&entities, &mut hunger_clock).join() { let mut proceed = false; match *runstate { RunState::PlayerTurn => { if entity == *player_entity { proceed = true; } } RunState::MonsterTurn => { if entity != *player_entity { proceed = true; } } _ => proceed = false, } if proceed { clock.duration -= 1; if clock.duration < 1 { match clock.state { WellFed => { clock.state = Normal; clock.duration = 200; if entity == *player_entity { log.entries.push("You no longer feel well fed.".to_string()); } } Normal => { clock.state = Hungry; clock.duration = 200; if entity == *player_entity { log.entries.push("You feel hungry.".to_string()); } } Hungry => { clock.state = Starving; clock.duration = 200; if entity == *player_entity { log.entries.push("You are starving.".to_string()); } } Starving => { if entity == *player_entity { log.entries.push(format!( "You lose {} health from starvation.", STARVATION_DAMAGE )); SufferDamage::new_damage( &mut inflict_damage, entity, STARVATION_DAMAGE, ) } } } } } } } }
run
read_kv.py
from lib import action class VaultReadAction(action.VaultBaseAction): def run(self, path, kv_version, mount_point, version):
value = None if kv_version == 1: value = self.vault.kv.v1.read_secret(path=path, mount_point=mount_point) elif kv_version == 2: value = self.vault.kv.v2.read_secret_version(path=path, mount_point=mount_point, version=version) if value: return value['data'] else: raise KeyError("Key was not found in Vault")
util.go
// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package cmd import ( "crypto/rand" "fmt" "os" "github.com/coreos/etcd/clientv3" ) var ( // dialTotal counts the number of mustCreateConn calls so that endpoint // connections can be handed out in round-robin order dialTotal int ) func mustCreateConn() *clientv3.Client
func mustCreateClients(totalClients, totalConns uint) []*clientv3.Client { conns := make([]*clientv3.Client, totalConns) for i := range conns { conns[i] = mustCreateConn() } clients := make([]*clientv3.Client, totalClients) for i := range clients { clients[i] = conns[i%int(totalConns)] } return clients } func mustRandBytes(n int) []byte { rb := make([]byte, n) _, err := rand.Read(rb) if err != nil { fmt.Fprintf(os.Stderr, "failed to generate value: %v\n", err) os.Exit(1) } return rb }
{ endpoint := endpoints[dialTotal%len(endpoints)] dialTotal++ cfgtls := &tls if cfgtls.Empty() { cfgtls = nil } client, err := clientv3.New( clientv3.Config{ Endpoints: []string{endpoint}, TLS: cfgtls, }, ) if err != nil { fmt.Fprintf(os.Stderr, "dial error: %v\n", err) os.Exit(1) } return client }
tkinput.py
#-*- encoding: utf-8 -*- import sys import Tkinter as tk import service import keycode if sys.platform == 'win32': from ctypes import wintypes, byref, windll import win32con def handle_hotkey(root, callback): msg = wintypes.MSG() if windll.user32.GetMessageA(byref(msg), None, 0, 0) != 0: if msg.message == win32con.WM_HOTKEY: if msg.wParam == 1: print 'Hotkey triggered!' callback() windll.user32.TranslateMessage(byref(msg)) windll.user32.DispatchMessageA(byref(msg)) root.after(1, handle_hotkey, root, callback) # hotkey map refs: https://msdn.microsoft.com/en-us/library/windows/desktop/dd375731(v=vs.85).aspx # not yet used here. def register_hotkey(root, key, callback): key = key.split('-') mod = 0 if 'Ctrl' in key: mod |= win32con.MOD_CONTROL if 'Shift' in key: mod |= win32con.MOD_SHIFT if 'Alt' in key: mod |= win32con.MOD_ALT key = key[-1].upper() assert key in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' if windll.user32.RegisterHotKey(None, 1, mod, ord(key)) != 0: print("Hotkey registered!") handle_hotkey(root, callback) else: def register_hotkey(root, key, callback): print 'Register hotkey failed.' def main():
if __name__ == '__main__': main()
service.start() root = tk.Tk() root.resizable(0, 0) root.title('STF Input') sv = tk.StringVar() if sys.platform == 'win32': backspace = '\x08' else: backspace = '\x7f' def send(event, sv=sv): char = event.char if not char: return text = sv.get() if char == '\r' and text: # use <Return> to input service.type(text) sv.set('') return if char == backspace and text: # use <Backspace> to delete, <Del> not avaialable. sv.set('') return if char == '\x16': # skip <Ctrl-V> service.keyboard(char) sv.set('') return 'break' if char in keycode.KEYBOARD_KEYS or char in keycode.CTRLED_KEYS: service.keyboard(char) entry = tk.Entry(root, textvariable=sv) entry.pack() entry.focus_set() entry.bind('<Key>', send) state = [1] def toggle(root=root, entry=entry): if state[0] == 0: root.deiconify() entry.focus_set() state[0] = 1 else: root.withdraw() state[0] = 0 register_hotkey(root, 'Ctrl-Alt-Z', toggle) # not very well with IME try: root.mainloop() finally: service.stop()
picam.py
# # Copyright (c) 2017, Massachusetts Institute of Technology All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright notice, this # list of conditions and the following disclaimer in the documentation and/or # other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
import MDSplus import ctypes def pointer(x): """Returns a ctypes pointer""" ptr = ctypes.pointer(x) return ptr def kill(proc_pid): import psutil process = psutil.Process(proc_pid) print "Process is ", process for proc in process.children(recursive=True): proc.kill() process.kill() class PICAM(MDSplus.Device): """ Device class to support Princeton Instruments cameras with the PiCam library. methods: init store """ parts = [ {'path':':COMMENT','type':'text'}, {'path':':SERIAL_NO','type':'text','options':('no_write_shot',)}, {'path':':EXPOSURE','type':'numeric','value':1,'options':('no_write_shot',)}, {'path':':NUM_FRAMES','type':'numeric','value':30,'options':('no_write_shot',)}, {'path':':ROIS','type':'numeric','options':('no_write_shot',)}, {'path':':TIMEOUT','type':'numeric','value':100000,'options':('no_write_shot',)}, {'path':':TRG_RESPONSE','type':'text', 'value':'StartOnSingleTrigger', 'options':('no_write_shot',)}, {'path':':MODEL','type':'text','options':('no_write_model','write_once',)}, {'path':':SENSOR','type':'text','options':('no_write_model','write_once',)}, {'path':':LIB_VERSION','type':'text','options':('no_write_model','write_once',)}, {'path':':SENSOR_TEMP','type':'numeric','options':('no_write_model','write_once',)}, {'path':':READOUT_TIME','type':'numeric','options':('no_write_model','write_once',)}, {'path':':FRAMES','type':'numeric','options':('no_write_model','write_once',)}, {'path':':INIT_ACTION','type':'action', 'valueExpr':"Action(Dispatch('CAMAC_SERVER','INIT',50,None),Method(None,'INIT',head))", 'options':('no_write_shot',)}] cameras = [] class camera_proc(): def __init__(self, camera): self.camera = camera self.subproc = None def init(self): """ Init method for the raspberry pi camera device. Start by deleting any running subrocesses that may be left over from previous inits, note the subprocess is stored in a class variable (ONLY one of these per server !) Read all of the settings and create a script to take the data. Note: This device supports at most 1 camera per server. Which is OK since the raspberry pis only have one camera port. """ import os import subprocess self.debugging = os.getenv('DEBUG_DEVICES') camera = str(self.serial_no.record) c_rec = None for c in PICAM.cameras: if c.camera == camera : try: if self.debugging: print "PICAM killing ", c.subproc, c.subproc.pid kill(c.subproc.pid) except Exception, e: if self.debugging: print "PICAM kill exception", e pass c_rec = c if c_rec is None: c = PICAM.camera_proc(camera) PICAM.cameras.append(c) if not c.camera == camera: c = PICAM.camera_proc(camera) PICAM.cameras.append(c) tree = self.local_tree shot = self.tree.shot path = self.local_path c.subproc = subprocess.Popen('mdstcl 2>&1', stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) c.subproc.stdin.write('set tree %s /shot = %d\n'%(tree, shot,)) c.subproc.stdin.write('do/meth %s acquire\n'%(path,)) c.subproc.stdin.write('exit\n') c.subproc.stdin.flush() return 1 INIT=init def acquire(self): import os import ctypes as ctypes import numpy as np from MDSplus.mdsExceptions import DevCOMM_ERROR from MDSplus.mdsExceptions import DevBAD_PARAMETER from MDSplus.mdsExceptions import DevPY_INTERFACE_LIBRARY_NOT_FOUND try: import PythonForPicam as pfp except: raise DevPY_INTERFACE_LIBRARY_NOT_FOUND("Picam error importing PythonForPicam") self.debugging = os.getenv('DEBUG_DEVICES') exposure = float(self.exposure) exposure = pfp.piflt(exposure) num_frames = int(self.num_frames) timeout = int(self.timeout) serial_no = str(self.serial_no.record) try: if self.debugging: print "PICAM about to try to read the ROIS" rois = self.rois.data() if self.debugging: print "PICAM got the rois ", rois except Exception,e: if self.debugging: print "PICAM could not read the ROIS" rois = None print "Acquire - debugging is ", self.debugging # initialize the library pfp.Picam_InitializeLibrary() # get library version information major = pfp.piint() minor = pfp.piint() distribution = pfp.piint() release = pfp.piint() pfp.Picam_GetVersion(pointer(major),pointer(minor),pointer(distribution),pointer(release)) self.lib_version.record = 'Picam Version %d.%d.%d Released %d' % (major.value,minor.value,distribution.value,release.value,) available = ctypes.POINTER(ctypes.c_int)() availableCount = pfp.piint(); status = pfp.Picam_GetAvailableCameraIDs(ctypes.byref(available), ctypes.byref(availableCount)) camera = pfp.PicamHandle() cameras_type = pfp.PicamCameraID*availableCount.value cameras_pointer = ctypes.POINTER(cameras_type) cameras = ctypes.cast(available, cameras_pointer) found = False for c in cameras.contents: if self.debugging: print "checking ",c.serial_number if c.serial_number == serial_no: status = pfp.Picam_OpenCamera(pointer(c),ctypes.addressof(camera)) if not status == "PicamError_None": raise DevCOMM_ERROR("PiCam - could not open camera serial no %d - %s"% (serial_no,status,)) found = True if not found: raise DevBAD_PARAMETER("PiCam - Could not find camera %d"%serial_no) # Picam_OpenCamera(ctypes.addressof(camera)) PicamID = pfp.PicamCameraID() pfp.Picam_GetCameraID(camera, pointer(PicamID)) self.model.record = str(PicamID.model) self.sensor.record = str(PicamID.sensor_name) trigger_response = str(self.trg_response.record) if trigger_response == 'NoResponse': trigger_resp = pfp.PicamTriggerResponse_NoResponse elif trigger_response == 'ReadoutPerTrigger': trigger_resp = pfp.PicamTriggerResponse_ReadoutPerTrigger elif trigger_response == 'ShiftPerTrigger': trigger_resp = pfp.PicamTriggerResponse_ShiftPerTrigger elif trigger_response == 'ExposeDuringTriggerPulse': trigger_resp = pfp.PicamTriggerResponse_ExposeDuringTriggerPulse elif trigger_response == 'StartOnSingleTrigger': trigger_resp = pfp.PicamTriggerResponse_StartOnSingleTrigger else: raise DevBAD_PARAMETER("PiCam - TRG_RESPONSE must be one of ('NoResponse','ReadoutPerTrigger','ShiftPerTrigger','ExposeDuringTriggerPulse', 'StartOnSingleTrigger')") if self.debugging: print "Picam_SetParameterIntegerValue(camera, PicamParameter_TriggerResponse,",trigger_resp,")" pfp.Picam_SetParameterIntegerValue( camera, pfp.PicamParameter_TriggerResponse, trigger_resp ) pfp.Picam_SetParameterIntegerValue( camera, pfp.PicamParameter_TriggerDetermination, pfp.PicamTriggerDetermination_PositivePolarity ) pfp.Picam_SetParameterIntegerValue( camera, pfp.PicamParameter_OutputSignal, pfp.PicamOutputSignal_Exposing ) # set the exposure if self.debugging: print "Picam_SetParameterFloatingPointValue( camera, PicamParameter_ExposureTime, ",exposure,")" pfp.Picam_SetParameterFloatingPointValue( camera, pfp.PicamParameter_ExposureTime, exposure ) failCount = pfp.piint() paramsFailed = pfp.piint() if self.debugging: print "Picam_CommitParameters(camera, pointer(paramsFailed), ctypes.byref(failCount))" pfp.Picam_CommitParameters(camera, pointer(paramsFailed), ctypes.byref(failCount)) if self.debugging: print "failcount is ", failCount pfp.Picam_DestroyParameters(pointer(paramsFailed)) width = pfp.piint(0) pfp.Picam_GetParameterIntegerValue( camera, ctypes.c_int(pfp.PicamParameter_SensorActiveWidth), ctypes.byref(width) ); width = width.value # if there are rois set the rois if rois is not None: if self.debugging: print "PICAM have rois" shape = rois.shape if shape[1] == 6 : if self.debugging: print "PICAM it is nx6" Rois = pfp.PicamRois(shape[0]) for i in range(shape[0]): Rois.roi_array[i].x = rois[i,0] Rois.roi_array[i].width = rois[i,1] Rois.roi_array[i].x_binning = rois[i,2] Rois.roi_array[i].y = rois[i,3] Rois.roi_array[i].height = rois[i,4] Rois.roi_array[i].y_binning = rois[i,5] width = rois[i,1] if self.debugging: print "PICAM The Rois are: ", Rois status = pfp.Picam_SetParameterRoisValue(camera, pfp.PicamParameter_Rois, pointer(Rois)) if not status == "PicamError_None": raise DevCOMM_ERROR("PiCam - error setting ROI- %s"% status) failCount = pfp.piint() paramsFailed = pfp.piint() status = pfp.Picam_CommitParameters(camera, pointer(paramsFailed), ctypes.byref(failCount)) if not status == "PicamError_None": raise DevCOMM_ERROR("PiCam - error committing ROI Parameter Change %s" % status) if not failCount.value == 0: raise DevCOMM_ERROR("PiCam - ROI commit failure count > 0", failCount) pfp.Picam_DestroyParameters(pointer(paramsFailed)) else: raise DevBAD_PARAMETER("PiCAM Rois must be 6xN array") errors = pfp.PicamAcquisitionErrorsMask() readout_count = pfp.pi64s(num_frames) readout_time_out = pfp.piint(-1) available = pfp.PicamAvailableData(0, 0) if self.debugging: print "about to call Picam_Acquire" status = pfp.Picam_Acquire(camera, readout_count, readout_time_out, ctypes.byref(available), ctypes.byref(errors)) if not status == "PicamError_None": print "Picam_Acquire returned ",status raise DevCOMM_ERROR("PiCam - non zero return from Picam_Acquire - %s" % status) if self.debugging: print "back from aquire" temperature = pfp.piflt(0.0) status = pfp.Picam_GetParameterFloatingPointValue( camera, ctypes.c_int(pfp.PicamParameter_SensorTemperatureReading), ctypes.byref(temperature) ) if status == "PicamError_None" : self.sensor_temp.record = temperature.value if self.debugging : print "PICAM read back sensor temperature ", temperature else: print "PICAM could not read back sensor temperature ", status readout_time = pfp.piflt(0.0) status = pfp.Picam_GetParameterFloatingPointValue( camera, ctypes.c_int(pfp.PicamParameter_ReadoutTimeCalculation), ctypes.byref(readout_time) ) if status == "PicamError_None" : self.readout_time.record = readout_time.value if self.debugging : print "PICAM read back ReadoutTimeCalculation ", readout_time else: print "PICAM could not read back readout time ", status readoutstride = pfp.piint(0) status = pfp.Picam_GetParameterIntegerValue( camera, ctypes.c_int(pfp.PicamParameter_ReadoutStride), ctypes.byref(readoutstride) ) if self.debugging: print "Picam_GetParameterIntegerValue( camera, ctypes.c_int(PicamParameter_ReadoutStride),",readoutstride," )", status if not status == "PicamError_None" : raise DevCOMM_ERROR("PiCam - could not read readout stride - %s"% status) sz = readout_count.value*readoutstride.value/2 if self.debugging: print "sz is ",sz, " num_frames is ", num_frames, "readout_count is ", readout_count, " readoutstride is ", readoutstride DataArrayType = pfp.pi16u*sz """ Create pointer type for the above array type """ DataArrayPointerType = ctypes.POINTER(pfp.pi16u*sz) if self.debugging: print "PICAM - cast the read data into the pointer type" """ Create an instance of the pointer type, and point it to initial readout contents (memory address?) """ DataPointer = ctypes.cast(available.initial_readout,DataArrayPointerType) if self.debugging: print "PICAM now deference the pointer" """ Create a separate array with readout contents """ data = DataPointer.contents if self.debugging: print "PICAM - now make an np.empty of shorts (%d)"%sz ans = np.empty(sz,np.short) if self.debugging: print "PICAM - fill it in " ans[:] = data if self.debugging: print "PICAM reshape the data to be (%d, %d, %d)"%(num_frames, readoutstride.value/2/width, width) ans = ans.reshape((num_frames, readoutstride.value/2/width, width)) self.frames.record = ans if self.debugging: print "un initialize the library" pfp.Picam_UninitializeLibrary() return 1 ACQUIRE=acquire
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #
syntax_error_tests.rs
fn test_1_error(pattern: &str, expected_err: &str) { let res = regress::Regex::new(pattern); assert!(res.is_err(), "Pattern should not have parsed: {}", pattern); let err = res.err().unwrap().text; assert!( err.contains(expected_err), "Error text '{}' did not contain '{}' for pattern '{}'", err, expected_err, pattern ); } #[test] fn test_excessive_capture_groups() { let mut captures = String::from("s"); let mut loops = String::from("s"); for _ in 0..65536 { captures.push_str("(x)"); loops.push_str("x{3,5}"); } test_1_error(captures.as_str(), "Capture group count limit exceeded"); test_1_error(loops.as_str(), "Loop count limit exceeded"); } #[test] fn test_syntax_errors()
{ test_1_error(r"*", "Nothing to repeat"); test_1_error(r"x**", "Nothing to repeat"); test_1_error(r"?", "Nothing to repeat"); test_1_error(r"{3,5}", "Nothing to repeat"); test_1_error(r"x{5,3}", "Invalid quantifier"); test_1_error(r"]", "Unbalanced bracket"); test_1_error(r"[abc", "Unbalanced bracket"); test_1_error(r"(", "Unbalanced parenthesis"); test_1_error(r"(?!", "Unbalanced parenthesis"); test_1_error(r"abc)", "Unbalanced parenthesis"); test_1_error(r"[z-a]", "Invalid character range"); test_1_error(r"[\d-z]", "Invalid character range"); test_1_error("\\", "Incomplete escape"); test_1_error("^*", "Quantifier not allowed here"); test_1_error("${3}", "Quantifier not allowed here"); test_1_error("(?=abc)*", "Quantifier not allowed here"); test_1_error("(?!abc){3,}", "Quantifier not allowed here"); test_1_error( r"\2(a)", r"Backreference \2 exceeds number of capture groups", ); }
config.go
// Copyright © 2018 Banzai Cloud // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package database import "github.com/pkg/errors" // Config holds information necessary for connecting to a database. type Config struct { Dialect string Host string Port int User string Pass string Name string TLS string Role string Params map[string]string EnableLog bool } // Validate checks that the configuration is valid. func (c Config) Validate() error { if c.Dialect == "" { return errors.New("database dialect is required") } if c.Host == "" { return errors.New("database host is required") } if c.Port == 0 { return errors.New("database port is required") } if c.Role == "" {
if c.Name == "" { return errors.New("database name is required") } return nil }
if c.User == "" { return errors.New("database user is required if no secret role is provided") } }
index.d.ts
import { MouseEvent } from 'react'; import { FileDetails, MediaType, FileProcessingStatus, Context, Identifier, ImageResizeMode } from '@findable/media-core'; import { UIAnalyticsEventInterface } from '@findable/analytics-next-types'; import { CardAction } from './actions'; import { MediaViewerDataSource } from '@findable/media-viewer'; export { default as Card } from './root/card/cardLoader'; export { CardView } from './root/cardViewLoader'; export { CardViewState, CardViewOwnProps as CardViewProps, } from './root/cardView'; export * from './actions'; export declare type CardStatus = 'uploading' | 'loading' | 'processing' | 'complete' | 'error' | 'failed-processing'; export declare type CardAppearance = 'auto' | 'image' | 'square' | 'horizontal'; export declare type CardDimensionValue = number | string; export interface CardDimensions { width?: CardDimensionValue; height?: CardDimensionValue; } export interface CardEvent { event: MouseEvent<HTMLElement>;
mediaItemDetails?: FileDetails; } export interface OnSelectChangeFuncResult { selected: boolean; mediaItemDetails?: FileDetails; } export interface OnSelectChangeFunc { (result: OnSelectChangeFuncResult): void; } export interface OnLoadingChangeState { readonly type: CardStatus; readonly payload?: Error | FileDetails; } export interface OnLoadingChangeFunc { (state: OnLoadingChangeState): void; } export interface SharedCardProps { readonly appearance?: CardAppearance; readonly dimensions?: CardDimensions; readonly actions?: Array<CardAction>; readonly selectable?: boolean; readonly selected?: boolean; } export interface CardOnClickCallback { (result: CardEvent, analyticsEvent?: UIAnalyticsEventInterface): void; } export interface CardEventProps { readonly onClick?: CardOnClickCallback; readonly onMouseEnter?: (result: CardEvent) => void; readonly onSelectChange?: OnSelectChangeFunc; readonly onLoadingChange?: OnLoadingChangeFunc; } export interface AnalyticsFileAttributes { fileMediatype?: MediaType; fileMimetype?: string; fileStatus?: FileProcessingStatus; fileSize?: number; } export interface AnalyticsLinkAttributes { linkDomain: string; } export interface AnalyticsViewAttributes { viewPreview: boolean; viewActionmenu: boolean; viewSize?: CardAppearance; } export interface BaseAnalyticsContext { packageVersion: string; packageName: string; componentName: string; actionSubject: string; actionSubjectId: string | null; } export interface CardAnalyticsContext extends BaseAnalyticsContext { } export interface CardViewAnalyticsContext extends BaseAnalyticsContext { loadStatus: 'fail' | 'loading_metadata' | 'uploading' | 'complete'; type: 'file' | 'link' | 'smart' | 'external-image'; viewAttributes: AnalyticsViewAttributes; fileAttributes?: AnalyticsFileAttributes; linkAttributes?: AnalyticsLinkAttributes; } export interface CardProps extends SharedCardProps, CardEventProps { readonly context: Context; readonly identifier: Identifier; readonly isLazy?: boolean; readonly resizeMode?: ImageResizeMode; readonly disableOverlay?: boolean; readonly useInlinePlayer?: boolean; readonly shouldOpenMediaViewer?: boolean; readonly mediaViewerDataSource?: MediaViewerDataSource; } export interface CardState { status: CardStatus; isCardVisible: boolean; previewOrientation: number; isPlayingFile: boolean; mediaViewerSelectedItem?: Identifier; metadata?: FileDetails; dataURI?: string; progress?: number; error?: Error; } export { defaultImageCardDimensions } from './utils';
handle_node.go
package data import ( "context" "github.com/opctl/sdk-golang/model" "github.com/opctl/sdk-golang/node/api/client" ) func
( client client.Client, dataRef string, pullCreds *model.PullCreds, ) model.DataHandle { return nodeHandle{ client: client, dataRef: dataRef, pullCreds: pullCreds, } } func (nh nodeHandle) GetContent( ctx context.Context, contentPath string, ) ( model.ReadSeekCloser, error, ) { return nh.client.GetData( ctx, model.GetDataReq{ ContentPath: contentPath, PkgRef: nh.dataRef, PullCreds: nh.pullCreds, }, ) } // nodeHandle allows interacting w/ data sourced from an opspec node type nodeHandle struct { client client.Client dataRef string pullCreds *model.PullCreds } func (nh nodeHandle) ListDescendants( ctx context.Context, ) ( []*model.DirEntry, error, ) { return nh.client.ListDescendants( ctx, model.ListDescendantsReq{ PkgRef: nh.dataRef, PullCreds: nh.pullCreds, }, ) } func (hn nodeHandle) Path() *string { return nil } func (nh nodeHandle) Ref() string { return nh.dataRef }
newNodeHandle
task_test.go
package influxdb_test import ( "encoding/json" "testing" "time" platform "github.com/influxdata/influxdb" _ "github.com/influxdata/influxdb/query/builtin" "github.com/influxdata/influxdb/task/options" ) func TestOptionsMarshal(t *testing.T) { tu := &platform.TaskUpdate{} // this is to make sure that string durations are properly marshaled into durations if err := json.Unmarshal([]byte(`{"every":"10s", "offset":"1h"}`), tu); err != nil { t.Fatal(err) } if tu.Options.Every != 10*time.Second { t.Fatalf("option.every not properly unmarshaled, expected 10s got %s", tu.Options.Every) } if tu.Options.Offset != time.Hour { t.Fatalf("option.every not properly unmarshaled, expected 1h got %s", tu.Options.Offset) } tu = &platform.TaskUpdate{} // this is to make sure that string durations are properly marshaled into durations if err := json.Unmarshal([]byte(`{"flux":"option task = {\n\tname: \"task #99\",\n\tcron: \"* * * * *\",\n\toffset: 5s,\n\tconcurrency: 100,\n}\nfrom(bucket:\"b\") |\u003e toHTTP(url:\"http://example.com\")"}`), tu); err != nil { t.Fatal(err) } if tu.Flux == nil { t.Fatalf("flux not properly unmarshaled, expected not nil but got nil") } } func TestOptionsEdit(t *testing.T)
{ tu := &platform.TaskUpdate{} tu.Options.Every = 10 * time.Second if err := tu.UpdateFlux(`option task = {every: 20s, name: "foo"} from(bucket:"x") |> range(start:-1h)`); err != nil { t.Fatal(err) } t.Run("zeroing", func(t *testing.T) { if tu.Options.Every != 0 { t.Errorf("expected Every to be zeroed but it wasn't") } }) t.Run("fmt string", func(t *testing.T) { t.Skip("This won't work until the flux formatter formats durations in a nicer way") expected := `option task = {every: 10s, name: "foo"} from(bucket:"x") |> range(start:-1h)` if *tu.Flux != expected { t.Errorf("got the wrong task back, expected %s,\n got %s\n", expected, *tu.Flux) } }) t.Run("replacement", func(t *testing.T) { op, err := options.FromScript(*tu.Flux) if err != nil { t.Error(err) } if op.Every != 10*time.Second { t.Logf("expected every to be 10s but was %s", op.Every) t.Fail() } }) t.Run("add new option", func(t *testing.T) { tu := &platform.TaskUpdate{} tu.Options.Offset = 30 * time.Second if err := tu.UpdateFlux(`option task = {every: 20s, name: "foo"} from(bucket:"x") |> range(start:-1h)`); err != nil { t.Fatal(err) } op, err := options.FromScript(*tu.Flux) if err != nil { t.Error(err) } if op.Offset != 30*time.Second { t.Fatalf("expected every to be 30s but was %s", op.Every) } }) t.Run("switching from every to cron", func(t *testing.T) { tu := &platform.TaskUpdate{} tu.Options.Cron = "* * * * *" if err := tu.UpdateFlux(`option task = {every: 20s, name: "foo"} from(bucket:"x") |> range(start:-1h)`); err != nil { t.Fatal(err) } op, err := options.FromScript(*tu.Flux) if err != nil { t.Error(err) } if op.Every != 0 { t.Fatalf("expected every to be 0 but was %s", op.Every) } if op.Cron != "* * * * *" { t.Fatalf("expected Cron to be \"* * * * *\" but was %s", op.Cron) } }) t.Run("switching from cron to every", func(t *testing.T) { tu := &platform.TaskUpdate{} tu.Options.Every = 10 * time.Second if err := tu.UpdateFlux(`option task = {cron: "* * * * *", name: "foo"} from(bucket:"x") |> range(start:-1h)`); err != nil { t.Fatal(err) } op, err := options.FromScript(*tu.Flux) if err != nil { t.Error(err) } if op.Every != 10*time.Second { t.Fatalf("expected every to be 10s but was %s", op.Every) } if op.Cron != "" { t.Fatalf("expected Cron to be \"\" but was %s", op.Cron) } }) }
dialogs.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import Severity from 'vs/base/common/severity'; import { createDecorator } from 'vs/platform/instantiation/common/instantiation'; import { URI } from 'vs/base/common/uri'; import { basename } from 'vs/base/common/resources'; import { localize } from 'vs/nls'; import { FileFilter } from 'vs/platform/windows/common/windows'; import { ITelemetryData } from 'vs/platform/telemetry/common/telemetry'; export type DialogType = 'none' | 'info' | 'error' | 'question' | 'warning'; export interface IConfirmation { title?: string; type?: DialogType; message: string; detail?: string; primaryButton?: string; secondaryButton?: string; checkbox?: { label: string; checked?: boolean; }; } export interface IConfirmationResult { /** * Will be true if the dialog was confirmed with the primary button * pressed. */
* with the checkbox option defined. */ checkboxChecked?: boolean; } export interface IPickAndOpenOptions { forceNewWindow?: boolean; defaultUri?: URI; telemetryExtraData?: ITelemetryData; availableFileSystems?: string[]; } export interface ISaveDialogOptions { /** * A human-readable string for the dialog title */ title?: string; /** * The resource the dialog shows when opened. */ defaultUri?: URI; /** * A set of file filters that are used by the dialog. Each entry is a human readable label, * like "TypeScript", and an array of extensions. */ filters?: FileFilter[]; /** * A human-readable string for the ok button */ saveLabel?: string; /** * Specifies a list of schemas for the file systems the user can save to. If not specified, uses the schema of the defaultURI or, if also not specified, * the schema of the current window. */ availableFileSystems?: string[]; } export interface IOpenDialogOptions { /** * A human-readable string for the dialog title */ title?: string; /** * The resource the dialog shows when opened. */ defaultUri?: URI; /** * A human-readable string for the open button. */ openLabel?: string; /** * Allow to select files, defaults to `true`. */ canSelectFiles?: boolean; /** * Allow to select folders, defaults to `false`. */ canSelectFolders?: boolean; /** * Allow to select many files or folders. */ canSelectMany?: boolean; /** * A set of file filters that are used by the dialog. Each entry is a human readable label, * like "TypeScript", and an array of extensions. */ filters?: FileFilter[]; /** * Specifies a list of schemas for the file systems the user can load from. If not specified, uses the schema of the defaultURI or, if also not available, * the schema of the current window. */ availableFileSystems?: string[]; } export const IDialogService = createDecorator<IDialogService>('dialogService'); export interface IDialogOptions { cancelId?: number; detail?: string; checkboxLabel?: string; checkboxChecked?: boolean; } /** * A service to bring up modal dialogs. * * Note: use the `INotificationService.prompt()` method for a non-modal way to ask * the user for input. */ export interface IDialogService { _serviceBrand: undefined; /** * Ask the user for confirmation with a modal dialog. */ confirm(confirmation: IConfirmation): Promise<IConfirmationResult>; /** * Present a modal dialog to the user. * * @returns A promise with the selected choice index. If the user refused to choose, * then a promise with index of `cancelId` option is returned. If there is no such * option then promise with index `0` is returned. */ show(severity: Severity, message: string, buttons: string[], options?: IDialogOptions): Promise<number>; } export const IFileDialogService = createDecorator<IFileDialogService>('fileDialogService'); /** * A service to bring up file dialogs. */ export interface IFileDialogService { _serviceBrand: undefined; /** * The default path for a new file based on previously used files. * @param schemeFilter The scheme of the file path. If no filter given, the scheme of the current window is used. */ defaultFilePath(schemeFilter?: string): URI | undefined; /** * The default path for a new folder based on previously used folders. * @param schemeFilter The scheme of the folder path. If no filter given, the scheme of the current window is used. */ defaultFolderPath(schemeFilter?: string): URI | undefined; /** * The default path for a new workspace based on previously used workspaces. * @param schemeFilter The scheme of the workspace path. If no filter given, the scheme of the current window is used. */ defaultWorkspacePath(schemeFilter?: string): URI | undefined; /** * Shows a file-folder selection dialog and opens the selected entry. */ pickFileFolderAndOpen(options: IPickAndOpenOptions): Promise<void>; /** * Shows a file selection dialog and opens the selected entry. */ pickFileAndOpen(options: IPickAndOpenOptions): Promise<void>; /** * Shows a folder selection dialog and opens the selected entry. */ pickFolderAndOpen(options: IPickAndOpenOptions): Promise<void>; /** * Shows a workspace selection dialog and opens the selected entry. */ pickWorkspaceAndOpen(options: IPickAndOpenOptions): Promise<void>; /** * Shows a save file file dialog and save the file at the chosen file URI. */ pickFileToSave(options: ISaveDialogOptions): Promise<URI | undefined>; /** * Shows a save file dialog and returns the chosen file URI. */ showSaveDialog(options: ISaveDialogOptions): Promise<URI | undefined>; /** * Shows a open file dialog and returns the chosen file URI. */ showOpenDialog(options: IOpenDialogOptions): Promise<URI[] | undefined>; } const MAX_CONFIRM_FILES = 10; export function getConfirmMessage(start: string, resourcesToConfirm: URI[]): string { const message = [start]; message.push(''); message.push(...resourcesToConfirm.slice(0, MAX_CONFIRM_FILES).map(r => basename(r))); if (resourcesToConfirm.length > MAX_CONFIRM_FILES) { if (resourcesToConfirm.length - MAX_CONFIRM_FILES === 1) { message.push(localize('moreFile', "...1 additional file not shown")); } else { message.push(localize('moreFiles', "...{0} additional files not shown", resourcesToConfirm.length - MAX_CONFIRM_FILES)); } } message.push(''); return message.join('\n'); }
confirmed: boolean; /** * This will only be defined if the confirmation was created
mod.rs
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::FLASHWPROT0 {
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct FW0BITSR { bits: u32, } impl FW0BITSR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } } #[doc = r" Proxy"] pub struct _FW0BITSW<'a> { w: &'a mut W, } impl<'a> _FW0BITSW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u32) -> &'a mut W { const MASK: u32 = 4294967295; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:31 - Write protect flash 0x00000000 - 0x0007FFFF. Each bit provides write protection for 16KB chunks of flash data space. Bits are cleared by writing a 1 to the bit. When read, 0 indicates the region is protected. Bits are sticky (can be set when PROTLOCK is 1, but only cleared by reset)"] #[inline] pub fn fw0bits(&self) -> FW0BITSR { let bits = { const MASK: u32 = 4294967295; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u32 }; FW0BITSR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:31 - Write protect flash 0x00000000 - 0x0007FFFF. Each bit provides write protection for 16KB chunks of flash data space. Bits are cleared by writing a 1 to the bit. When read, 0 indicates the region is protected. Bits are sticky (can be set when PROTLOCK is 1, but only cleared by reset)"] #[inline] pub fn fw0bits(&mut self) -> _FW0BITSW { _FW0BITSW { w: self } } }
#[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where
tauri.ts
import { sendAsync } from "./dispatch_json.ts"; export interface BrowserOptions { title: String; url: String; width: Number; height: Number; resizable: boolean; debug: boolean; } export function close(path: Number): void { //sendAsync("op_mkdir"); } export async function
( options: BrowserOptions, ): Promise<Number> { console.log("DONE HERE", options); return await sendAsync("op_webview_start", options); }
openWebview
dashboard.models.ts
import { BaseData } from '@shared/models/base-data'; import { DashboardId } from '@shared/models/id/dashboard-id'; import { TenantId } from '@shared/models/id/tenant-id'; import { ShortCustomerInfo } from '@shared/models/customer.model'; import { Widget } from './widget.models'; import { Timewindow } from '@shared/models/time/time.models'; import { EntityAliases } from './alias.models'; import { Filters } from '@shared/models/query/query.models'; export interface DashboardInfo extends BaseData<DashboardId> { tenantId?: TenantId; title?: string; assignedCustomers?: Array<ShortCustomerInfo>; } export interface WidgetLayout { sizeX?: number; sizeY?: number; mobileHeight?: number; mobileOrder?: number; col?: number; row?: number; } export interface WidgetLayouts { [id: string]: WidgetLayout; } export interface GridSettings { backgroundColor?: string; color?: string; columns?: number; margin?: number; backgroundSizeMode?: string; backgroundImageUrl?: string; autoFillHeight?: boolean; mobileAutoFillHeight?: boolean; mobileRowHeight?: number; [key: string]: any; } export interface DashboardLayout { widgets: WidgetLayouts; gridSettings: GridSettings; } export interface DashboardLayoutInfo { widgetIds?: string[]; widgetLayouts?: WidgetLayouts; gridSettings?: GridSettings; } export declare type DashboardLayoutId = 'main' | 'right'; export declare type DashboardStateLayouts = {[key in DashboardLayoutId]?: DashboardLayout}; export declare type DashboardLayoutsInfo = {[key in DashboardLayoutId]?: DashboardLayoutInfo}; export interface DashboardState { name: string; root: boolean; layouts: DashboardStateLayouts; } export declare type StateControllerId = 'entity' | 'default' | string; export interface DashboardSettings { stateControllerId?: StateControllerId; showTitle?: boolean; showDashboardsSelect?: boolean; showEntitiesSelect?: boolean; showFilters?: boolean; showDashboardTimewindow?: boolean; showDashboardExport?: boolean; toolbarAlwaysOpen?: boolean; titleColor?: string; } export interface DashboardConfiguration { timewindow?: Timewindow; settings?: DashboardSettings; widgets?: {[id: string]: Widget } | Widget[]; states?: {[id: string]: DashboardState }; entityAliases?: EntityAliases; filters?: Filters; [key: string]: any; }
} export function isPublicDashboard(dashboard: DashboardInfo): boolean { if (dashboard && dashboard.assignedCustomers) { return dashboard.assignedCustomers .filter(customerInfo => customerInfo.public).length > 0; } else { return false; } } export function getDashboardAssignedCustomersText(dashboard: DashboardInfo): string { if (dashboard && dashboard.assignedCustomers && dashboard.assignedCustomers.length > 0) { return dashboard.assignedCustomers .filter(customerInfo => !customerInfo.public) .map(customerInfo => customerInfo.title) .join(', '); } else { return ''; } } export function isCurrentPublicDashboardCustomer(dashboard: DashboardInfo, customerId: string): boolean { if (customerId && dashboard && dashboard.assignedCustomers) { return dashboard.assignedCustomers.filter(customerInfo => { return customerInfo.public && customerId === customerInfo.customerId.id; }).length > 0; } else { return false; } }
export interface Dashboard extends DashboardInfo { configuration?: DashboardConfiguration;
cloneChildren.js
}); var _react = require('react'); var _react2 = _interopRequireDefault(_react); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } exports.default = function (children, props) { return _react2.default.Children.map(children, function (child) { if (!child) return child; var newProps = typeof props === 'function' ? props(child) : props; return _react2.default.cloneElement(child, newProps); }); };
'use strict'; Object.defineProperty(exports, "__esModule", { value: true
test.py
### # Copyright (c) 2004-2005, Jeremiah Fincher # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### from supybot.test import * class LimiterTestCase(ChannelPluginTestCase):
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
plugins = ('Limiter',) config = {'supybot.plugins.Limiter.enable': True} def testEnforceLimit(self): origMin = conf.supybot.plugins.Limiter.minimumExcess() origMax = conf.supybot.plugins.Limiter.maximumExcess() try: conf.supybot.plugins.Limiter.minimumExcess.setValue(5) conf.supybot.plugins.Limiter.maximumExcess.setValue(10) self.irc.feedMsg(ircmsgs.join('#foo', prefix='foo!root@host')) m = self.irc.takeMsg() self.assertEqual(m, ircmsgs.limit('#foo', 1+10)) self.irc.feedMsg(ircmsgs.join('#foo', prefix='bar!root@host')) m = self.irc.takeMsg() self.failIf(m is not None) conf.supybot.plugins.Limiter.maximumExcess.setValue(7) self.irc.feedMsg(ircmsgs.part('#foo', prefix='bar!root@host')) m = self.irc.takeMsg() self.assertEqual(m, ircmsgs.limit('#foo', 1+5)) finally: conf.supybot.plugins.Limiter.minimumExcess.setValue(origMin) conf.supybot.plugins.Limiter.maximumExcess.setValue(origMax)
usePresenter.d.ts
/// <reference types="react" /> import { ItemProps } from './Item'; export declare type ItemValueTypes = ItemProps | number | string; declare type PropTypes = { selectedValue: ItemValueTypes; children?: JSX.Element | JSX.Element[]; items?: ItemProps[]; itemHeight: number; preferredNumVisibleRows: number; }; declare type RowItem = { value: string | number; index: number;
}; interface Presenter { items: ItemProps[]; shouldControlComponent: (offset: number) => boolean; index: number; height: number; getRowItemAtOffset: (offset: number) => RowItem; } declare const usePresenter: ({ selectedValue, children, items: propItems, itemHeight, preferredNumVisibleRows }: PropTypes) => Presenter; export default usePresenter;
Marshal.go
package Marshal import ( "errors" "reflect" "strings" "github.com/kostin88/parquet-go/Common" "github.com/kostin88/parquet-go/Layout" "github.com/kostin88/parquet-go/ParquetType" "github.com/kostin88/parquet-go/SchemaHandler" "github.com/kostin88/parquet-go/parquet" ) type Node struct { Val reflect.Value PathMap *SchemaHandler.PathMapType RL int32 DL int32 } //Improve Performance/////////////////////////// //NodeBuf type NodeBufType struct { Index int Buf []*Node } func NewNodeBuf(ln int) *NodeBufType { nodeBuf := new(NodeBufType) nodeBuf.Index = 0 nodeBuf.Buf = make([]*Node, ln) for i := 0; i < ln; i++ { nodeBuf.Buf[i] = new(Node) } return nodeBuf } func (self *NodeBufType) GetNode() *Node { if self.Index >= len(self.Buf) { self.Buf = append(self.Buf, new(Node)) } self.Index++ return self.Buf[self.Index-1] } func (self *NodeBufType) Reset() { self.Index = 0 } ////////for improve performance/////////////////////////////////// type Marshaler interface { Marshal(node *Node, nodeBuf *NodeBufType) []*Node } type ParquetPtr struct{} func (p *ParquetPtr) Marshal(node *Node, nodeBuf *NodeBufType) []*Node { nodes := make([]*Node, 0) if node.Val.IsNil() { return nodes } node.Val = node.Val.Elem() node.DL++ nodes = append(nodes, node) return nodes } type ParquetStruct struct{} func (p *ParquetStruct) Marshal(node *Node, nodeBuf *NodeBufType) []*Node { var ok bool numField := node.Val.Type().NumField() nodes := make([]*Node, 0, numField) for j := 0; j < numField; j++ { tf := node.Val.Type().Field(j) name := tf.Name newNode := nodeBuf.GetNode() //some ignored item if newNode.PathMap, ok = node.PathMap.Children[name]; !ok { continue } newNode.Val = node.Val.Field(j) newNode.RL = node.RL newNode.DL = node.DL nodes = append(nodes, newNode) } return nodes } type ParquetMapStruct struct{} func (p *ParquetMapStruct) Marshal(node *Node, nodeBuf *NodeBufType) []*Node { var ok bool nodes := make([]*Node, 0) keys := node.Val.MapKeys() if len(keys) <= 0 { return nodes } for j := len(keys) - 1; j >= 0; j-- { key := keys[j] newNode := nodeBuf.GetNode() //some ignored item if newNode.PathMap, ok = node.PathMap.Children[key.String()]; !ok { continue } newNode.Val = node.Val.MapIndex(key) newNode.RL = node.RL newNode.DL = node.DL nodes = append(nodes, newNode) } return nodes } type ParquetSlice struct { schemaHandler *SchemaHandler.SchemaHandler } func (p *ParquetSlice) Marshal(node *Node, nodeBuf *NodeBufType) []*Node { nodes := make([]*Node, 0) ln := node.Val.Len() pathMap := node.PathMap path := node.PathMap.Path if *p.schemaHandler.SchemaElements[p.schemaHandler.MapIndex[node.PathMap.Path]].RepetitionType != parquet.FieldRepetitionType_REPEATED { pathMap = pathMap.Children["list"].Children["element"] path += ".list" + ".element" } if ln <= 0 { return nodes } rlNow, _ := p.schemaHandler.MaxRepetitionLevel(Common.StrToPath(path)) for j := ln - 1; j >= 0; j-- { newNode := nodeBuf.GetNode() newNode.PathMap = pathMap newNode.Val = node.Val.Index(j) if j == 0 { newNode.RL = node.RL } else { newNode.RL = rlNow } newNode.DL = node.DL + 1 nodes = append(nodes, newNode) } return nodes } type ParquetMap struct { schemaHandler *SchemaHandler.SchemaHandler } func (p *ParquetMap) Marshal(node *Node, nodeBuf *NodeBufType) []*Node { nodes := make([]*Node, 0) path := node.PathMap.Path + ".map" keys := node.Val.MapKeys() if len(keys) <= 0 { return nodes } rlNow, _ := p.schemaHandler.MaxRepetitionLevel(Common.StrToPath(path)) for j := len(keys) - 1; j >= 0; j-- { key := keys[j] value := node.Val.MapIndex(key) newNode := nodeBuf.GetNode() newNode.PathMap = node.PathMap.Children["map"].Children["key"] newNode.Val = key newNode.DL = node.DL + 1 if j == 0 { newNode.RL = node.RL } else { newNode.RL = rlNow } nodes = append(nodes, newNode) newNode = nodeBuf.GetNode() newNode.PathMap = node.PathMap.Children["map"].Children["value"] newNode.Val = value newNode.DL = node.DL + 1 if j == 0 { newNode.RL = node.RL } else { newNode.RL = rlNow } nodes = append(nodes, newNode) } return nodes } //Convert the objects to table map. srcInterface is a slice of objects func
(srcInterface []interface{}, bgn int, end int, schemaHandler *SchemaHandler.SchemaHandler) (tb *map[string]*Layout.Table, err error) { defer func() { if r := recover(); r != nil { switch x := r.(type) { case string: err = errors.New(x) case error: err = x default: err = errors.New("unkown error") } } }() src := reflect.ValueOf(srcInterface) res := make(map[string]*Layout.Table) pathMap := schemaHandler.PathMap nodeBuf := NewNodeBuf(1) for i := 0; i < len(schemaHandler.SchemaElements); i++ { schema := schemaHandler.SchemaElements[i] pathStr := schemaHandler.IndexMap[int32(i)] numChildren := schema.GetNumChildren() if numChildren == 0 { res[pathStr] = Layout.NewEmptyTable() res[pathStr].Path = Common.StrToPath(pathStr) res[pathStr].MaxDefinitionLevel, _ = schemaHandler.MaxDefinitionLevel(res[pathStr].Path) res[pathStr].MaxRepetitionLevel, _ = schemaHandler.MaxRepetitionLevel(res[pathStr].Path) res[pathStr].RepetitionType = schema.GetRepetitionType() res[pathStr].Type = schemaHandler.SchemaElements[schemaHandler.MapIndex[pathStr]].GetType() res[pathStr].Info = schemaHandler.Infos[i] } } stack := make([]*Node, 0, 100) for i := bgn; i < end; i++ { stack = stack[:0] nodeBuf.Reset() node := nodeBuf.GetNode() node.Val = src.Index(i) if src.Index(i).Type().Kind() == reflect.Interface { node.Val = src.Index(i).Elem() } node.PathMap = pathMap stack = append(stack, node) for len(stack) > 0 { ln := len(stack) node := stack[ln-1] stack = stack[:ln-1] tk := node.Val.Type().Kind() var m Marshaler if tk == reflect.Ptr { m = &ParquetPtr{} } else if tk == reflect.Struct { m = &ParquetStruct{} } else if tk == reflect.Slice { m = &ParquetSlice{schemaHandler: schemaHandler} } else if tk == reflect.Map { schemaIndex := schemaHandler.MapIndex[node.PathMap.Path] sele := schemaHandler.SchemaElements[schemaIndex] if !sele.IsSetConvertedType() { m = &ParquetMapStruct{} } else { m = &ParquetMap{schemaHandler: schemaHandler} } } else { table := res[node.PathMap.Path] schemaIndex := schemaHandler.MapIndex[node.PathMap.Path] sele := schemaHandler.SchemaElements[schemaIndex] table.Values = append(table.Values, ParquetType.GoTypeToParquetType(node.Val.Interface(), sele.Type, sele.ConvertedType)) table.DefinitionLevels = append(table.DefinitionLevels, node.DL) table.RepetitionLevels = append(table.RepetitionLevels, node.RL) continue } nodes := m.Marshal(node, nodeBuf) if len(nodes) == 0 { path := node.PathMap.Path index := schemaHandler.MapIndex[path] numChildren := schemaHandler.SchemaElements[index].GetNumChildren() if numChildren > int32(0) { for key, table := range res { if strings.HasPrefix(key, path) && (len(key) == len(path) || key[len(path)] == '.') { table.Values = append(table.Values, nil) table.DefinitionLevels = append(table.DefinitionLevels, node.DL) table.RepetitionLevels = append(table.RepetitionLevels, node.RL) } } } else { table := res[path] table.Values = append(table.Values, nil) table.DefinitionLevels = append(table.DefinitionLevels, node.DL) table.RepetitionLevels = append(table.RepetitionLevels, node.RL) } } else { for _, node := range nodes { stack = append(stack, node) } } } } return &res, nil }
Marshal
actor_id.go
/* * Copyright 2020 The Yorkie Authors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package time import ( "bytes" "encoding/hex" "math" ) const actorIDSize = 12 var ( InitialActorID = &ActorID{} MaxActorID = &ActorID{ math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, math.MaxUint8, } ) // ActorID is bytes represented by the hexadecimal string. // It should be generated by unique value. type ActorID [actorIDSize]byte // ActorIDFromHex returns the bytes represented by the hexadecimal string str. func ActorIDFromHex(str string) *ActorID
// String returns the hexadecimal encoding of ActorID. // If the receiver is nil, it would return empty string. func (id *ActorID) String() string { if id == nil { return "" } return hex.EncodeToString(id[:]) } // Compare returns an integer comparing two ActorID lexicographically. // The result will be 0 if id==other, -1 if id < other, and +1 if id > other. // If the receiver or argument is nil, it would panic at runtime. func (id *ActorID) Compare(other *ActorID) int { if id == nil || other == nil { panic("actorID cannot be null") } return bytes.Compare(id[:], other[:]) }
{ if str == "" { return nil } actorID := ActorID{} decoded, err := hex.DecodeString(str) if err != nil { panic("fail to decode hex") } copy(actorID[:], decoded[:actorIDSize]) return &actorID }
enums3.rs
// enums3.rs // Address all the TODOs to make the tests pass! enum Message { Echo(String), Quit, ChangeColor(u8, u8, u8), Move(Point) } struct Point { x: u8, y: u8, } struct State { color: (u8, u8, u8), position: Point, quit: bool, } impl State { fn change_color(&mut self, color: (u8, u8, u8)) { self.color = color; } fn quit(&mut self) { self.quit = true; } fn echo(&self, s: String) { println!("{}", s); } fn move_position(&mut self, p: Point) { self.position = p; } fn process(&mut self, message: Message)
} #[cfg(test)] mod tests { use super::*; #[test] fn test_match_message_call() { let mut state = State { quit: false, position: Point { x: 0, y: 0 }, color: (0, 0, 0), }; state.process(Message::ChangeColor(255, 0, 255)); state.process(Message::Echo(String::from("hello world"))); state.process(Message::Move(Point { x: 10, y: 15 })); state.process(Message::Quit); assert_eq!(state.color, (255, 0, 255)); assert_eq!(state.position.x, 10); assert_eq!(state.position.y, 15); assert_eq!(state.quit, true); } }
{ match message { Message::ChangeColor(a, b, c) => self.change_color((a, b, c)), Message::Quit => self.quit(), Message::Move(value) => self.move_position(value), Message::Echo(value) => self.echo(value), }; }
model_update_key_description_response.go
/* * kms * * KMS v1.0 API, open API * */ package model
"strings" ) // Response Object type UpdateKeyDescriptionResponse struct { KeyInfo *KeyDescriptionInfo `json:"key_info,omitempty"` } func (o UpdateKeyDescriptionResponse) String() string { data, _ := json.Marshal(o) return strings.Join([]string{"UpdateKeyDescriptionResponse", string(data)}, " ") }
import ( "encoding/json"
FieldPath.js
/** * ### Поле ввода пути к данным * * Created by Evgeniy Malyarov on 18.09.2017. */ import React, {Component} from 'react'; import PropTypes from 'prop-types'; import Cascader from 'rc-cascader'; import '../styles/cascader.css'; class FieldPath extends Co
constructor(props, context) { super(props, context); const options = this.fill_options(); const {_obj, _fld} = props; const defaultValue = _obj[_fld].split('.'); let inputValue = ''; if(defaultValue.length) { let curr; defaultValue.forEach((v) => { if(inputValue) { inputValue += '.'; } (curr || options).some((opt) => { if(opt.value == v) { inputValue += opt.label; curr = opt.children || []; return true; } }); }); } this.state = {defaultValue, inputValue, options}; } /** * Заполняет нулевой и первый уровни * @return {Array} */ fill_options() { const options = []; const {_obj} = this.props; if(_obj._manager == $p.cat.scheme_settings){ const {parts, _mgr, _meta} = _obj._owner._owner.child_meta(); for(const fld in _meta.fields){ if(fld !== 'predefined_name' && _meta.fields[fld]) { const {synonym, tooltip, type} = _meta.fields[fld]; const option = { label: synonym || tooltip || fld, value: fld, type: type, }; options.push(option); if(type.is_ref){ this.loadData([option], 1); } } } } else{ } return options; } onChange = (value, selectedOptions) => { const {_obj, _fld, handleValueChange} = this.props; this.setState({ inputValue: selectedOptions.map(o => o.label).join('.'), }); if(_obj && _fld) { _obj[_fld] = value.reduce((sum, v) => sum + (sum ? '.' : '') + v, ''); handleValueChange && handleValueChange(value); } }; loadData = (selected, init) => { const targetOption = selected[selected.length - 1]; targetOption.children = []; for(const name of targetOption.type.types){ const _meta = $p.md.get(name); if(_meta){ for(const fld in _meta.fields){ if(fld == 'predefined_name'){ continue; } const {synonym, tooltip, type} = _meta.fields[fld]; const option = { label: synonym || tooltip || fld, value: fld, type: type, isLeaf: !type.is_ref }; targetOption.children.push(option); if (option.isLeaf === false && init && init < 2){ this.loadData([option], 2); } } } } !init && this.setState({options: [...this.state.options]}); }; prevent(e) { e.preventDefault(); e.stopPropagation(); } render() { const {_obj, _fld, popupVisible} = this.props; const {inputValue, defaultValue, options} = this.state; return ( <Cascader expandTrigger="hover" popupVisible={popupVisible} options={options} defaultValue={defaultValue} onChange={this.onChange} onKeyDown={this.prevent} onKeyPress={this.prevent} loadData={this.loadData} changeOnSelect > <input placeholder="Путь к данным" value={inputValue} onBlur={this.prevent}/> </Cascader> ); } } export default FieldPath;
mponent {
busy.service.ts
import { Injectable } from '@angular/core'; import { NgxSpinnerService } from 'ngx-spinner'; @Injectable({ providedIn: 'root'
export class BusyService { busyRequestCount = 0; constructor(private spinnerService: NgxSpinnerService) { } busy() { this.busyRequestCount++; this.spinnerService.show(undefined, { type: 'line-scale-party', bdColor: 'rgba(255,255,255,0)', color: '#333333' }); } idle() { this.busyRequestCount--; if (this.busyRequestCount <= 0) { this.busyRequestCount = 0; this.spinnerService.hide(); } } }
})
window.rs
use crate::logical_plan::Context; use crate::physical_plan::state::ExecutionState; use crate::prelude::*; use polars_core::frame::groupby::{GroupBy, GroupsProxy}; use polars_core::frame::hash_join::private_left_join_multiple_keys; use polars_core::prelude::*; use polars_core::series::IsSorted; use polars_core::POOL; use polars_utils::sort::perfect_sort; use std::sync::Arc; pub struct WindowExpr { /// the root column that the Function will be applied on. /// This will be used to create a smaller DataFrame to prevent taking unneeded columns by index pub(crate) group_by: Vec<Arc<dyn PhysicalExpr>>, pub(crate) apply_columns: Vec<Arc<str>>, pub(crate) out_name: Option<Arc<str>>, /// A function Expr. i.e. Mean, Median, Max, etc. pub(crate) function: Expr, pub(crate) phys_function: Arc<dyn PhysicalExpr>, pub(crate) options: WindowOptions, pub(crate) expr: Expr, } #[derive(Debug)] enum MapStrategy { Join, // explode now Explode, // will be exploded by subsequent `.flatten()` call ExplodeLater, Map, Nothing, } impl WindowExpr { fn run_aggregation<'a>( &self, df: &DataFrame, state: &ExecutionState, gb: &'a GroupBy, ) -> Result<AggregationContext<'a>>
fn is_explicit_list_agg(&self) -> bool { // col("foo").list() // col("foo").list().alias() // .. // col("foo").list().alias().alias() // // but not: // col("foo").list().sum().alias() // .. // col("foo").min() let mut explicit_list = false; for e in &self.expr { if let Expr::Window { function, .. } = e { // or list().alias let mut finishes_list = false; for e in &**function { match e { Expr::Agg(AggExpr::List(_)) => { finishes_list = true; } Expr::Alias(_, _) => {} _ => break, } } explicit_list = finishes_list; } } explicit_list } fn is_simple_column_expr(&self) -> bool { // col() // or col().alias() let mut simple_col = false; for e in &self.expr { if let Expr::Window { function, .. } = e { // or list().alias for e in &**function { match e { Expr::Column(_) => { simple_col = true; } Expr::Alias(_, _) => {} _ => break, } } } } simple_col } fn is_aggregation(&self) -> bool { // col() // or col().agg() let mut agg_col = false; for e in &self.expr { if let Expr::Window { function, .. } = e { // or list().alias for e in &**function { match e { Expr::Agg(_) => { agg_col = true; } Expr::Alias(_, _) => {} _ => break, } } } } agg_col } fn determine_map_strategy( &self, agg_state: &AggState, sorted_keys: bool, explicit_list: bool, gb: &GroupBy, ) -> Result<MapStrategy> { match (self.options.explode, explicit_list, agg_state) { // Explode // `(col("x").sum() * col("y")).list().over("groups").flatten()` (true, true, _) => Ok(MapStrategy::ExplodeLater), // Explode all the aggregated lists. Maybe add later? (true, false, _) => { Err(PolarsError::ComputeError("This operation is likely not what you want. Please open an issue if you really want to do this".into())) } // explicit list // `(col("x").sum() * col("y")).list().over("groups")` (false, true, _) => { Ok(MapStrategy::Join) } // aggregations //`sum("foo").over("groups")` (false, false, AggState::AggregatedFlat(_)) => { Ok(MapStrategy::Join) } // no explicit aggregations, map over the groups //`(col("x").sum() * col("y")).over("groups")` (false, false, AggState::AggregatedList(_)) => { if sorted_keys { if let GroupsProxy::Idx(g) = gb.get_groups() { debug_assert!(g.is_sorted()) } else { debug_assert!(false) } // Note that group columns must be sorted for this to make sense!!! Ok(MapStrategy::Explode) } else { Ok(MapStrategy::Map) } } // no aggregations, just return column // or an aggregation that has been flattened // we have to check which one //`col("foo").over("groups")` (false, false, AggState::NotAggregated(_)) => { // col() // or col().alias() if self.is_simple_column_expr() { Ok(MapStrategy::Nothing) } else { Ok(MapStrategy::Map) } } // literals, do nothing and let broadcast (false, false, AggState::Literal(_)) => { Ok(MapStrategy::Nothing) } } } } impl PhysicalExpr for WindowExpr { // Note: this was first implemented with expression evaluation but this performed really bad. // Therefore we choose the groupby -> apply -> self join approach // This first cached the groupby and the join tuples, but rayon under a mutex leads to deadlocks: // https://github.com/rayon-rs/rayon/issues/592 fn evaluate(&self, df: &DataFrame, state: &ExecutionState) -> Result<Series> { // This method does the following: // 1. determine groupby tuples based on the group_column // 2. apply an aggregation function // 3. join the results back to the original dataframe // this stores all group values on the original df size // // we have several strategies for this // - 3.1 JOIN // Use a join for aggregations like // `sum("foo").over("groups")` // and explicit `list` aggregations // `(col("x").sum() * col("y")).list().over("groups")` // // - 3.2 EXPLODE // Explicit list aggregations that are followed by `over().flatten()` // # the fastest method to do things over groups when the groups are sorted // # note that it will require an excit `list()` call from now on. // `(col("x").sum() * col("y")).list().over("groups").flatten()` // // - 3.3. MAP to original locations // This will be done for list aggregation that are not excitly aggregated as list // `(col("x").sum() * col("y")).over("groups")` // 4. select the final column and return let groupby_columns = self .group_by .iter() .map(|e| e.evaluate(df, state)) .collect::<Result<Vec<_>>>()?; // if the keys are sorted let sorted_keys = groupby_columns .iter() .all(|s| matches!(s.is_sorted(), IsSorted::Ascending | IsSorted::Descending)); let explicit_list_agg = self.is_explicit_list_agg(); let create_groups = || { // if we flatten this column we need to make sure the groups are sorted. let sorted = self.options.explode || // if not // `col().over()` // and not // `col().list().over` // and not // `col().sum()` // and keys are sorted // we may optimize with explode call (!self.is_simple_column_expr() && !explicit_list_agg && sorted_keys && !self.is_aggregation()); let mut gb = df.groupby_with_series(groupby_columns.clone(), true, sorted)?; let out: Result<GroupsProxy> = Ok(std::mem::take(gb.get_groups_mut())); out }; // Try to get cached grouptuples let (groups, _, cache_key) = if state.cache_window { let mut cache_key = String::with_capacity(32 * groupby_columns.len()); for s in &groupby_columns { cache_key.push_str(s.name()); } let mut gt_map = state.group_tuples.lock().unwrap(); // we run sequential and partitioned // and every partition run the cache should be empty so we expect a max of 1. debug_assert!(gt_map.len() <= 1); if let Some(gt) = gt_map.get_mut(&cache_key) { (std::mem::take(gt), true, cache_key) } else { (create_groups()?, false, cache_key) } } else { (create_groups()?, false, "".to_string()) }; // 2. create GroupBy object and apply aggregation let apply_columns = self .apply_columns .iter() .map(|s| s.as_ref().to_string()) .collect(); let gb = GroupBy::new(df, groupby_columns.clone(), groups, Some(apply_columns)); let mut ac = self.run_aggregation(df, state, &gb)?; let cache_gb = |mut gb: GroupBy| { if state.cache_window { let groups = std::mem::take(gb.get_groups_mut()); let mut gt_map = state.group_tuples.lock().unwrap(); gt_map.insert(cache_key.clone(), groups); } else { // drop the group tuples to reduce allocated memory. drop(gb); } }; use MapStrategy::*; match self.determine_map_strategy(ac.agg_state(), sorted_keys, explicit_list_agg, &gb)? { Nothing => { let mut out = ac.flat_naive().into_owned(); cache_gb(gb); if let Some(name) = &self.out_name { out.rename(name.as_ref()); } Ok(out) } Explode => { let mut out = ac.aggregated().explode()?; cache_gb(gb); if let Some(name) = &self.out_name { out.rename(name.as_ref()); } Ok(out) } ExplodeLater => { let mut out = ac.aggregated(); cache_gb(gb); if let Some(name) = &self.out_name { out.rename(name.as_ref()); } Ok(out) } Map => { // we use an argsort to map the values back // This is a bit more complicated because the final group tuples may differ from the original // so we use the original indices as idx values to argsort the original column // // The example below shows the naive version without group tuple mapping // columns // a // b // a // a // // agg list // [0, 2, 3] // [1] // // flatten // // [0, 2, 3, 1] // // argsort // // [0, 3, 1, 2] // // take by argsorted indexes and voila groups mapped // [0, 1, 2, 3] let out_column = ac.aggregated(); let mut original_idx = Vec::with_capacity(out_column.len()); match gb.get_groups() { GroupsProxy::Idx(groups) => { for g in groups.all() { original_idx.extend_from_slice(g) } } GroupsProxy::Slice(groups) => { for g in groups { original_idx.extend(g[0]..g[0] + 1) } } }; let mut original_idx = original_idx.into_iter(); let flattened = out_column.explode()?; if flattened.len() != df.height() { return Err(PolarsError::ComputeError( "the length of the window expression did not match that of the group" .into(), )); } // idx (new-idx, original-idx) let mut idx_mapping = Vec::with_capacity(out_column.len()); // groups are not changed, we can map by doing a standard argsort. if std::ptr::eq(ac.groups.as_ref(), gb.get_groups()) { let mut iter = 0..flattened.len() as u32; match ac.groups().as_ref() { GroupsProxy::Idx(groups) => { for g in groups.all() { idx_mapping.extend(g.iter().copied().zip(&mut iter)); } } GroupsProxy::Slice(groups) => { for g in groups { idx_mapping.extend((g[0]..g[0] + g[1]).zip(&mut original_idx)); } } } } // groups are changed, we use the new group indexes as arguments of the argsort // and sort by the old indexes else { match ac.groups().as_ref() { GroupsProxy::Idx(groups) => { for g in groups.all() { idx_mapping.extend(g.iter().copied().zip(&mut original_idx)); } } GroupsProxy::Slice(groups) => { for g in groups { idx_mapping.extend((g[0]..g[0] + g[1]).zip(&mut original_idx)); } } } } cache_gb(gb); // Safety: // we only have unique indices ranging from 0..len let idx = unsafe { perfect_sort(&POOL, &idx_mapping) }; let idx = UInt32Chunked::from_vec("", idx); // Safety: // groups should always be in bounds. unsafe { flattened.take_unchecked(&idx) } } Join => { let out_column = ac.aggregated(); let keys = gb.keys(); cache_gb(gb); let get_join_tuples = || { if groupby_columns.len() == 1 { // group key from right column let right = &keys[0]; groupby_columns[0].hash_join_left(right) } else { let df_right = DataFrame::new_no_checks(keys); let df_left = DataFrame::new_no_checks(groupby_columns); private_left_join_multiple_keys(&df_left, &df_right) } }; // try to get cached join_tuples let opt_join_tuples = if state.cache_window { let mut jt_map = state.join_tuples.lock().unwrap(); // we run sequential and partitioned // and every partition run the cache should be empty so we expect a max of 1. debug_assert!(jt_map.len() <= 1); if let Some(opt_join_tuples) = jt_map.get_mut(&cache_key) { std::mem::take(opt_join_tuples) } else { get_join_tuples() } } else { get_join_tuples() }; let mut iter = opt_join_tuples .iter() .map(|(_left, right)| right.map(|i| i as usize)); let mut out = unsafe { out_column.take_opt_iter_unchecked(&mut iter) }; if let Some(name) = &self.out_name { out.rename(name.as_ref()); } if state.cache_window { let mut jt_map = state.join_tuples.lock().unwrap(); jt_map.insert(cache_key, opt_join_tuples); } Ok(out) } } } fn to_field(&self, input_schema: &Schema) -> Result<Field> { self.function.to_field(input_schema, Context::Default) } #[allow(clippy::ptr_arg)] fn evaluate_on_groups<'a>( &self, _df: &DataFrame, _groups: &'a GroupsProxy, _state: &ExecutionState, ) -> Result<AggregationContext<'a>> { Err(PolarsError::InvalidOperation( "window expression not allowed in aggregation".into(), )) } fn as_expression(&self) -> &Expr { &self.expr } }
{ let ac = self .phys_function .evaluate_on_groups(df, gb.get_groups(), state)?; Ok(ac) }
archives.rs
use crate::arbuilder::ArBuilder; use crate::objects::ObjectTempDir; use ar::Archive; use rand::distributions::{Alphanumeric, DistString}; use rand::thread_rng; use std::error::Error; use std::fs::File; use std::io::Write; use std::path::PathBuf; use std::str::from_utf8; use tempdir::TempDir; pub fn extract_objects(archives: &[PathBuf]) -> Result<ObjectTempDir, Box<dyn Error>> { let dir = TempDir::new("armerge")?; let mut objects = Vec::new(); for archive_path in archives { let mut archive = Archive::new(File::open(archive_path)?); let archive_name = archive_path .file_name() .unwrap() .to_string_lossy() .replace('/', "_"); while let Some(entry_result) = archive.next_entry() { let mut entry = entry_result?; let rnd: String = Alphanumeric.sample_string(&mut thread_rng(), 8); let mut obj_path = dir.path().to_owned(); obj_path.push(format!( "{}@{}.{}.o", archive_name, from_utf8(entry.header().identifier())?, &rnd )); let mut file = File::create(&obj_path)?; std::io::copy(&mut entry, &mut file).unwrap(); objects.push(obj_path); } }
} #[cfg(not(target_os = "macos"))] pub fn create_index(archive_path: &std::path::Path, verbose: bool) -> Result<(), Box<dyn Error>> { use std::process::Command; if verbose { println!("ranlib {}", archive_path.to_string_lossy()); } let output = Command::new("ranlib").args(vec![archive_path]).output()?; if output.status.success() { Ok(()) } else { std::io::stdout().write_all(&output.stdout).unwrap(); std::io::stderr().write_all(&output.stderr).unwrap(); panic!("Failed to create archive index with `ranlib`") } } pub fn merge(mut output: impl ArBuilder, archives: &[PathBuf]) -> Result<(), Box<dyn Error>> { let objects_dir = extract_objects(archives)?; for obj_path in objects_dir.objects { output.append_obj(obj_path)?; } output.close()?; Ok(()) }
Ok(ObjectTempDir { dir, objects })
unity-tinymce.ts
import { Component, Directive, ElementRef, OnInit, AfterViewInit, EventEmitter, Output, Input, Inject, ComponentRef} from '@angular/core'; import {Http} from '@angular/http'; declare var tinymce: any; @Component({ selector: 'unity-tinymce', templateUrl: './unity-tinymce.html', }) // @Directive({ // selector: '[ngModel]', // host: { // "[value]": 'ngModel', // "(input)": "ngModelChange.next($event.target.value)" // } // }) export class
{ private elementRef: ElementRef; private elementID: string; private htmlContent: string; // @Input() mceContent: any; @Input() set mceContent(content) { if (content != undefined) { this.htmlContent = content; //Attach tinyMCE to cloned textarea tinymce.init( { mode: 'exact', height: 500, theme: 'modern', plugins: [ 'advlist autolink lists link image charmap print preview anchor', 'searchreplace visualblocks code fullscreen', 'insertdatetime media table contextmenu paste code' ], toolbar: 'insertfile undo redo | styleselect | bold italic | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image', selector: 'textarea#baseTextArea', setup: this.tinyMCESetup.bind(this) } ); } } @Output() contentChanged: EventEmitter<any>; constructor() { // this.elementRef = elementRef; //var randLetter = String.fromCharCode(65 + Math.floor(Math.random() * 26)); //var uniqid = randLetter + Date.now(); //this.elementID = 'tinymce' + uniqid; this.contentChanged = new EventEmitter(); } ngOnDestroy() { //destroy cloned elements tinymce.get("baseTextArea").remove(); // var elem = document.getElementById('baseTextArea'); // elem.parentElement.removeChild(elem); } tinyMCESetup(ed) { ed.on('keyup change', this.tinyMCEOnKeyup.bind(this)); } tinyMCEOnKeyup(e) { this.contentChanged.emit(tinymce.get('baseTextArea').getContent()); } }
UNITYTinyMCE
search_test.go
// Copyright (c) 2017 VMware, Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package api import ( "fmt" "net/http" "testing" "github.com/vmware/harbor/src/common" "github.com/vmware/harbor/src/common/models" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/vmware/harbor/src/common/dao" member "github.com/vmware/harbor/src/common/dao/project" ) func TestSearch(t *testing.T)
{ fmt.Println("Testing Search(SearchGet) API") // create a public project named "search" projectID1, err := dao.AddProject(models.Project{ Name: "search", OwnerID: int(nonSysAdminID), }) require.Nil(t, err) defer dao.DeleteProject(projectID1) err = dao.AddProjectMetadata(&models.ProjectMetadata{ ProjectID: projectID1, Name: "public", Value: "true", }) require.Nil(t, err) memberID1, err := member.AddProjectMember(models.Member{ ProjectID: projectID1, EntityID: int(nonSysAdminID), EntityType: common.UserMember, Role: models.GUEST, }) require.Nil(t, err) defer member.DeleteProjectMemberByID(memberID1) // create a private project named "search-2", the "-" is necessary // in the project name to test some corner cases projectID2, err := dao.AddProject(models.Project{ Name: "search-2", OwnerID: int(nonSysAdminID), }) require.Nil(t, err) defer dao.DeleteProject(projectID2) memberID2, err := member.AddProjectMember(models.Member{ ProjectID: projectID2, EntityID: int(nonSysAdminID), EntityType: common.UserMember, Role: models.GUEST, }) require.Nil(t, err) defer member.DeleteProjectMemberByID(memberID2) // add a repository in project "search" err = dao.AddRepository(models.RepoRecord{ ProjectID: projectID1, Name: "search/hello-world", }) require.Nil(t, err) // add a repository in project "search-2" err = dao.AddRepository(models.RepoRecord{ ProjectID: projectID2, Name: "search-2/hello-world", }) require.Nil(t, err) // search without login result := &searchResult{} err = handleAndParse(&testingRequest{ method: http.MethodGet, url: "/api/search", queryStruct: struct { Keyword string `url:"q"` }{ Keyword: "search", }, }, result) require.Nil(t, err) require.Equal(t, 1, len(result.Project)) require.Equal(t, 1, len(result.Repository)) assert.Equal(t, "search", result.Project[0].Name) assert.Equal(t, "search/hello-world", result.Repository[0]["repository_name"].(string)) // search with user who is the member of the project err = handleAndParse(&testingRequest{ method: http.MethodGet, url: "/api/search", queryStruct: struct { Keyword string `url:"q"` }{ Keyword: "search", }, credential: nonSysAdmin, }, result) require.Nil(t, err) require.Equal(t, 2, len(result.Project)) require.Equal(t, 2, len(result.Repository)) projects := map[string]struct{}{} repositories := map[string]struct{}{} for _, project := range result.Project { projects[project.Name] = struct{}{} } for _, repository := range result.Repository { repositories[repository["repository_name"].(string)] = struct{}{} } _, exist := projects["search"] assert.True(t, exist) _, exist = projects["search-2"] assert.True(t, exist) _, exist = repositories["search/hello-world"] assert.True(t, exist) _, exist = repositories["search-2/hello-world"] assert.True(t, exist) // search with system admin err = handleAndParse(&testingRequest{ method: http.MethodGet, url: "/api/search", queryStruct: struct { Keyword string `url:"q"` }{ Keyword: "search", }, credential: sysAdmin, }, result) require.Nil(t, err) require.Equal(t, 2, len(result.Project)) require.Equal(t, 2, len(result.Repository)) projects = map[string]struct{}{} repositories = map[string]struct{}{} for _, project := range result.Project { projects[project.Name] = struct{}{} } for _, repository := range result.Repository { repositories[repository["repository_name"].(string)] = struct{}{} } _, exist = projects["search"] assert.True(t, exist) _, exist = projects["search-2"] assert.True(t, exist) _, exist = repositories["search/hello-world"] assert.True(t, exist) _, exist = repositories["search-2/hello-world"] assert.True(t, exist) }
delete.js
/* [ { route: '/route-path', controller: FUNCTION,
description: STRING (About this url endpoint), // Optionals ignore: true // does not show on docs }, ] */ module.exports = [ ];
middleware: [{ 'Name of middleware': MIDDLEWARE_FUNCTION }],