column     type    length (min - max)
---------  ------  ------------------
file_name  string  3 - 137
prefix     string  0 - 918k
suffix     string  0 - 962k
middle     string  0 - 812k
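Each row below pairs a file_name with a prefix/suffix/middle split of one source file. A minimal sketch of how rows in this layout are typically consumed; the dataset path "user/fim-corpus" and the <fim_*> sentinel strings are placeholders, not names taken from this dataset:

from datasets import load_dataset

ds = load_dataset("user/fim-corpus", split="train")  # hypothetical path
row = ds[0]
# Concatenating the three spans in file order recovers the original source file.
original = row["prefix"] + row["middle"] + row["suffix"]
# For fill-in-the-middle training, the spans are rearranged around model-specific
# sentinel tokens and the model is trained to emit the middle.
prompt = f"<fim_prefix>{row['prefix']}<fim_suffix>{row['suffix']}<fim_middle>"
target = row["middle"]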
mbv2_th_openpose.py
import tensorflow as tf
import tensorlayer as tl
from tensorlayer import layers
from tensorlayer.models import Model
from tensorlayer.layers import BatchNorm2d, Conv2d, DepthwiseConv2d, LayerList, MaxPool2d
from ..utils import tf_repeat
from ..define import CocoPart, CocoLimb

initial_w = tl.initializers.random_normal(stddev=0.01)
initial_b = tl.initializers.constant(value=0.0)

class MobilenetThinOpenpose(Model):
    def __init__(self, parts=CocoPart, limbs=CocoLimb, colors=None, n_pos=19, n_limbs=19, num_channels=128,
                 hin=368, win=368, hout=46, wout=46, backbone=None, pretraining=False, data_format="channels_first"):
        super().__init__()
        self.num_channels = num_channels
        self.parts = parts
        self.limbs = limbs
        self.n_pos = n_pos
        self.colors = colors
        self.n_limbs = n_limbs
        self.n_confmaps = n_pos
        self.n_pafmaps = 2 * n_limbs
        self.hin = hin
        self.win = win
        self.hout = hout
        self.wout = wout
        self.data_format = data_format
        if self.data_format == "channels_first":
        else:
            self.concat_dim = -1
        if backbone == None:
            self.backbone = self.Mobilenetv2_variant(data_format=self.data_format)
        else:
            self.backbone = backbone(scale_size=8, pretraining=pretraining, data_format=self.data_format)
        self.init_stage = self.Init_stage(n_confmaps=self.n_confmaps, in_channels=self.backbone.out_channels, data_format=self.data_format)
        self.refinement_stage_1 = self.Refinement_stage(n_confmaps=self.n_confmaps, n_pafmaps=self.n_pafmaps, in_channels=self.backbone.out_channels + self.n_confmaps + self.n_pafmaps, data_format=self.data_format)
        self.refinement_stage_2 = self.Refinement_stage(n_confmaps=self.n_confmaps, n_pafmaps=self.n_pafmaps, in_channels=self.backbone.out_channels + self.n_confmaps + self.n_pafmaps, data_format=self.data_format)
        self.refinement_stage_3 = self.Refinement_stage(n_confmaps=self.n_confmaps, n_pafmaps=self.n_pafmaps, in_channels=self.backbone.out_channels + self.n_confmaps + self.n_pafmaps, data_format=self.data_format)
        self.refinement_stage_4 = self.Refinement_stage(n_confmaps=self.n_confmaps, n_pafmaps=self.n_pafmaps, in_channels=self.backbone.out_channels + self.n_confmaps + self.n_pafmaps, data_format=self.data_format)
        self.refinement_stage_5 = self.Refinement_stage(n_confmaps=self.n_confmaps, n_pafmaps=self.n_pafmaps, in_channels=self.backbone.out_channels + self.n_confmaps + self.n_pafmaps, data_format=self.data_format)

    @tf.function
    def forward(self, x, is_train=False, stage_num=5, domainadapt=False):
        conf_list = []
        paf_list = []
        backbone_features = self.backbone.forward(x)
        conf_map, paf_map = self.init_stage.forward(backbone_features)
        conf_list.append(conf_map)
        paf_list.append(paf_map)
        for refinement_stage_idx in range(1, stage_num + 1):
            x = tf.concat([backbone_features, conf_list[-1], paf_list[-1]], self.concat_dim)
            conf_map, paf_map = eval(f"self.refinement_stage_{refinement_stage_idx}.forward(x)")
            conf_list.append(conf_map)
            paf_list.append(paf_map)
        if domainadapt:
            return conf_list[-1], paf_list[-1], conf_list, paf_list, backbone_features
        if is_train:
            return conf_list[-1], paf_list[-1], conf_list, paf_list
        else:
            return conf_list[-1], paf_list[-1]

    @tf.function(experimental_relax_shapes=True)
    def infer(self, x):
        conf_map, paf_map = self.forward(x, is_train=False)
        return conf_map, paf_map

    def cal_loss(self, gt_conf, gt_paf, mask, stage_confs, stage_pafs):
        stage_losses = []
        batch_size = gt_conf.shape[0]
        mask_conf = tf_repeat(mask, [1, self.n_confmaps, 1, 1])
        mask_paf = tf_repeat(mask, [1, self.n_pafmaps, 1, 1])
        loss_confs, loss_pafs = [], []
        for stage_conf, stage_paf in zip(stage_confs, stage_pafs):
            loss_conf = tf.nn.l2_loss((gt_conf - stage_conf) * mask_conf)
            loss_paf = tf.nn.l2_loss((gt_paf - stage_paf) * mask_paf)
            stage_losses.append(loss_conf)
            stage_losses.append(loss_paf)
            loss_confs.append(loss_conf)
            loss_pafs.append(loss_paf)
        pd_loss = tf.reduce_mean(stage_losses) / batch_size
        return pd_loss, loss_confs, loss_pafs

    class Mobilenetv2_variant(Model):
        def __init__(self, data_format="channels_first"):
            super().__init__()
            self.data_format = data_format
            if self.data_format == "channels_first":
                self.concat_dim = 1
            else:
                self.concat_dim = -1
            self.out_channels = 1152
            self.scale_size = 8
            self.convblock_0 = conv_block(n_filter=32, in_channels=3, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_1 = separable_block(n_filter=64, in_channels=32, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_2 = separable_block(n_filter=128, in_channels=64, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_3 = separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_4 = separable_block(n_filter=256, in_channels=128, filter_size=(3, 3), strides=(2, 2), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_5 = separable_block(n_filter=256, in_channels=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_6 = separable_block(n_filter=512, in_channels=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_7 = separable_block(n_filter=512, in_channels=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_8 = separable_block(n_filter=512, in_channels=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_9 = separable_block(n_filter=512, in_channels=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_10 = separable_block(n_filter=512, in_channels=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.convblock_11 = separable_block(n_filter=512, in_channels=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format)
            self.maxpool = MaxPool2d(filter_size=(2, 2), strides=(2, 2), padding="SAME", data_format=self.data_format)

        def forward(self, x):
            concat_list = []
            x = self.convblock_0.forward(x)
            x = self.convblock_1.forward(x)
            x = self.convblock_2.forward(x)
            x = self.convblock_3.forward(x)
            concat_list.append(self.maxpool.forward(x))
            x = self.convblock_4.forward(x)
            x = self.convblock_5.forward(x)
            x = self.convblock_6.forward(x)
            x = self.convblock_7.forward(x)
            concat_list.append(x)
            x = self.convblock_8.forward(x)
            x = self.convblock_9.forward(x)
            x = self.convblock_10.forward(x)
            x = self.convblock_11.forward(x)
            concat_list.append(x)
            x = tf.concat(concat_list, self.concat_dim)
            return x

    class Init_stage(Model):
        def __init__(self, n_confmaps=19, n_pafmaps=38, in_channels=1152, data_format="channels_first"):
            super().__init__()
            self.n_confmaps = n_confmaps
            self.n_pafmaps = n_pafmaps
            self.in_channels = in_channels
            self.data_format = data_format
            # conf block
            self.conf_block = LayerList([
                separable_block(n_filter=128, in_channels=self.in_channels, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=512, in_channels=128, filter_size=(1, 1), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=self.n_confmaps, in_channels=512, filter_size=(1, 1), strides=(1, 1), act=None, data_format=self.data_format)
            ])
            # paf block
            self.paf_block = LayerList([
                separable_block(n_filter=128, in_channels=self.in_channels, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=512, in_channels=128, filter_size=(1, 1), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=self.n_pafmaps, in_channels=512, filter_size=(1, 1), strides=(1, 1), act=None, data_format=self.data_format)
            ])

        def forward(self, x):
            conf_map = self.conf_block.forward(x)
            paf_map = self.paf_block.forward(x)
            return conf_map, paf_map

    class Refinement_stage(Model):
        def __init__(self, n_confmaps=19, n_pafmaps=38, in_channels=19 + 38 + 1152, data_format="channels_first"):
            super().__init__()
            self.n_confmaps = n_confmaps
            self.n_pafmaps = n_pafmaps
            self.in_channels = in_channels
            self.data_format = data_format
            # conf_block
            self.conf_block = LayerList([
                separable_block(n_filter=128, in_channels=self.in_channels, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(1, 1), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=self.n_confmaps, in_channels=128, filter_size=(1, 1), strides=(1, 1), act=None, data_format=self.data_format),
            ])
            # paf_block
            self.paf_block = LayerList([
                separable_block(n_filter=128, in_channels=self.in_channels, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=128, in_channels=128, filter_size=(1, 1), strides=(1, 1), act=tf.nn.relu, data_format=self.data_format),
                separable_block(n_filter=self.n_pafmaps, in_channels=128, filter_size=(1, 1), strides=(1, 1), act=None, data_format=self.data_format),
            ])

        def forward(self, x):
            conf_map = self.conf_block.forward(x)
            paf_map = self.paf_block.forward(x)
            return conf_map, paf_map

def conv_block(n_filter=32, in_channels=3, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding="SAME", data_format="channels_first"):
    layer_list = []
    layer_list.append(Conv2d(n_filter=n_filter, in_channels=in_channels, filter_size=filter_size, strides=strides, act=act,
                             W_init=initial_w, b_init=initial_b, data_format=data_format, padding=padding))
    layer_list.append(BatchNorm2d(num_features=n_filter, decay=0.999, is_train=True, act=act, data_format=data_format))
    return LayerList(layer_list)

def separable_block(n_filter=32, in_channels=3, filter_size=(3, 3), strides=(1, 1), dilation_rate=(1, 1), act=tf.nn.relu, data_format="channels_first"):
    layer_list = []
    layer_list.append(DepthwiseConv2d(filter_size=filter_size, strides=strides, in_channels=in_channels,
                                      dilation_rate=dilation_rate, W_init=initial_w, b_init=None, data_format=data_format))
    layer_list.append(BatchNorm2d(decay=0.99, act=act, num_features=in_channels, data_format=data_format, is_train=True))
    layer_list.append(Conv2d(n_filter=n_filter, filter_size=(1, 1), strides=(1, 1), in_channels=in_channels, W_init=initial_w, b_init=None, data_format=data_format))
    layer_list.append(BatchNorm2d(decay=0.99, act=act, num_features=n_filter, data_format=data_format, is_train=True))
    return layers.LayerList(layer_list)
self.concat_dim=1
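The recovered middle sets the channel-concatenation axis for the NCHW layout that the prefix's if branch tests for. A self-contained NumPy illustration of why the axis is 1 for "channels_first" and -1 for "channels_last" (shapes borrowed from the model's defaults: 19 confidence maps at 46x46; NumPy is just a stand-in for tf.concat here):

import numpy as np

nchw = np.zeros((8, 19, 46, 46))   # batch, channels, height, width
nhwc = np.zeros((8, 46, 46, 19))   # batch, height, width, channels
assert np.concatenate([nchw, nchw], axis=1).shape == (8, 38, 46, 46)   # channels_first -> axis 1
assert np.concatenate([nhwc, nhwc], axis=-1).shape == (8, 46, 46, 38)  # channels_last -> axis -1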
slice.js
/* Slice
 * means to slice or divide
 * Extracts a section of an array without modifying the original array
 * Syntax: arr.slice([begin[, end]])
 * The first parameter is the starting index "begin"
 * The second parameter is the ending index "end"
 * Returns all elements from the element at index "begin" up to the element just before "end"
 */
console.log(spaceshipNames)
console.log(extractedNames)
let spaceshipNames = ["Elemental", "Darvin"," Artemis", "Supernova"] let extractedNames = spaceshipNames.slice(1,3) // pega apenas o array posição 1 e 2 (Darvin e Artemis) nao pega array 3 pois pega apenas o anterior
movement_handler.rs
pub use super::layer_panel::*;

use super::LayerData;
use crate::message_prelude::*;
use crate::{
	consts::{VIEWPORT_SCROLL_RATE, VIEWPORT_ZOOM_LEVELS, VIEWPORT_ZOOM_MOUSE_RATE, VIEWPORT_ZOOM_SCALE_MAX, VIEWPORT_ZOOM_SCALE_MIN, VIEWPORT_ZOOM_WHEEL_RATE},
	input::{mouse::ViewportBounds, mouse::ViewportPosition, InputPreprocessor},
};

use glam::DVec2;
use graphene::document::Document;
use graphene::Operation as DocumentOperation;

use std::collections::VecDeque;

#[impl_message(Message, DocumentMessage, Movement)]
#[derive(PartialEq, Clone, Debug)]
pub enum MovementMessage {
	MouseMove,
	TranslateCanvasBegin,
	WheelCanvasTranslate { use_y_as_x: bool },
	RotateCanvasBegin { snap: bool },
	EnableSnapping,
	DisableSnapping,
	ZoomCanvasBegin,
	TranslateCanvasEnd,
	SetCanvasRotation(f64),
	SetCanvasZoom(f64),
	IncreaseCanvasZoom,
	DecreaseCanvasZoom,
	WheelCanvasZoom,
	ZoomCanvasToFitAll,
	TranslateCanvas(DVec2),
	TranslateCanvasByViewportFraction(DVec2),
}

#[derive(Debug, Clone, Default, PartialEq)]
pub struct MovementMessageHandler {
	translating: bool,
	rotating: bool,
	zooming: bool,
	snapping: bool,
	mouse_pos: ViewportPosition,
}

impl MovementMessageHandler {
	fn create_document_transform_from_layerdata(&self, layerdata: &LayerData, viewport_bounds: &ViewportBounds, responses: &mut VecDeque<Message>) {
		let half_viewport = viewport_bounds.size() / 2.;
		let scaled_half_viewport = half_viewport / layerdata.scale;
		responses.push_back(
			DocumentOperation::SetLayerTransform {
				path: vec![],
				transform: layerdata.calculate_offset_transform(scaled_half_viewport).to_cols_array(),
			}
			.into(),
		);
	}
}

impl MessageHandler<MovementMessage, (&mut LayerData, &Document, &InputPreprocessor)> for MovementMessageHandler {
	fn process_action(&mut self, message: MovementMessage, data: (&mut LayerData, &Document, &InputPreprocessor), responses: &mut VecDeque<Message>) {
		let (layerdata, document, ipp) = data;
		use MovementMessage::*;
		match message {
			TranslateCanvasBegin => {
				self.translating = true;
				self.mouse_pos = ipp.mouse.position;
			}
			RotateCanvasBegin { snap } => {
				self.rotating = true;
				self.snapping = snap;
				layerdata.snap_rotate = snap;
				self.mouse_pos = ipp.mouse.position;
			}
			EnableSnapping => self.snapping = true,
			DisableSnapping => self.snapping = false,
			ZoomCanvasBegin => {
				self.zooming = true;
				self.mouse_pos = ipp.mouse.position;
			}
			TranslateCanvasEnd => {
				layerdata.rotation = layerdata.snapped_angle();
				layerdata.snap_rotate = false;
				self.translating = false;
				self.rotating = false;
				self.zooming = false;
			}
			MouseMove => {
				if self.translating {
					let delta = ipp.mouse.position - self.mouse_pos;
					let transformed_delta = document.root.transform.inverse().transform_vector2(delta);
					layerdata.translation += transformed_delta;
					self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
				}
				if self.rotating {
					let half_viewport = ipp.viewport_bounds.size() / 2.;
					let rotation = {
						let start_vec = self.mouse_pos - half_viewport;
						let end_vec = ipp.mouse.position - half_viewport;
						start_vec.angle_between(end_vec)
					};
					let snapping = self.snapping;
					layerdata.rotation += rotation;
					layerdata.snap_rotate = snapping;
					responses.push_back(
						FrontendMessage::SetCanvasRotation {
							new_radians: layerdata.snapped_angle(),
						}
						.into(),
					);
					self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
				}
				if self.zooming {
					let difference = self.mouse_pos.y as f64 - ipp.mouse.position.y as f64;
					let amount = 1. + difference * VIEWPORT_ZOOM_MOUSE_RATE;
					let new = (layerdata.scale * amount).clamp(VIEWPORT_ZOOM_SCALE_MIN, VIEWPORT_ZOOM_SCALE_MAX);
					layerdata.scale = new;
					responses.push_back(FrontendMessage::SetCanvasZoom { new_zoom: layerdata.scale }.into());
					self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
				}
				self.mouse_pos = ipp.mouse.position;
			}
			SetCanvasZoom(new) => {
				layerdata.scale = new.clamp(VIEWPORT_ZOOM_SCALE_MIN, VIEWPORT_ZOOM_SCALE_MAX);
				responses.push_back(FrontendMessage::SetCanvasZoom { new_zoom: layerdata.scale }.into());
				self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
			}
			IncreaseCanvasZoom => {
				layerdata.scale = *VIEWPORT_ZOOM_LEVELS.iter().find(|scale| **scale > layerdata.scale).unwrap_or(&layerdata.scale);
				responses.push_back(FrontendMessage::SetCanvasZoom { new_zoom: layerdata.scale }.into());
				self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
			}
			DecreaseCanvasZoom => {
				layerdata.scale = *VIEWPORT_ZOOM_LEVELS.iter().rev().find(|scale| **scale < layerdata.scale).unwrap_or(&layerdata.scale);
				responses.push_back(FrontendMessage::SetCanvasZoom { new_zoom: layerdata.scale }.into());
				self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
			}
			WheelCanvasZoom => {
				let scroll = ipp.mouse.scroll_delta.scroll_delta();
				let mouse = ipp.mouse.position;
				let viewport_bounds = ipp.viewport_bounds.size();
				let mut zoom_factor = 1. + scroll.abs() * VIEWPORT_ZOOM_WHEEL_RATE;
				if ipp.mouse.scroll_delta.y > 0 {
					zoom_factor = 1. / zoom_factor
				};
				let new_viewport_bounds = viewport_bounds * (1. / zoom_factor);
				let delta_size = viewport_bounds - new_viewport_bounds;
				let mouse_percent = mouse / viewport_bounds;
				let delta = (delta_size * -2.) * (mouse_percent - DVec2::splat(0.5));
				let transformed_delta = document.root.transform.inverse().transform_vector2(delta);
				let new = (layerdata.scale * zoom_factor).clamp(VIEWPORT_ZOOM_SCALE_MIN, VIEWPORT_ZOOM_SCALE_MAX);
				layerdata.scale = new;
				layerdata.translation += transformed_delta;
				responses.push_back(FrontendMessage::SetCanvasZoom { new_zoom: layerdata.scale }.into());
				self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
			}
			WheelCanvasTranslate { use_y_as_x } => {
				let delta = match use_y_as_x {
					false => -ipp.mouse.scroll_delta.as_dvec2(),
					true => (-ipp.mouse.scroll_delta.y as f64, 0.).into(),
				} * VIEWPORT_SCROLL_RATE;
				let transformed_delta = document.root.transform.inverse().transform_vector2(delta);
				layerdata.translation += transformed_delta;
				self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
			}
			SetCanvasRotation(new) => {
				layerdata.rotation = new;
				self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
				responses.push_back(FrontendMessage::SetCanvasRotation { new_radians: new }.into());
			}
			ZoomCanvasToFitAll => {
				if let Some([pos1, pos2]) = document.visible_layers_bounding_box() {
					let pos1 = document.root.transform.inverse().transform_point2(pos1);
					let pos2 = document.root.transform.inverse().transform_point2(pos2);
					let v1 = document.root.transform.inverse().transform_point2(DVec2::ZERO);
					let v2 = document.root.transform.inverse().transform_point2(ipp.viewport_bounds.size());
					let center = v1.lerp(v2, 0.5) - pos1.lerp(pos2, 0.5);
					let size = (pos2 - pos1) / (v2 - v1);
					let size = 1. / size;
					let new_scale = size.min_element();
					layerdata.translation += center;
					layerdata.scale *= new_scale;
					responses.push_back(FrontendMessage::SetCanvasZoom { new_zoom: layerdata.scale }.into());
					self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
				}
			}
			TranslateCanvas(delta) => {
				let transformed_delta = document.root.transform.inverse().transform_vector2(delta);
				layerdata.translation += transformed_delta;
				self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
			}
			TranslateCanvasByViewportFraction(delta) => {
				let transformed_delta = document.root.transform.inverse().transform_vector2(delta * ipp.viewport_bounds.size());
				layerdata.translation += transformed_delta;
				self.create_document_transform_from_layerdata(layerdata, &ipp.viewport_bounds, responses);
			}
		}
	}

	fn
(&self) -> ActionList { let mut common = actions!(MovementMessageDiscriminant; MouseMove, TranslateCanvasEnd, TranslateCanvasBegin, RotateCanvasBegin, ZoomCanvasBegin, SetCanvasZoom, SetCanvasRotation, WheelCanvasZoom, IncreaseCanvasZoom, DecreaseCanvasZoom, WheelCanvasTranslate, ZoomCanvasToFitAll, TranslateCanvas, TranslateCanvasByViewportFraction, ); if self.rotating { let snapping = actions!(MovementMessageDiscriminant; EnableSnapping, DisableSnapping, ); common.extend(snapping); } common } }
actions
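Here the middle is a single identifier: joined with its neighbors it completes `fn actions(&self) -> ActionList`. Middles this small fall out naturally from random span splitting. A sketch of that preprocessing idea; this is the standard FIM recipe, not necessarily the exact one used to build this dataset:

import random

def split_fim(source: str, rng: random.Random):
    # Pick two cut points; everything between them becomes the middle.
    a, b = sorted(rng.randrange(len(source) + 1) for _ in range(2))
    return source[:a], source[a:b], source[b:]

prefix, middle, suffix = split_fim("fn actions(&self) -> ActionList {", random.Random(0))
assert prefix + middle + suffix == "fn actions(&self) -> ActionList {"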
testme.py
import sys
sys.path.insert(0, '..')
from cde_test_common import *

def checker_func():
generic_test_runner(["python", "chdir_relpath_test.py"], checker_func)
pass
models.py
from keras.applications.resnet50 import ResNet50
from keras.applications.vgg16 import VGG16
from keras.layers import Flatten, Dropout, Lambda, GlobalAveragePooling2D, merge, Input, Dense
from keras.models import Model
import keras.backend as K
# from keras.utils.visualize_util import plot
# from SpatialPyramidPooling import SpatialPyramidPooling

def l2_normalize(x):
def l2_normalize_output_shape(input_shape):
    return input_shape

def squared_root_normalization(x):
    """
    Square root normalization for convolution layers' output:
    first apply global average pooling, followed by square root on all elements,
    then l2 normalize the vector
    :param x: input tensor, output of convolution layer
    :return:
    """
    x = GlobalAveragePooling2D()(x)  # output shape = (None, nc)
    # x = K.sqrt(x)
    # x = K.l2_normalize(x, axis=0)
    return x

def squared_root_normalization_output_shape(input_shape):
    """
    Return the output shape for the squared root normalization layer
    for any given input size of the convolution filter
    :param input_shape: shape of the input
    :return: output shape
    """
    return (input_shape[0], input_shape[-1])

def model1(weights_path=None):
    '''
    Basic ResNet-FT for baseline comparisons.
    Creates a model for all aesthetic attributes along with the overall aesthetic score, by finetuning resnet50
    :param weights_path: path of the weight file
    :return: Keras model instance
    '''
    _input = Input(shape=(299, 299, 3))
    resnet = ResNet50(include_top=False, weights='imagenet', input_tensor=_input)
    last_layer_output = GlobalAveragePooling2D()(resnet.get_layer('activation_49').output)
    # output of model
    outputs = []
    attrs = ['BalacingElements', 'ColorHarmony', 'Content', 'DoF', 'Light', 'MotionBlur', 'Object',
             'RuleOfThirds', 'VividColor']
    for attribute in attrs:
        outputs.append(Dense(1, init='glorot_uniform', activation='tanh', name=attribute)(last_layer_output))
    non_negative_attrs = ['Repetition', 'Symmetry', 'score']
    for attribute in non_negative_attrs:
        outputs.append(Dense(1, init='glorot_uniform', activation='sigmoid', name=attribute)(last_layer_output))
    model = Model(input=_input, output=outputs)
    if weights_path:
        model.load_weights(weights_path)
    return model

def model2(weights_path=None):
    '''
    Creates a model by concatenating the features from lower layers
    with high level convolution features for all aesthetic attributes
    along with the overall aesthetic score
    :param weights_path: path of the weight file
    :return: Keras model instance
    This is the model used in the paper
    '''
    _input = Input(shape=(299, 299, 3))
    resnet = ResNet50(include_top=False, weights='imagenet', input_tensor=_input)
    activation_layers = []
    layers = resnet.layers
    for layer in layers:
        # print layer.name, layer.input_shape, layer.output_shape
        if 'activation' in layer.name:
            activation_layers.append(layer)
    activations = 0
    activation_plus_squared_outputs = []
    # Remove the last activation layer so it can be used with a spatial pooling layer if required
    nlayers = len(activation_layers) - 1
    for i in range(1, nlayers):
        layer = activation_layers[i]
        if layer.output_shape[-1] > activation_layers[i - 1].output_shape[-1]:
            # print layer.name, layer.input_shape, layer.output_shape
            activations += layer.output_shape[-1]
            _out = Lambda(squared_root_normalization,
                          output_shape=squared_root_normalization_output_shape,
                          name=layer.name + '_normalized')(layer.output)
            activation_plus_squared_outputs.append(_out)
    # print "sum of all activations should be {}".format(activations)
    last_layer_output = GlobalAveragePooling2D()(activation_layers[-1].output)
    # last_layer_output = Lambda(K.sqrt, output_shape=squared_root_normalization_output_shape)(last_layer_output)
    last_layer_output = Lambda(l2_normalize, output_shape=l2_normalize_output_shape,
                               name=activation_layers[-1].name + '_normalized')(last_layer_output)
    activation_plus_squared_outputs.append(last_layer_output)
    merged = merge(activation_plus_squared_outputs, mode='concat', concat_axis=1)
    merged = Lambda(l2_normalize, output_shape=l2_normalize_output_shape, name='merge')(merged)
    # output of model
    outputs = []
    attrs = ['BalacingElements', 'ColorHarmony', 'Content', 'DoF', 'Light', 'MotionBlur', 'Object',
             'RuleOfThirds', 'VividColor']
    for attribute in attrs:
        outputs.append(Dense(1, init='glorot_uniform', activation='tanh', name=attribute)(merged))
    non_negative_attrs = ['Repetition', 'Symmetry', 'score']
    for attribute in non_negative_attrs:
        outputs.append(Dense(1, init='glorot_uniform', activation='sigmoid', name=attribute)(merged))
    model = Model(input=_input, output=outputs)
    if weights_path:
        model.load_weights(weights_path)
    return model

if __name__ == '__main__':
    model = model2()
    model.summary()
    # plot(model, to_file='model2.png', show_shapes=True)
return K.l2_normalize(x, 0)
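The recovered middle delegates to Keras' K.l2_normalize along axis 0. A NumPy stand-in showing the arithmetic (K.l2_normalize additionally guards against division by zero with an epsilon, omitted here):

import numpy as np

def l2_normalize(x, axis=0):
    return x / np.linalg.norm(x, axis=axis, keepdims=True)

v = np.array([[3.0], [4.0]])
print(l2_normalize(v))  # [[0.6], [0.8]] -- the column now has unit L2 norm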
option.rs
//! Contains the ffi-safe equivalent of `std::option::Option`.

use std::{mem, ops::Deref};

use core_extensions::matches;

use serde::{Deserialize, Deserializer, Serialize, Serializer};

/// Ffi-safe equivalent of the `std::option::Option` type.
///
/// `Option` is also ffi-safe for NonNull/NonZero types, and references.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)]
#[repr(u8)]
#[derive(StableAbi)]
// #[sabi(debug_print)]
pub enum ROption<T> {
	RSome(T),
	RNone,
}

pub use self::ROption::*;

impl<T> ROption<T> {
	/// Converts from `ROption<T>` to `ROption<&T>`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).as_ref(), RSome(&10));
	/// assert_eq!(RNone::<u32>.as_ref(), RNone);
	///
	/// ```
	#[inline]
	pub fn as_ref(&self) -> ROption<&T> {
		match self {
			RSome(v) => RSome(v),
			RNone => RNone,
		}
	}

	/// Converts from `ROption<T>` to `ROption<&mut T>`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).as_mut(), RSome(&mut 10));
	/// assert_eq!(RNone::<u32>.as_mut(), RNone);
	///
	/// ```
	#[inline]
	pub fn as_mut(&mut self) -> ROption<&mut T> {
		match self {
			RSome(v) => RSome(v),
			RNone => RNone,
		}
	}

	/// Returns whether `self` is an `RSome`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).is_rsome(), true);
	/// assert_eq!(RNone::<u32>.is_rsome(), false);
	///
	/// ```
	#[inline]
	pub fn is_rsome(&self) -> bool {
		matches!(self, RSome { .. })
	}

	/// Returns whether `self` is an `RNone`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).is_rnone(), false);
	/// assert_eq!(RNone::<u32>.is_rnone(), true);
	///
	/// ```
	#[inline]
	pub fn is_rnone(&self) -> bool {
		matches!(self, RNone { .. })
	}

	/// Returns whether `self` is an `RSome`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).is_some(), true);
	/// assert_eq!(RNone::<u32>.is_some(), false);
	///
	/// ```
	#[inline]
	pub fn is_some(&self) -> bool {
		matches!(self, RSome { .. })
	}

	/// Returns whether `self` is an `RNone`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).is_none(), false);
	/// assert_eq!(RNone::<u32>.is_none(), true);
	///
	/// ```
	#[inline]
	pub fn is_none(&self) -> bool {
		matches!(self, RNone { .. })
	}

	/// Converts from `ROption<T>` to `Option<T>`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).into_option(), Some(10));
	/// assert_eq!(RNone::<u32>.into_option(), None);
	///
	/// ```
	#[inline]
	pub fn into_option(self) -> Option<T> {
		self.into()
	}

	/// Unwraps the `ROption<T>`, returning its contents.
	///
	/// # Panics
	///
	/// Panics if `self` is `RNone`, with the `msg` message.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(100).expect("must contain a value"), 100);
	///
	/// ```
	///
	/// This one panics:
	/// ```should_panic
	/// # use abi_stable::std_types::*;
	///
	/// let _ = RNone::<()>.expect("Oh noooo!");
	/// ```
	#[inline]
	pub fn expect(self, msg: &str) -> T {
		self.into_option().expect(msg)
	}

	/// Unwraps the ROption, returning its contents.
	///
	/// # Panics
	///
	/// Panics if `self` is `RNone`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(500).unwrap(), 500);
	///
	/// ```
	///
	/// This one panics:
	/// ```should_panic
	/// # use abi_stable::std_types::*;
	///
	/// let _ = RNone::<()>.unwrap();
	/// ```
	#[inline]
	pub fn unwrap(self) -> T {
		self.into_option().unwrap()
	}

	/// Returns the value in the `ROption<T>`, or `def` if `self` is `RNone`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).unwrap_or(99), 10);
	/// assert_eq!(RNone::<u32>.unwrap_or(99), 99);
	///
	/// ```
	#[inline]
	pub fn unwrap_or(self, def: T) -> T {
		match self {
			RSome(x) => x,
			RNone => def,
		}
	}

	/// Returns the value in the `ROption<T>`, or `T::default()` if `self` is `RNone`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).unwrap_or_default(), 10);
	/// assert_eq!(RNone::<u32>.unwrap_or_default(), 0);
	///
	/// ```
	#[inline]
	pub fn unwrap_or_default(self) -> T
	where
		T: Default,
	{
		match self {
			RSome(x) => x,
			RNone => Default::default(),
		}
	}

	/// Returns the value in the `ROption<T>`,
	/// or the return value of calling `f` if `self` is `RNone`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).unwrap_or_else(|| 77), 10);
	/// assert_eq!(RNone::<u32>.unwrap_or_else(|| 77), 77);
	///
	/// ```
	#[inline]
	pub fn unwrap_or_else<F>(self, f: F) -> T
	where
		F: FnOnce() -> T,
	{
		match self {
			RSome(x) => x,
			RNone => f(),
		}
	}

	/// Converts the `ROption<T>` to a `ROption<U>`,
	/// transforming the contained value with the `f` closure.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).map(|x| x * 2), RSome(20));
	/// assert_eq!(RNone::<u32>.map(|x| x * 2), RNone);
	///
	/// ```
	#[inline]
	pub fn map<U, F>(self, f: F) -> ROption<U>
	where
		F: FnOnce(T) -> U,
	{
		match self {
			RSome(x) => RSome(f(x)),
			RNone => RNone,
		}
	}

	/// Transforms (and returns) the contained value with the `f` closure,
	/// or returns `default` if `self` is `RNone`.
	///
	/// # Example
	///
	/// ```
	/// # use abi_stable::std_types::*;
	///
	/// assert_eq!(RSome(10).map_or(77, |x| x * 2), 20);
	/// assert_eq!(RNone::<u32>.map_or(77, |x| x * 2), 77);
	///
	/// ```
	#[inline]
	pub fn
<U, F>(self, default: U, f: F) -> U where F: FnOnce(T) -> U, { match self { RSome(t) => f(t), RNone => default, } } /// Transforms (and returns) the contained value with the `f` closure, /// or returns `otherwise()` if `self` is `RNone`.. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).map_or_else(|| 77, |x| x * 2), 20); /// assert_eq!(RNone::<u32>.map_or_else(|| 77, |x| x * 2), 77); /// /// ``` #[inline] pub fn map_or_else<U, D, F>(self, otherwise: D, f: F) -> U where D: FnOnce() -> U, F: FnOnce(T) -> U, { match self { RSome(t) => f(t), RNone => otherwise(), } } /// Returns `self` if `predicate(&self)` is true, otherwise returns `RNone`. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).filter(|x| (x % 2) == 0), RSome(10)); /// assert_eq!(RSome(10).filter(|x| (x % 2) == 1), RNone); /// assert_eq!(RNone::<u32>.filter(|_| true), RNone); /// assert_eq!(RNone::<u32>.filter(|_| false), RNone); /// /// ``` pub fn filter<P>(self, predicate: P) -> Self where P: FnOnce(&T) -> bool, { if let RSome(x) = self { if predicate(&x) { return RSome(x); } } RNone } /// Returns `self` if it is `RNone`, otherwise returns `optb`. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).and(RSome(20)), RSome(20)); /// assert_eq!(RSome(10).and(RNone), RNone); /// assert_eq!(RNone::<u32>.and(RSome(20)), RNone); /// assert_eq!(RNone::<u32>.and(RNone), RNone); /// /// ``` #[inline] pub fn and(self, optb: ROption<T>) -> ROption<T> { match self { RSome(_) => optb, RNone => self, } } /// Returns `self` if it is `RNone`, /// otherwise returns the result of calling `f` with the value in `RSome`. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).and_then(|x| RSome(x * 2)), RSome(20)); /// assert_eq!(RSome(10).and_then(|_| RNone::<u32>), RNone); /// assert_eq!(RNone::<u32>.and_then(|x| RSome(x * 2)), RNone); /// assert_eq!(RNone::<u32>.and_then(|_| RNone::<u32>), RNone); /// /// ``` #[inline] pub fn and_then<F, U>(self, f: F) -> ROption<U> where F: FnOnce(T) -> ROption<U>, { match self { RSome(x) => f(x), RNone => RNone, } } /// Returns `self` if it contains a value, otherwise returns `optb`. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).or(RSome(20)), RSome(10)); /// assert_eq!(RSome(10).or(RNone ), RSome(10)); /// assert_eq!(RNone::<u32>.or(RSome(20)), RSome(20)); /// assert_eq!(RNone::<u32>.or(RNone ), RNone); /// /// ``` #[inline] pub fn or(self, optb: ROption<T>) -> ROption<T> { match self { RSome(_) => self, RNone => optb, } } /// Returns `self` if it contains a value, /// otherwise calls `optb` and returns the value it evaluates to. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).or_else(|| RSome(20)), RSome(10)); /// assert_eq!(RSome(10).or_else(|| RNone), RSome(10)); /// assert_eq!(RNone::<u32>.or_else(|| RSome(20)), RSome(20)); /// assert_eq!(RNone::<u32>.or_else(|| RNone), RNone); /// /// ``` #[inline] pub fn or_else<F>(self, f: F) -> ROption<T> where F: FnOnce() -> ROption<T>, { match self { RSome(_) => self, RNone => f(), } } /// Returns `RNone` if both values are `RNone` or `RSome`, /// otherwise returns the value that is an`RSome`. 
/// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).xor(RSome(20)), RNone); /// assert_eq!(RSome(10).xor(RNone), RSome(10)); /// assert_eq!(RNone::<u32>.xor(RSome(20)), RSome(20)); /// assert_eq!(RNone::<u32>.xor(RNone), RNone); /// /// ``` #[inline] pub fn xor(self, optb: ROption<T>) -> ROption<T> { match (self, optb) { (RSome(a), RNone) => RSome(a), (RNone, RSome(b)) => RSome(b), _ => RNone, } } /// Sets this ROption to `RSome(value)` if it was `RNone`. /// Returns a mutable reference to the inserted/pre-existing `RSome`. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).get_or_insert(40), &mut 10); /// assert_eq!(RSome(20).get_or_insert(55), &mut 20); /// assert_eq!(RNone::<u32>.get_or_insert(77), &mut 77); /// /// ``` #[inline] pub fn get_or_insert(&mut self, value: T) -> &mut T { if self.is_rnone() { *self = RSome(value); } match *self { RSome(ref mut v) => v, RNone => unreachable!(), } } /// Sets this `ROption` to `RSome(func())` if it was `RNone`. /// Returns a mutable reference to the inserted/pre-existing `RSome`. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(10).get_or_insert_with(|| 40), &mut 10); /// assert_eq!(RSome(20).get_or_insert_with(|| 55), &mut 20); /// assert_eq!(RNone::<u32>.get_or_insert_with(|| 77), &mut 77); /// /// ``` #[inline] pub fn get_or_insert_with<F>(&mut self, func: F) -> &mut T where F: FnOnce() -> T, { if self.is_rnone() { *self = RSome(func()); } match *self { RSome(ref mut v) => v, RNone => unreachable!(), } } /// Takes the value of `self`, replacing it with `RNone` /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// let mut opt0 = RSome(10); /// assert_eq!(opt0.take(), RSome(10)); /// assert_eq!(opt0, RNone); /// /// let mut opt1 = RSome(20); /// assert_eq!(opt1.take(), RSome(20)); /// assert_eq!(opt1, RNone); /// /// let mut opt2 = RNone::<u32>; /// assert_eq!(opt2.take(), RNone); /// assert_eq!(opt2, RNone); /// /// ``` #[inline] pub fn take(&mut self) -> ROption<T> { mem::replace(self, RNone) } /// Replaces the value of `self` with `RSome(value)`. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// let mut opt0 = RSome(10); /// assert_eq!(opt0.replace(55), RSome(10)); /// assert_eq!(opt0, RSome(55)); /// /// let mut opt1 = RSome(20); /// assert_eq!(opt1.replace(88), RSome(20)); /// assert_eq!(opt1, RSome(88)); /// /// let mut opt2 = RNone::<u32>; /// assert_eq!(opt2.replace(33), RNone); /// assert_eq!(opt2, RSome(33)); /// /// ``` #[inline] pub fn replace(&mut self, value: T) -> ROption<T> { mem::replace(self, RSome(value)) } } impl<T> ROption<&T> { /// Converts an `ROption<&T>` to an `ROption<T>` by cloning its contents. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(&vec![()]).cloned(), RSome(vec![()])); /// assert_eq!(RNone::<&Vec<()>>.cloned(), RNone); /// /// ``` #[inline] pub fn cloned(self) -> ROption<T> where T: Clone, { match self { RSome(expr) => RSome(expr.clone()), RNone => RNone, } } /// Converts an `ROption<&T>` to an `ROption<T>` by Copy-ing its contents. 
/// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(&7).copied(), RSome(7)); /// assert_eq!(RNone::<&u32>.copied(), RNone); /// /// ``` #[inline] pub fn copied(self) -> ROption<T> where T: Copy, { match self { RSome(expr) => RSome(*expr), RNone => RNone, } } } impl<T> ROption<&mut T> { /// Converts an `ROption<&mut T>` to a `ROption<T>` by cloning its contents. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(&mut vec![()]).cloned(), RSome(vec![()])); /// assert_eq!(RNone::<&mut Vec<()>>.cloned(), RNone); /// /// ``` #[inline] pub fn cloned(self) -> ROption<T> where T: Clone, { match self { RSome(expr) => RSome(expr.clone()), RNone => RNone, } } /// Converts an `ROption<&mut T>` to a `ROption<T>` by Copy-ing its contents. /// /// # Example /// /// ``` /// # use abi_stable::std_types::*; /// /// assert_eq!(RSome(&mut 7).copied(), RSome(7)); /// assert_eq!(RNone::<&mut u32>.copied(), RNone); /// /// ``` #[inline] pub fn copied(self) -> ROption<T> where T: Copy, { match self { RSome(expr) => RSome(*expr), RNone => RNone, } } } impl<T: Deref> ROption<T> { /// Converts from `ROption<T>` (or `&ROption<T>`) to `ROption<&T::Target>`. /// /// Leaves the original ROption in-place, creating a new one with a /// reference to the original one, additionally coercing the contents via /// [`Deref`]. /// /// # Examples /// /// ``` /// # use abi_stable::std_types::*; /// /// let x: ROption<RString> = RSome(RString::from("hey")); /// assert_eq!(x.as_deref(), RSome("hey")); /// /// let x: ROption<RString> = RNone; /// assert_eq!(x.as_deref(), RNone); /// ``` pub fn as_deref(&self) -> ROption<&T::Target> { self.as_ref().map(|t| t.deref()) } } /// The default value is `RNone`. impl<T> Default for ROption<T> { fn default() -> Self { RNone } } impl_from_rust_repr! { impl[T] From<Option<T>> for ROption<T> { fn(this){ match this { Some(v) => RSome(v), None => RNone, } } } } impl_into_rust_repr! { impl[T] Into<Option<T>> for ROption<T> { fn(this){ match this { RSome(v) => Some(v), RNone => None, } } } } impl<'de, T> Deserialize<'de> for ROption<T> where T: Deserialize<'de>, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { Option::deserialize(deserializer).map(Self::from) } } impl<T> Serialize for ROption<T> where T: Serialize, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_ref().into_option().serialize(serializer) } } ///////////////////////////////////////////////////////////////////// #[cfg(all(test, not(feature = "only_new_tests")))] // #[cfg(test)] mod test { use super::*; #[test] fn from_into() { assert_eq!(ROption::from(Some(10)), RSome(10)); assert_eq!(ROption::from(None::<u32>), RNone); assert_eq!(RSome(10).into_option(), Some(10)); assert_eq!(RNone::<u32>.into_option(), None); } }
map_or
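The middle is again just a method name, completing `pub fn map_or<U, F>(self, default: U, f: F) -> U`. A one-line Python analogue of that Option::map_or behavior, mirroring the doc example's values:

def map_or(opt, default, f):
    return f(opt) if opt is not None else default

assert map_or(10, 77, lambda x: x * 2) == 20    # RSome(10).map_or(77, |x| x * 2)
assert map_or(None, 77, lambda x: x * 2) == 77  # RNone.map_or(77, |x| x * 2)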
expand_plan.go
// Copyright 2016 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.

package sql

import (
	"context"
	"fmt"
	"math"

	"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
	"github.com/cockroachdb/cockroach/pkg/sql/sqlbase"
	"github.com/cockroachdb/cockroach/pkg/util"
)

// expandPlan finalizes type checking of placeholders and expands
// the query plan to its final form, including index selection and
// expansion of sub-queries. Returns an error if the initialization
// fails.
func (p *planner) expandPlan(ctx context.Context, plan planNode) (planNode, error) {
	var err error
	topParams := noParamsBase
	topParams.atTop = true
	plan, err = doExpandPlan(ctx, p, topParams, plan)
	if err != nil {
		return plan, err
	}
	plan = p.simplifyOrderings(plan, nil)

	if p.autoCommit {
		if ac, ok := plan.(autoCommitNode); ok {
			ac.enableAutoCommit()
		}
	}
// a given level to the levels under it (upstream).
type expandParameters struct {
	numRowsHint     int64
	desiredOrdering sqlbase.ColumnOrdering

	// spooledResults is set to true if one of the parents of the
	// current plan either already provides spooling (e.g. upsertNode)
	// or has required spooling (which means doExpandPlan will
	// eventually add a spool). This is used to elide the insertion of a
	// spool.
	spooledResults bool
	// atTop is set to true on the top-level call to doExpandPlan. Further
	// recursive calls set it to false. Used to elide the insertion of a spool
	// for top-level nodes.
	atTop bool
}

var noParamsBase = expandParameters{numRowsHint: math.MaxInt64, desiredOrdering: nil}

// doExpandPlan is the algorithm that supports expandPlan().
func doExpandPlan(
	ctx context.Context, p *planner, params expandParameters, plan planNode,
) (planNode, error) {
	// atTop remembers we're at the top level.
	atTop := params.atTop

	// needSpool will indicate at the end of the recursion whether
	// a new spool stage is needed.
	needSpool := false

	// Determine what to do.
	if _, ok := plan.(planNodeRequireSpool); ok {
		// parentSpooled indicates that a parent node has already
		// established the results will be spooled (i.e. accumulated at the
		// start of execution).
		parentSpooled := params.spooledResults

		// At the top level, we ignore the spool requirement. If a parent
		// is already spooled, we don't need to add a spool.
		if !params.atTop && !parentSpooled {
			// If the node requires a spool but we are already spooled, we
			// won't need a new spool.
			needSpool = true
			// Although we're not spooled yet, needSpool will ensure we
			// become spooled. Tell this to the children nodes.
			params.spooledResults = true
		}
	} else if _, ok := plan.(planNodeSpooled); ok {
		// Propagate this knowledge to the children nodes.
		params.spooledResults = true
	}
	params.atTop = false

	// Every recursion using noParams still wants to know about the
	// current spooling status.
	noParams := noParamsBase
	noParams.spooledResults = params.spooledResults

	var err error
	switch n := plan.(type) {
	case *createTableNode:
		n.sourcePlan, err = doExpandPlan(ctx, p, noParams, n.sourcePlan)
	case *updateNode:
		n.source, err = doExpandPlan(ctx, p, noParams, n.source)
	case *insertNode:
		n.source, err = doExpandPlan(ctx, p, noParams, n.source)
	case *upsertNode:
		n.source, err = doExpandPlan(ctx, p, noParams, n.source)
	case *deleteNode:
		n.source, err = doExpandPlan(ctx, p, noParams, n.source)
	case *rowCountNode:
		var newPlan planNode
		newPlan, err = doExpandPlan(ctx, p, noParams, n.source)
		n.source = newPlan.(batchedPlanNode)
	case *serializeNode:
		var newPlan planNode
		newPlan, err = doExpandPlan(ctx, p, noParams, n.source)
		n.source = newPlan.(batchedPlanNode)
	case *explainDistSQLNode:
		// EXPLAIN only shows the structure of the plan, and wants to do
		// so "as if" plan was at the top level w.r.t spool semantics.
		explainParams := noParamsBase
		explainParams.atTop = true
		n.plan, err = doExpandPlan(ctx, p, explainParams, n.plan)
	case *showTraceReplicaNode:
		n.plan, err = doExpandPlan(ctx, p, noParams, n.plan)
	case *explainPlanNode:
		// EXPLAIN only shows the structure of the plan, and wants to do
		// so "as if" plan was at the top level w.r.t spool semantics.
		explainParams := noParamsBase
		explainParams.atTop = true
		if n.expanded {
			n.plan, err = doExpandPlan(ctx, p, explainParams, n.plan)
			if err != nil {
				return plan, err
			}
			// Trigger limit hint propagation, which would otherwise only occur
			// during the plan's Start() phase. This may trigger additional
			// optimizations (eg. in sortNode) which the user of EXPLAIN will be
			// interested in.
			p.setUnlimited(n.plan)
		}
	case *indexJoinNode:
		// We ignore the return value because we know the scanNode is preserved.
		_, err = doExpandPlan(ctx, p, params, n.index)
		if err != nil {
			return plan, err
		}
		// The row limit and desired ordering, if any, only propagates on
		// the index side.
		_, err = doExpandPlan(ctx, p, noParams, n.table)
	case *unionNode:
		n.right, err = doExpandPlan(ctx, p, params, n.right)
		if err != nil {
			return plan, err
		}
		n.left, err = doExpandPlan(ctx, p, params, n.left)
	case *filterNode:
		plan, err = expandFilterNode(ctx, p, params, n)
	case *joinNode:
		n.left.plan, err = doExpandPlan(ctx, p, noParams, n.left.plan)
		if err != nil {
			return plan, err
		}
		n.right.plan, err = doExpandPlan(ctx, p, noParams, n.right.plan)
		if err != nil {
			return plan, err
		}
		n.mergeJoinOrdering = computeMergeJoinOrdering(
			planPhysicalProps(n.left.plan),
			planPhysicalProps(n.right.plan),
			n.pred.leftEqualityIndices,
			n.pred.rightEqualityIndices,
		)
		n.props = n.joinOrdering()
	case *ordinalityNode:
		// There may be too many columns in the required ordering. Filter them.
		params.desiredOrdering = n.restrictOrdering(params.desiredOrdering)
		n.source, err = doExpandPlan(ctx, p, params, n.source)
		if err != nil {
			return plan, err
		}
		// The source ordering may have been updated. Update the
		// ordinality ordering accordingly.
		n.optimizeOrdering()
	case *limitNode:
		// Estimate the limit parameters. We can't fully evaluate them just yet,
		// because evaluation requires running potential sub-queries, which
		// cannot occur during expand.
		n.estimateLimit()
		params.numRowsHint = getLimit(n.count, n.offset)
		n.plan, err = doExpandPlan(ctx, p, params, n.plan)
	case *groupNode:
		params.desiredOrdering = n.desiredOrdering
		// Under a group node, there may be arbitrarily more rows
		// than those required by the context.
		params.numRowsHint = math.MaxInt64
		n.plan, err = doExpandPlan(ctx, p, params, n.plan)

		if len(n.desiredOrdering) > 0 {
			match := planPhysicalProps(n.plan).computeMatch(n.desiredOrdering)
			if match == len(n.desiredOrdering) {
				// We have a single MIN/MAX function and the underlying plan's
				// ordering matches the function. We only need to retrieve one row.
				// See desiredAggregateOrdering.
				n.needOnlyOneRow = true
			}
		}
		// Project the props of the GROUP BY columns, as they're retained as-is.
		groupColProjMap := make([]int, len(n.funcs))
		for i := range n.funcs {
			if groupingCol, ok := n.aggIsGroupingColumn(i); ok {
				groupColProjMap[i] = groupingCol
			} else {
				groupColProjMap[i] = -1
			}
		}
		childProps := planPhysicalProps(n.plan)
		n.props = childProps.project(groupColProjMap)
		// The GROUP BY columns form a weak key.
		var groupColSet util.FastIntSet
		for i, c := range groupColProjMap {
			if c == -1 {
				continue
			}
			groupColSet.Add(i)
		}
		if !groupColSet.Empty() {
			n.props.addWeakKey(groupColSet)
		}

		groupColProps := planPhysicalProps(n.plan)
		groupColProps = groupColProps.project(n.groupCols)
		n.orderedGroupCols = make([]int, len(groupColProps.ordering))
		for i, o := range groupColProps.ordering {
			n.orderedGroupCols[i] = o.ColIdx
		}
	case *windowNode:
		n.plan, err = doExpandPlan(ctx, p, noParams, n.plan)
	case *sortNode:
		if !n.ordering.IsPrefixOf(params.desiredOrdering) {
			params.desiredOrdering = n.ordering
		}
		n.plan, err = doExpandPlan(ctx, p, params, n.plan)
		if err != nil {
			return plan, err
		}
		if s, ok := n.plan.(*sortNode); ok {
			// (... ORDER BY x) ORDER BY y -> keep the outer sort
			elideDoubleSort(n, s)
		}
		// Check to see if the requested ordering is compatible with the existing
		// ordering.
		match := planPhysicalProps(n.plan).computeMatch(n.ordering)
		n.needSort = (match < len(n.ordering))
	case *distinctNode:
		plan, err = expandDistinctNode(ctx, p, params, n)
	case *scanNode:
		plan, err = expandScanNode(ctx, p, params, n)
	case *renderNode:
		plan, err = expandRenderNode(ctx, p, params, n)
	case *delayedNode:
		var newPlan planNode
		newPlan, err = n.constructor(ctx, p)
		if err != nil {
			return plan, err
		}
		newPlan, err = doExpandPlan(ctx, p, params, newPlan)
		if err != nil {
			return plan, err
		}
		plan = newPlan
	case *splitNode:
		n.rows, err = doExpandPlan(ctx, p, noParams, n.rows)
	case *relocateNode:
		n.rows, err = doExpandPlan(ctx, p, noParams, n.rows)
	case *cancelQueriesNode:
		n.rows, err = doExpandPlan(ctx, p, noParams, n.rows)
	case *cancelSessionsNode:
		n.rows, err = doExpandPlan(ctx, p, noParams, n.rows)
	case *controlJobsNode:
		n.rows, err = doExpandPlan(ctx, p, noParams, n.rows)
	case *projectSetNode:
		n.source, err = doExpandPlan(ctx, p, noParams, n.source)
	case *valuesNode:
	case *virtualTableNode:
	case *alterIndexNode:
	case *alterTableNode:
	case *alterSequenceNode:
	case *alterUserSetPasswordNode:
	case *renameColumnNode:
	case *renameDatabaseNode:
	case *renameIndexNode:
	case *renameTableNode:
	case *scrubNode:
	case *truncateNode:
	case *createDatabaseNode:
	case *createIndexNode:
	case *CreateUserNode:
	case *createViewNode:
	case *createSequenceNode:
	case *createStatsNode:
	case *dropDatabaseNode:
	case *dropIndexNode:
	case *dropTableNode:
	case *dropViewNode:
	case *dropSequenceNode:
	case *DropUserNode:
	case *zeroNode:
	case *unaryNode:
	case *hookFnNode:
		for i := range n.subplans {
			n.subplans[i], err = doExpandPlan(ctx, p, noParams, n.subplans[i])
			if err != nil {
				break
			}
		}
	case *sequenceSelectNode:
	case *setVarNode:
	case *setClusterSettingNode:
	case *setZoneConfigNode:
	case *showZoneConfigNode:
	case *showRangesNode:
	case *showFingerprintsNode:
	case *showTraceNode:
	case *scatterNode:
	case nil:
	default:
		panic(fmt.Sprintf("unhandled node type: %T", plan))
	}

	if atTop || needSpool {
		// Peel whatever spooling layers we have added prior to some elision above.
		for {
			if s, ok := plan.(*spoolNode); ok {
				plan = s.source
			} else {
				break
			}
		}
	}

	// If we need a spool, add it now.
	if needSpool {
		// The parent of this node does not provide spooling yet, but
		// spooling is required. Add it.
		plan = p.makeSpool(plan)
	}

	return plan, err
}

// elideDoubleSort removes the source sortNode because it is
// redundant.
func elideDoubleSort(parent, source *sortNode) {
	parent.plan = source.plan
	// Propagate renamed columns
	mutSourceCols := planMutableColumns(parent.plan)
	for i, col := range parent.columns {
		mutSourceCols[i].Name = col.Name
	}
}

func expandFilterNode(
	ctx context.Context, p *planner, params expandParameters, n *filterNode,
) (planNode, error) {
	var err error
	n.source.plan, err = doExpandPlan(ctx, p, params, n.source.plan)
	if err != nil {
		return n, err
	}

	// If there's a spool, pull it up.
	if spool, ok := n.source.plan.(*spoolNode); ok {
		n.source.plan = spool.source
		return p.makeSpool(n), nil
	}

	return n, nil
}

func expandDistinctNode(
	ctx context.Context, p *planner, params expandParameters, d *distinctNode,
) (planNode, error) {
	// TODO(radu/knz): perhaps we can propagate the DISTINCT
	// clause as desired ordering for the source node.
	var err error
	d.plan, err = doExpandPlan(ctx, p, params, d.plan)
	if err != nil {
		return d, err
	}

	// If there's a spool, we'll pull it up before returning below.
	respool := func(plan planNode) planNode { return plan }
	if spool, ok := d.plan.(*spoolNode); ok {
		respool = p.makeSpool
		d.plan = spool.source
	}

	// We use the physical properties of the distinctNode but projected
	// to the OnExprs (since the other columns are irrelevant to the
	// bookkeeping below).
	distinctOnPp := d.projectChildPropsToOnExprs()
	for _, k := range distinctOnPp.weakKeys {
		// If there is a strong key on the DISTINCT ON columns, then we
		// can elide the distinct node.
		// Since distinctNode does not project columns, this is fine
		// (it has a parent renderNode).
		if k.SubsetOf(distinctOnPp.notNullCols) {
			return respool(d.plan), nil
		}
	}

	if !distinctOnPp.isEmpty() {
		// distinctNode uses ordering to optimize "distinctification".
		// If the columns are sorted in a certain direction and the column
		// values "change", no subsequent rows can possibly have the same
		// column values again. We can thus clear out our bookkeeping.
		// This needs to be planColumns(n.plan) and not planColumns(n) since
		// distinctNode is "distinctifying" on the child plan's output rows.
		d.columnsInOrder = util.FastIntSet{}
		for i, numCols := 0, len(planColumns(d.plan)); i < numCols; i++ {
			group := distinctOnPp.eqGroups.Find(i)
			if distinctOnPp.constantCols.Contains(group) {
				d.columnsInOrder.Add(i)
				continue
			}
			for _, g := range distinctOnPp.ordering {
				if g.ColIdx == group {
					d.columnsInOrder.Add(i)
					break
				}
			}
		}
	}

	return respool(d), nil
}

func expandScanNode(
	ctx context.Context, p *planner, params expandParameters, s *scanNode,
) (planNode, error) {
	var analyzeOrdering analyzeOrderingFn
	if len(params.desiredOrdering) > 0 {
		analyzeOrdering = func(indexProps physicalProps) (matchingCols, totalCols int) {
			match := indexProps.computeMatch(params.desiredOrdering)
			return match, len(params.desiredOrdering)
		}
	}

	// If we have a reasonable limit, prefer an order matching index even if
	// it is not covering.
	var preferOrderMatchingIndex bool
	if len(params.desiredOrdering) > 0 && params.numRowsHint <= 1000 {
		preferOrderMatchingIndex = true
	}

	plan, err := p.selectIndex(ctx, s, analyzeOrdering, preferOrderMatchingIndex)
	if err != nil {
		return s, err
	}
	return plan, nil
}

func expandRenderNode(
	ctx context.Context, p *planner, params expandParameters, r *renderNode,
) (planNode, error) {
	params.desiredOrdering = translateOrdering(params.desiredOrdering, r)

	var err error
	r.source.plan, err = doExpandPlan(ctx, p, params, r.source.plan)
	if err != nil {
		return r, err
	}

	// If there's a spool, we'll pull it up before returning below.
	respool := func(plan planNode) planNode { return plan }
	if spool, ok := r.source.plan.(*spoolNode); ok {
		respool = p.makeSpool
		r.source.plan = spool.source
	}

	// Elide the render node if it renders its source as-is.
	sourceCols := planColumns(r.source.plan)
	if len(r.columns) == len(sourceCols) {
		// We don't drop renderNodes which have a different number of
		// columns than their sources, because some nodes currently assume
		// the number of source columns doesn't change between
		// instantiation and Start() (e.g. groupNode).
		// TODO(knz): investigate this further and enable the optimization fully.
		// TODO(radu): once this is investigated, we should look into coalescing
		// renderNodes (at least if the parent node is just a projection).
		needRename := false
		foundNonTrivialRender := false
		for i, e := range r.render {
			if r.columns[i].Omitted {
				continue
			}
			if iv, ok := e.(*tree.IndexedVar); ok && i < len(sourceCols) && iv.Idx == i {
				if sourceCols[i].Name != r.columns[i].Name {
					// Pass-through with rename: SELECT k AS x, v AS y FROM kv ...
					// We'll want to push the demanded names "x" and "y" to the
					// source.
					needRename = true
				}
				continue
			}
			foundNonTrivialRender = true
			break
		}
		if !foundNonTrivialRender {
			// Nothing special rendered, remove the render node entirely.
			if needRename {
				// If the render was renaming some columns, propagate the
				// requested names.
				mutSourceCols := planMutableColumns(r.source.plan)
				for i, col := range r.columns {
					mutSourceCols[i].Name = col.Name
				}
			}
			return respool(r.source.plan), nil
		}
	}

	p.computePhysicalPropsForRender(r, planPhysicalProps(r.source.plan))
	return respool(r), nil
}

// translateOrdering modifies a desired ordering on the output of the
// renderNode to a desired ordering on its input.
//
// For example, it translates a desired ordering [@2 asc, @1 desc] for
// a render node that renders [@4, @3, @2] into a desired ordering [@3
// asc, @4 desc].
func translateOrdering(desiredDown sqlbase.ColumnOrdering, r *renderNode) sqlbase.ColumnOrdering {
	var desiredUp sqlbase.ColumnOrdering

	for _, colOrder := range desiredDown {
		rendered := r.render[colOrder.ColIdx]
		if _, ok := rendered.(tree.Datum); ok {
			// Simple constants do not participate in ordering. Just ignore.
			continue
		}
		if v, ok := rendered.(*tree.IndexedVar); ok {
			// This is a simple render, so we can propagate the desired ordering.
			// However take care of avoiding duplicate ordering requests in
			// case there is more than one render for the same source column.
			duplicate := false
			for _, desiredOrderCol := range desiredUp {
				if desiredOrderCol.ColIdx == v.Idx {
					duplicate = true
					break
				}
			}
			if !duplicate {
				desiredUp = append(desiredUp,
					sqlbase.ColumnOrderInfo{ColIdx: v.Idx, Direction: colOrder.Direction})
			}
			continue
		}
		// Anything else and we can't propagate the desired order.
		break
	}
	return desiredUp
}

func translateGroupOrdering(
	desiredDown sqlbase.ColumnOrdering, g *groupNode,
) sqlbase.ColumnOrdering {
	var desiredUp sqlbase.ColumnOrdering

	for _, colOrder := range desiredDown {
		groupingCol, ok := g.aggIsGroupingColumn(colOrder.ColIdx)
		if !ok {
			// We cannot maintain the rest of the ordering since it uses a
			// non-identity aggregate function.
			break
		}
		// For identity (i.e., GROUP BY) columns, we can propagate the ordering.
		desiredUp = append(desiredUp, sqlbase.ColumnOrderInfo{
			ColIdx: groupingCol, Direction: colOrder.Direction,
		})
	}
	return desiredUp
}

// simplifyOrderings reduces the Ordering() guarantee of each node in the plan
// to that which is actually used by the parent(s). It also performs sortNode
// elision when possible.
//
// Simplification of orderings is useful for DistSQL, where maintaining
// orderings between parallel streams is not free.
//
// This determination cannot be done directly as part of the doExpandPlan
// recursion (using desiredOrdering) because some nodes (distinctNode) make use
// of whatever ordering the underlying node happens to provide.
func (p *planner) simplifyOrderings(plan planNode, usefulOrdering sqlbase.ColumnOrdering) planNode {
	if plan == nil {
		return nil
	}

	switch n := plan.(type) {
	case *createTableNode:
		n.sourcePlan = p.simplifyOrderings(n.sourcePlan, nil)
	case *updateNode:
		n.source = p.simplifyOrderings(n.source, nil)
	case *insertNode:
		n.source = p.simplifyOrderings(n.source, nil)
	case *upsertNode:
		n.source = p.simplifyOrderings(n.source, nil)
	case *deleteNode:
		n.source = p.simplifyOrderings(n.source, nil)
	case *rowCountNode:
		n.source = p.simplifyOrderings(n.source, nil).(batchedPlanNode)
	case *serializeNode:
		n.source = p.simplifyOrderings(n.source, nil).(batchedPlanNode)
	case *explainDistSQLNode:
		n.plan = p.simplifyOrderings(n.plan, nil)
	case *showTraceReplicaNode:
		n.plan = p.simplifyOrderings(n.plan, nil)
	case *explainPlanNode:
		if n.expanded {
			n.plan = p.simplifyOrderings(n.plan, nil)
		}
	case *projectSetNode:
		// We propagate down any ordering constraint relative to the
		// source. We don't propagate orderings expressed over the SRF
		// results.
		var desiredUp sqlbase.ColumnOrdering
		for _, colOrder := range usefulOrdering {
			if colOrder.ColIdx >= n.numColsInSource {
				break
			}
			desiredUp = append(desiredUp, colOrder)
		}
		n.source = p.simplifyOrderings(n.source, desiredUp)
		n.computePhysicalProps()
	case *indexJoinNode:
		// Passing through usefulOrdering here is fine because indexJoinNodes
		// produced by the heuristic planner always have the same schema as the
		// underlying table.
		n.index.props.trim(usefulOrdering)
		n.props.trim(usefulOrdering)
		n.table.props = physicalProps{}
	case *unionNode:
		n.right = p.simplifyOrderings(n.right, nil)
		n.left = p.simplifyOrderings(n.left, nil)
	case *filterNode:
		n.source.plan = p.simplifyOrderings(n.source.plan, usefulOrdering)
		n.computePhysicalProps(p.EvalContext())
	case *joinNode:
		// In DistSQL, we may take advantage of matching orderings on equality
		// columns and use merge joins. Preserve the orderings in that case.
		var usefulLeft, usefulRight sqlbase.ColumnOrdering
		if len(n.mergeJoinOrdering) > 0 {
			usefulLeft = make(sqlbase.ColumnOrdering, len(n.mergeJoinOrdering))
			usefulRight = make(sqlbase.ColumnOrdering, len(n.mergeJoinOrdering))
			for i, mergedCol := range n.mergeJoinOrdering {
				usefulLeft[i].ColIdx = n.pred.leftEqualityIndices[mergedCol.ColIdx]
				usefulRight[i].ColIdx = n.pred.rightEqualityIndices[mergedCol.ColIdx]
				usefulLeft[i].Direction = mergedCol.Direction
				usefulRight[i].Direction = mergedCol.Direction
			}
		}

		n.props.trim(usefulOrdering)

		n.left.plan = p.simplifyOrderings(n.left.plan, usefulLeft)
		n.right.plan = p.simplifyOrderings(n.right.plan, usefulRight)
	case *ordinalityNode:
		n.props.trim(usefulOrdering)
		n.source = p.simplifyOrderings(n.source, n.restrictOrdering(usefulOrdering))
	case *limitNode:
		n.plan = p.simplifyOrderings(n.plan, usefulOrdering)
	case *spoolNode:
		n.source = p.simplifyOrderings(n.source, usefulOrdering)
	case *groupNode:
		if n.needOnlyOneRow {
			n.plan = p.simplifyOrderings(n.plan, n.desiredOrdering)
		} else {
			// Keep only the ordering required by the groupNode.
			n.plan = p.simplifyOrderings(n.plan, translateGroupOrdering(n.props.ordering, n))
		}
		n.props.trim(usefulOrdering)
	case *windowNode:
		n.plan = p.simplifyOrderings(n.plan, nil)
	case *sortNode:
		if n.needSort {
			// We could pass no ordering below, but a partial ordering can speed up
			// the sort (and save memory), at least for DistSQL.
			n.plan = p.simplifyOrderings(n.plan, n.ordering)
		} else {
			constantCols := planPhysicalProps(n.plan).constantCols
			// Normally we would pass n.ordering; but n.ordering could be a prefix of
			// the useful ordering. Check for this, ignoring any constant columns.
			sortOrder := make(sqlbase.ColumnOrdering, 0, len(n.ordering))
			for _, c := range n.ordering {
				if !constantCols.Contains(c.ColIdx) {
					sortOrder = append(sortOrder, c)
				}
			}
			givenOrder := make(sqlbase.ColumnOrdering, 0, len(usefulOrdering))
			for _, c := range usefulOrdering {
				if !constantCols.Contains(c.ColIdx) {
					givenOrder = append(givenOrder, c)
				}
			}
			if sortOrder.IsPrefixOf(givenOrder) {
				n.plan = p.simplifyOrderings(n.plan, givenOrder)
			} else {
				n.plan = p.simplifyOrderings(n.plan, sortOrder)
			}
		}

		if !n.needSort {
			if len(n.columns) < len(planColumns(n.plan)) {
				// No sorting required, but we have to strip off the extra render
				// expressions we added. So keep the sort node.
				// TODO(radu): replace with a renderNode
			} else {
				// Sort node fully disappears.
				// Just be sure to propagate the column names.
				mutSourceCols := planMutableColumns(n.plan)
				for i, col := range n.columns {
					mutSourceCols[i].Name = col.Name
				}
				plan = n.plan
			}
		}
	case *distinctNode:
		// distinctNode uses the ordering computed from its source but
		// trimmed to the DISTINCT ON columns (if applicable).
		// Any useful ordering pertains only to the columns
		// we're distinctifying on.
		sourceOrdering := n.projectChildPropsToOnExprs()
		n.plan = p.simplifyOrderings(n.plan, sourceOrdering.ordering)
	case *scanNode:
		n.props.trim(usefulOrdering)
	case *renderNode:
		n.source.plan = p.simplifyOrderings(n.source.plan, translateOrdering(usefulOrdering, n))
		// Recompute r.ordering using the source's simplified ordering.
		// TODO(radu): in some cases there may be multiple possible n.orderings for
		// a given source plan ordering; we should pass usefulOrdering to help make
		// that choice (#13709).
		p.computePhysicalPropsForRender(n, planPhysicalProps(n.source.plan))
	case *delayedNode:
		n.plan = p.simplifyOrderings(n.plan, usefulOrdering)
	case *splitNode:
		n.rows = p.simplifyOrderings(n.rows, nil)
	case *relocateNode:
		n.rows = p.simplifyOrderings(n.rows, nil)
	case *cancelQueriesNode:
		n.rows = p.simplifyOrderings(n.rows, nil)
	case *cancelSessionsNode:
		n.rows = p.simplifyOrderings(n.rows, nil)
	case *controlJobsNode:
		n.rows = p.simplifyOrderings(n.rows, nil)
	case *valuesNode:
	case *virtualTableNode:
	case *alterIndexNode:
	case *alterTableNode:
	case *alterSequenceNode:
	case *alterUserSetPasswordNode:
	case *renameColumnNode:
	case *renameDatabaseNode:
	case *renameIndexNode:
	case *renameTableNode:
	case *scrubNode:
	case *truncateNode:
	case *createDatabaseNode:
	case *createIndexNode:
	case *CreateUserNode:
	case *createViewNode:
	case *createSequenceNode:
	case *createStatsNode:
	case *dropDatabaseNode:
	case *dropIndexNode:
	case *dropTableNode:
	case *dropViewNode:
	case *dropSequenceNode:
	case *DropUserNode:
	case *zeroNode:
	case *unaryNode:
	case *hookFnNode:
	case *sequenceSelectNode:
	case *setVarNode:
	case *setClusterSettingNode:
	case *setZoneConfigNode:
	case *showZoneConfigNode:
	case *showRangesNode:
	case *showFingerprintsNode:
	case *showTraceNode:
	case *scatterNode:
	default:
		panic(fmt.Sprintf("unhandled node type: %T", plan))
	}
	return plan
}
return plan, nil } // expandParameters propagates the known row limit and desired ordering at
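The translateOrdering comment above compresses a subtle index remapping. A minimal Python sketch of the same remapping for simple pass-through renders, using the comment's own example; translate_ordering here is a hypothetical illustration, not part of the planner, and it omits the real code's handling of constants and non-trivial render expressions:

# Sketch of translateOrdering's index remapping (illustrative only).
# `render` lists, per output column, which input column it projects;
# an ordering entry is (column_index, direction), 0-based here.

def translate_ordering(desired_down, render):
    desired_up = []
    seen = set()
    for col_idx, direction in desired_down:
        src = render[col_idx]   # input column behind this output column
        if src not in seen:     # skip duplicate requests on the same source column
            seen.add(src)
            desired_up.append((src, direction))
    return desired_up

# The comment's example: render [@4, @3, @2] (0-based: [3, 2, 1]),
# desired [@2 asc, @1 desc] -> expected [@3 asc, @4 desc].
print(translate_ordering([(1, "asc"), (0, "desc")], [3, 2, 1]))
# [(2, 'asc'), (3, 'desc')]  i.e. @3 asc, @4 desc in 1-based notation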
FilterButton.tsx
import React, {Component, Fragment} from 'react';
import classNames from 'classnames';
import {I18n} from '../I18n';
import {NotificationBadge} from '../Badge';
import {Button} from "../Button";

export class FilterButton extends Component<{
  children?: any,
  className?: any,
  onClick?: () => any,
  /**
   * Use any one of our icons
   */
  iconType?: any,
  iconSide?: any,
  color?: any,
  /**
   * Bolds the button if true
   */
  hasActiveFilters?: boolean,
  /**
   * Pass the total number of filters available and it will
   * add a subdued notification badge showing the number
   */
  numFilters?: number,
  /**
   * Pass the number of selected filters and it will
   * add a bright notification badge showing the number
   */
  numActiveFilters?: number,
  /**
   * Applies a visual state to the button; useful when used with a popover.
   */
  isSelected?: boolean,
  isDisabled?: boolean,
  /**
   * Defines HTML button input type
   */
  type?: string,
  /**
   * Should the button grow to fill its container; best used for dropdown buttons
   */
  grow?: boolean,
  /**
   * Remove border after button, good for opposite filters
   */
  withNext?: boolean,
  /**
   * _DEPRECATED: use `withNext`_
   * Remove border after button, good for opposite filters
   */
  noDivider?: boolean,
  textProps?: any
}> {
  static defaultProps = {
    type: 'button',
    iconSide: 'right',
    color: 'text',
    grow: true,
  }

  static propTypes: {};

  render() {
    let {
      children,
      className,
      iconType,
      iconSide,
      color,
      hasActiveFilters,
      numFilters,
      numActiveFilters,
      isDisabled,
      isSelected,
      type,
      grow,
      noDivider,
      withNext,
      textProps,
      ...rest
    } = this.props;

    // != instead of !== to allow for null and undefined
    const numFiltersDefined = numFilters != null;

    const classes = classNames(
      'c-filter-button',
      {
        'c-filter-button-isSelected': isSelected,
        'c-filter-button-hasActiveFilters': hasActiveFilters,
        'c-filter-button-hasNotification': numFiltersDefined,
        'c-filter-button--hasIcon': iconType,
        'c-filter-button--noGrow': !grow,
        'c-filter-button--withNext': noDivider || withNext,
      },
      className,
    );

    const buttonTextClassNames = classNames(
      // 'c-filter-button__textShift',
      {'c-filter-button__text-hasNotification': numFiltersDefined,},
      textProps && textProps.className,
    );

    let dataText;
    if (typeof children === 'string') {
      dataText = children;
    }

    const buttonContents = (
      <Fragment>
        <span className="c-filter-button__textShift" data-text={dataText} title={dataText}>
          {children}
        </span>

        {numFiltersDefined &&
        <I18n
          token="c-filter-button.filterBadge"
          values={{count: numActiveFilters || numFilters, hasActiveFilters}}
          default={({count, hasActiveFilters}) => `${count} ${hasActiveFilters ? 'active' : 'available'} filters`}
        >
          {
            filterBadge => (
              <NotificationBadge
aria-label={filterBadge} color={isDisabled || !hasActiveFilters ? 'subdued' : 'accent'} > {numActiveFilters || numFilters} </NotificationBadge> ) } </I18n> } </Fragment> ); return ( <Button className={classes} color={color} isDisabled={isDisabled} iconSide={iconSide} iconType={iconType} type={type} textProps={{...textProps, className: buttonTextClassNames}} {...rest} label={buttonContents} /> ); } }
className="c-filter-button__notification" size="m"
utils.py
import numpy as np import scipy.sparse as sp import torch from sklearn.preprocessing import StandardScaler from sklearn.model_selection import train_test_split from torch_geometric.utils import to_networkx, degree import torch.nn.functional as F def convert_to_nodeDegreeFeatures(graphs): # print(graph.x) graph_infos = [] maxdegree = 0 for i, graph in enumerate(graphs): g = to_networkx(graph, to_undirected=True) gdegree = max(dict(g.degree).values()) if gdegree > maxdegree: maxdegree = gdegree graph_infos.append( (graph, g.degree, graph.num_nodes) ) # (graph, node_degrees, num_nodes) new_graphs = [] for i, tuple in enumerate(graph_infos): idx, x = tuple[0].edge_index[0], tuple[0].x deg = degree(idx, tuple[2], dtype=torch.long) deg = F.one_hot(deg, num_classes=maxdegree + 1).to(torch.float) new_graph = tuple[0].clone() new_graph.__setitem__("x", deg) new_graphs.append(new_graph) return new_graphs def
(graphs, train=None, test=None, shuffle=True, seed=None):
    y = torch.cat([graph.y for graph in graphs])
    graphs_tv, graphs_test = train_test_split(
        graphs,
        train_size=train,
        test_size=test,
        stratify=y,
        shuffle=shuffle,
        random_state=seed,
    )
    return graphs_tv, graphs_test


def np_uniform_sample_next(compact_adj, tree, fanout):
    last_level = tree[-1]  # [batch, f^depth]
    batch_lengths = compact_adj.degrees[last_level]
    nodes = np.repeat(last_level, fanout, axis=1)
    batch_lengths = np.repeat(batch_lengths, fanout, axis=1)
    batch_next_neighbor_ids = np.random.uniform(
        size=batch_lengths.shape, low=0, high=1 - 1e-9
    )
    # Shape = (len(nodes), neighbors_per_node)
    batch_next_neighbor_ids = np.array(
        batch_next_neighbor_ids * batch_lengths, dtype=last_level.dtype
    )
    shape = batch_next_neighbor_ids.shape
    batch_next_neighbor_ids = np.array(
        compact_adj.compact_adj[nodes.reshape(-1), batch_next_neighbor_ids.reshape(-1)]
    ).reshape(shape)

    return batch_next_neighbor_ids


def np_traverse(
    compact_adj, seed_nodes, fanouts=(1,), sample_fn=np_uniform_sample_next
):
    if not isinstance(seed_nodes, np.ndarray):
        raise ValueError("Seed must be a numpy array")

    if (
        len(seed_nodes.shape) > 2
        or len(seed_nodes.shape) < 1
        or not str(seed_nodes.dtype).startswith("int")
    ):
        raise ValueError("seed_nodes must be 1D or 2D int array")

    if len(seed_nodes.shape) == 1:
        seed_nodes = np.expand_dims(seed_nodes, 1)

    # Make walk-tree
    forest_array = [seed_nodes]
    for f in fanouts:
        next_level = sample_fn(compact_adj, forest_array, f)
        assert next_level.shape[1] == forest_array[-1].shape[1] * f

        forest_array.append(next_level)

    return forest_array


class WalkForestCollator(object):
    def __init__(self, normalize_features=False):
        self.normalize_features = normalize_features

    def __call__(self, molecule):
        comp_adj, feature_matrix, label, fanouts = molecule[0]
        node_ids = np.array(list(range(feature_matrix.shape[0])), dtype=np.int32)
        forest = np_traverse(comp_adj, node_ids, fanouts)
        torch_forest = [torch.from_numpy(forest[0]).flatten()]
        label = np.where(np.isnan(label), 0.0, label)

        for i in range(len(forest) - 1):
            torch_forest.append(torch.from_numpy(forest[i + 1]).reshape(-1, fanouts[i]))

        if self.normalize_features:
            mx = sp.csr_matrix(feature_matrix)
            rowsum = np.array(mx.sum(1))
            r_inv = np.power(rowsum, -1).flatten()
            r_inv[np.isinf(r_inv)] = 0.0
            r_mat_inv = sp.diags(r_inv)
            normalized_feature_matrix = r_mat_inv.dot(mx)
            normalized_feature_matrix = np.array(normalized_feature_matrix.todense())
        else:
            scaler = StandardScaler()
            scaler.fit(feature_matrix)
            normalized_feature_matrix = scaler.transform(feature_matrix)

        return (
            torch_forest,
            torch.as_tensor(normalized_feature_matrix, dtype=torch.float32),
            torch.as_tensor(label, dtype=torch.float32),
        )


class DefaultCollator(object):
    def __init__(self, normalize_features=True, normalize_adj=True):
        self.normalize_features = normalize_features
        self.normalize_adj = normalize_adj

    def __call__(self, molecule):
        adj_matrix, feature_matrix, label, _ = molecule[0]
        label = np.where(np.isnan(label), 0.0, label)

        if self.normalize_features:
            mx = sp.csr_matrix(feature_matrix)
            rowsum = np.array(mx.sum(1))
            r_inv = np.power(rowsum, -1).flatten()
            r_inv[np.isinf(r_inv)] = 0.0
            r_mat_inv = sp.diags(r_inv)
            normalized_feature_matrix = r_mat_inv.dot(mx)
            normalized_feature_matrix = np.array(normalized_feature_matrix.todense())
        else:
            scaler = StandardScaler()
            scaler.fit(feature_matrix)
            normalized_feature_matrix = scaler.transform(feature_matrix)

        if self.normalize_adj:
            rowsum = np.array(adj_matrix.sum(1))
            r_inv_sqrt = np.power(rowsum, -0.5).flatten()
r_inv_sqrt[np.isinf(r_inv_sqrt)] = 0.0 r_mat_inv_sqrt = sp.diags(r_inv_sqrt) normalized_adj_matrix = ( adj_matrix.dot(r_mat_inv_sqrt).transpose().dot(r_mat_inv_sqrt) ) else: normalized_adj_matrix = adj_matrix return ( torch.as_tensor( np.array(normalized_adj_matrix.todense()), dtype=torch.float32 ), torch.as_tensor(normalized_feature_matrix, dtype=torch.float32), torch.as_tensor(label, dtype=torch.float32), )
split_data
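DefaultCollator above applies symmetric adjacency normalization (D^-1/2 A D^-1/2, written as A·D^-1/2 transposed and scaled again). A minimal sketch on a toy path graph, self-contained with the same numpy/scipy imports utils.py already uses:

import numpy as np
import scipy.sparse as sp

# Toy 3-node path graph: edges 0-1 and 1-2.
adj = sp.csr_matrix(np.array([[0, 1, 0],
                              [1, 0, 1],
                              [0, 1, 0]], dtype=float))

rowsum = np.array(adj.sum(1))                   # node degrees
r_inv_sqrt = np.power(rowsum, -0.5).flatten()
r_inv_sqrt[np.isinf(r_inv_sqrt)] = 0.0          # guard isolated (degree-0) nodes
r_mat_inv_sqrt = sp.diags(r_inv_sqrt)

# Same expression as DefaultCollator: for symmetric A this is D^-1/2 A D^-1/2.
norm = adj.dot(r_mat_inv_sqrt).transpose().dot(r_mat_inv_sqrt)
print(norm.todense())
# Off-diagonal entries become 1/sqrt(deg_i * deg_j), e.g. 1/sqrt(2) ≈ 0.707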
test_preprocessing.py
import pytest import io from cite_seq_count import preprocessing @pytest.fixture def data(): from collections import OrderedDict from itertools import islice # Test file paths pytest.correct_whitelist_path = 'tests/test_data/whitelists/correct.csv' pytest.correct_tags_path = 'tests/test_data/tags/correct.csv' pytest.correct_R1_path = 'tests/test_data/fastq/correct_R1.fastq.gz' pytest.correct_R2_path = 'tests/test_data/fastq/correct_R2.fastq.gz' pytest.corrupt_R1_path = 'tests/test_data/fastq/corrupted_R1.fastq.gz' pytest.corrupt_R2_path = 'tests/test_data/fastq/corrupted_R2.fastq.gz' # Create some variables to compare to pytest.correct_whitelist = set(['ACTGTTTTATTGGCCT','TTCATAAGGTAGGGAT']) pytest.correct_tags = { 'AGGACCATCCAA':'CITE_LEN_12_1', 'ACATGTTACCGT':'CITE_LEN_12_2', 'AGCTTACTATCC':'CITE_LEN_12_3', 'TCGATAATGCGAGTACAA':'CITE_LEN_18_1', 'GAGGCTGAGCTAGCTAGT':'CITE_LEN_18_2', 'GGCTGATGCTGACTGCTA':'CITE_LEN_18_3', 'TGTGACGTATTGCTAGCTAG':'CITE_LEN_20_1', 'ACTGTCTAACGGGTCAGTGC':'CITE_LEN_20_2', 'TATCACATCGGTGGATCCAT':'CITE_LEN_20_3'} pytest.correct_ordered_tags = OrderedDict({ 'TGTGACGTATTGCTAGCTAG':'CITE_LEN_20_1-TGTGACGTATTGCTAGCTAG', 'ACTGTCTAACGGGTCAGTGC':'CITE_LEN_20_2-ACTGTCTAACGGGTCAGTGC', 'TATCACATCGGTGGATCCAT':'CITE_LEN_20_3-TATCACATCGGTGGATCCAT', 'TCGATAATGCGAGTACAA':'CITE_LEN_18_1-TCGATAATGCGAGTACAA', 'GAGGCTGAGCTAGCTAGT':'CITE_LEN_18_2-GAGGCTGAGCTAGCTAGT', 'GGCTGATGCTGACTGCTA':'CITE_LEN_18_3-GGCTGATGCTGACTGCTA', 'AGGACCATCCAA':'CITE_LEN_12_1-AGGACCATCCAA', 'ACATGTTACCGT':'CITE_LEN_12_2-ACATGTTACCGT', 'AGCTTACTATCC':'CITE_LEN_12_3-AGCTTACTATCC'}) pytest.barcode_slice = slice(0, 16) pytest.umi_slice = slice(16, 26) pytest.barcode_umi_length = 26 @pytest.mark.dependency() def test_parse_whitelist_csv(data): assert preprocessing.parse_whitelist_csv(pytest.correct_whitelist_path, 16, 1) == (pytest.correct_whitelist,1) @pytest.mark.dependency() def test_parse_tags_csv(data): assert preprocessing.parse_tags_csv(pytest.correct_tags_path) == pytest.correct_tags @pytest.mark.dependency(depends=['test_parse_tags_csv']) def test_check_tags(data): assert preprocessing.check_tags(pytest.correct_tags, 5) == pytest.correct_ordered_tags @pytest.mark.dependency(depends=['test_check_tags']) def
(data): with pytest.raises(SystemExit): preprocessing.check_tags(pytest.correct_tags, 8) @pytest.mark.dependency(depends=['test_parse_whitelist_csv']) def test_check_barcodes_lengths(data): assert preprocessing.check_barcodes_lengths(26, 1, 16, 17, 26) == (pytest.barcode_slice, pytest.umi_slice, pytest.barcode_umi_length) @pytest.mark.dependency() def test_get_n_lines(data): assert preprocessing.get_n_lines(pytest.correct_R1_path) == (200 * 4) @pytest.mark.dependency(depends=['test_get_n_lines']) def test_get_n_lines_not_multiple_of_4(data): with pytest.raises(SystemExit): preprocessing.get_n_lines(pytest.corrupt_R1_path)
test_check_distance_too_big_between_tags
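The barcode_slice/umi_slice fixtures above encode the read-1 layout the preprocessing code assumes: a 16 bp cell barcode followed by a 10 bp UMI, 26 bp total. A tiny sketch of that slicing; the barcode is one of the fixture's whitelist entries, while the UMI portion here is made up:

# Read 1 layout assumed by the fixtures: 16 bp cell barcode + 10 bp UMI = 26 bp.
barcode_slice = slice(0, 16)
umi_slice = slice(16, 26)

read1 = "ACTGTTTTATTGGCCT" + "AACCGGTTAA"   # hypothetical 26 bp sequence
assert len(read1) == 26
print(read1[barcode_slice])  # ACTGTTTTATTGGCCT  (cell barcode)
print(read1[umi_slice])      # AACCGGTTAA        (UMI)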
model_upstream.go
/*
 * Ory Oathkeeper API
 *
 * Documentation for all of Ory Oathkeeper's APIs.
 *
 * API version: v0.38.20-beta.1
 * Contact: [email protected]
 */

// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.

package client

import (
	"encoding/json"
)

// Upstream upstream
type Upstream struct {
	// PreserveHost, if false (the default), tells ORY Oathkeeper to set the upstream request's Host header to the hostname of the API's upstream's URL. Setting this flag to true instructs ORY Oathkeeper not to do so.
	PreserveHost *bool `json:"preserve_host,omitempty"`
	// StripPath if set, replaces the provided path prefix when forwarding the requested URL to the upstream URL.
	StripPath *string `json:"strip_path,omitempty"`
	// URL is the URL the request will be proxied to.
	Url *string `json:"url,omitempty"`
}

// NewUpstream instantiates a new Upstream object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func
() *Upstream { this := Upstream{} return &this } // NewUpstreamWithDefaults instantiates a new Upstream object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewUpstreamWithDefaults() *Upstream { this := Upstream{} return &this } // GetPreserveHost returns the PreserveHost field value if set, zero value otherwise. func (o *Upstream) GetPreserveHost() bool { if o == nil || o.PreserveHost == nil { var ret bool return ret } return *o.PreserveHost } // GetPreserveHostOk returns a tuple with the PreserveHost field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *Upstream) GetPreserveHostOk() (*bool, bool) { if o == nil || o.PreserveHost == nil { return nil, false } return o.PreserveHost, true } // HasPreserveHost returns a boolean if a field has been set. func (o *Upstream) HasPreserveHost() bool { if o != nil && o.PreserveHost != nil { return true } return false } // SetPreserveHost gets a reference to the given bool and assigns it to the PreserveHost field. func (o *Upstream) SetPreserveHost(v bool) { o.PreserveHost = &v } // GetStripPath returns the StripPath field value if set, zero value otherwise. func (o *Upstream) GetStripPath() string { if o == nil || o.StripPath == nil { var ret string return ret } return *o.StripPath } // GetStripPathOk returns a tuple with the StripPath field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *Upstream) GetStripPathOk() (*string, bool) { if o == nil || o.StripPath == nil { return nil, false } return o.StripPath, true } // HasStripPath returns a boolean if a field has been set. func (o *Upstream) HasStripPath() bool { if o != nil && o.StripPath != nil { return true } return false } // SetStripPath gets a reference to the given string and assigns it to the StripPath field. func (o *Upstream) SetStripPath(v string) { o.StripPath = &v } // GetUrl returns the Url field value if set, zero value otherwise. func (o *Upstream) GetUrl() string { if o == nil || o.Url == nil { var ret string return ret } return *o.Url } // GetUrlOk returns a tuple with the Url field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *Upstream) GetUrlOk() (*string, bool) { if o == nil || o.Url == nil { return nil, false } return o.Url, true } // HasUrl returns a boolean if a field has been set. func (o *Upstream) HasUrl() bool { if o != nil && o.Url != nil { return true } return false } // SetUrl gets a reference to the given string and assigns it to the Url field. 
func (o *Upstream) SetUrl(v string) { o.Url = &v } func (o Upstream) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.PreserveHost != nil { toSerialize["preserve_host"] = o.PreserveHost } if o.StripPath != nil { toSerialize["strip_path"] = o.StripPath } if o.Url != nil { toSerialize["url"] = o.Url } return json.Marshal(toSerialize) } type NullableUpstream struct { value *Upstream isSet bool } func (v NullableUpstream) Get() *Upstream { return v.value } func (v *NullableUpstream) Set(val *Upstream) { v.value = val v.isSet = true } func (v NullableUpstream) IsSet() bool { return v.isSet } func (v *NullableUpstream) Unset() { v.value = nil v.isSet = false } func NewNullableUpstream(val *Upstream) *NullableUpstream { return &NullableUpstream{value: val, isSet: true} } func (v NullableUpstream) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableUpstream) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
NewUpstream
icon_play_lesson.rs
pub struct IconPlayLesson { props: crate::Props, } impl yew::Component for IconPlayLesson { type Properties = crate::Props; type Message = (); fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender { true } fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender { false } fn view(&self) -> yew::prelude::Html
}
{ yew::prelude::html! { <svg class=self.props.class.unwrap_or("") width=self.props.size.unwrap_or(24).to_string() height=self.props.size.unwrap_or(24).to_string() viewBox="0 0 24 24" fill=self.props.fill.unwrap_or("none") stroke=self.props.color.unwrap_or("currentColor") stroke-width=self.props.stroke_width.unwrap_or(2).to_string() stroke-linecap=self.props.stroke_linecap.unwrap_or("round") stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round") > <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><g><path d="M0,0h24v24H0V0z" fill="none"/></g><g><g><path d="M18,11c0.34,0,0.67,0.03,1,0.08V4c0-1.1-0.9-2-2-2H5C3.9,2,3,2.9,3,4v16c0,1.1,0.9,2,2,2h7.26C11.47,20.87,11,19.49,11,18 C11,14.13,14.13,11,18,11z M7,11V4h5v7L9.5,9.5L7,11z"/><path d="M18,13c-2.76,0-5,2.24-5,5s2.24,5,5,5s5-2.24,5-5S20.76,13,18,13z M16.75,20.5v-5l4,2.5L16.75,20.5z"/></g></g></svg> </svg> } }
mark_read.py
from django.core.management.base import BaseCommand from django.contrib.auth.models import User from apps.reader.models import UserSubscription import datetime class Command(BaseCommand): def
(self, parser):
        # Parse --days as an int so the 0-check and timedelta below work on scalars.
        parser.add_argument("-d", "--days", dest="days", type=int, default=1,
                            help="Days of unread")
        parser.add_argument("-u", "--username", dest="username",
                            help="Specify user id or username")
        parser.add_argument("-U", "--userid", dest="userid",
                            help="Specify user id or username")

    def handle(self, *args, **options):
        if options['userid']:
            user = User.objects.filter(pk=options['userid'])[0]
        elif options['username']:
            user = User.objects.get(username__icontains=options['username'])
        else:
            raise Exception("Need username or user id.")
        user.profile.last_seen_on = datetime.datetime.utcnow()
        user.profile.save()
        feeds = UserSubscription.objects.filter(user=user)
        for sub in feeds:
            if options['days'] == 0:
                sub.mark_feed_read()
            else:
                sub.mark_read_date = datetime.datetime.utcnow() - datetime.timedelta(days=int(options['days']))
                sub.needs_unread_recalc = True
                sub.save()
add_arguments
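A hedged usage sketch for the command above via Django's call_command; the username is hypothetical, and this assumes the project's Django settings are already configured:

# Illustrative invocation only; must run inside the project's Django context.
from django.core.management import call_command

# Mark everything older than 3 days as read for a (hypothetical) user "alice":
call_command("mark_read", username="alice", days=3)

# Equivalent shell form:
#   python manage.py mark_read --username alice --days 3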
root.go
package cmd

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"

	homedir "github.com/mitchellh/go-homedir"
	"github.com/spf13/viper"
)

var (
	gitCommit   string
	version     string
	buildDate   string
	buildNumber string
)

var cfgFile string
var showVersion bool

// rootCmd represents the base command when called without any subcommands
var rootCmd = &cobra.Command{
	Use:   "kube-pod-rescheduler",
	Short: "Kubernetes controller that helps to evict and reschedule pods from the node when they're stuck on it for some reason.",
	Long:  "Kubernetes controller that helps to evict and reschedule pods from the node when they're stuck on it for some reason.",
	Run: func(cmd *cobra.Command, args []string) {
		if showVersion {
			fmt.Printf("kube-pod-rescheduler\n\nversion: %v\nbuildNumber: %v\ncommit: %v\ndate: %v\n\n",
				version, buildNumber, gitCommit, buildDate)
		} else {
			cmd.Help()
		}
	},
}

// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once to the rootCmd.
func Execute() {
	if err := rootCmd.Execute(); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
}

func init() {
	cobra.OnInitialize(onInitialize)

	rootCmd.Flags().BoolVarP(&showVersion, "version", "v", false, "Show version and related information")
	rootCmd.PersistentFlags().StringVarP(&cfgFile, "config", "c", "", "config file (default is $HOME/.kube-pod-rescheduler.yaml)")
	rootCmd.PersistentFlags().BoolP("dry-run", "d", false, "Don't apply changes to the cluster, just print them")

	// Hide help command
	rootCmd.SetHelpCommand(&cobra.Command{
		Use:    "no-help",
		Hidden: true,
	})
}

// onInitialize reads in config file and ENV variables if set.
func onInitialize() {
	if cfgFile != "" {
		// Use config file from the flag.
		viper.SetConfigFile(cfgFile)
	} else {
		// Find home directory.
		home, err := homedir.Dir()
		if err != nil {
			fmt.Println(err)
// Search config in home directory with name ".kube-pod-rescheduler" (without extension). viper.AddConfigPath(home) viper.SetConfigName(".kube-pod-rescheduler") } viper.AutomaticEnv() // read in environment variables that match // If a config file is found, read it in. if err := viper.ReadInConfig(); err != nil { fmt.Println(err) //os.Exit(1) } else { fmt.Println("Using config file:", viper.ConfigFileUsed()) } }
os.Exit(1) }
condition.rs
// Issue states
//
// Copyright (c) 2018 Julian Ganz
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//

//! Issue states and conditions
//!
//! This module provides the `Condition` trait which will usually be implemented
//! by the library's user.
//!

use std::error::Error as EError;
use std::result::Result as RResult;

use error::*;


/// Trait for issue metadata conditions
///
/// A `Condition` represents a predicate for an issue state: a function mapping
/// an issue to a boolean value indicating whether the condition is fulfilled or
/// not. It is generally assumed that a condition consists of "condition atoms",
/// which each specify a "singular" condition on a specific piece of metadata.
///
/// Whatever is used as type for conditions on metadata has to implement this
/// trait. It enables `IssueStates` to evaluate the condition. Additionally, the
/// `ConditionFactory` trait should be implemented in order to enable parsing
/// conditions from configuration files.
///
pub trait Condition {
    /// Type of the issue being evaluated
    ///
    /// Alternatively, some representation of the metadata may be used in place
    /// of the issue type.
    ///
    type Issue;

    /// Check whether the condition is satisfied by the issue provided
    ///
    fn satisfied_by(&self, issue: &Self::Issue) -> bool;
}


/// Match operators
///
/// These operators define how the piece of metadata queried from the issue is
/// compared to the literal provided with the condition atom. The former is
/// considered the "left-hand value" while the latter is considered the
/// "right-hand value" in this context.
///
#[derive(Debug, PartialEq, Eq)]
pub enum MatchOp {
    /// Match if the values are equivalent
    Equivalence,
    /// Match if the left-hand value is lower than the right-hand value.
    LowerThan,
    /// Match if the left-hand value is greater than the right-hand value.
    GreaterThan,
    /// Match if the left-hand value is lower than the right-hand value or
    /// equal.
    LowerThanOrEqual,
    /// Match if the left-hand value is greater than the right-hand value or
    /// equal.
    GreaterThanOrEqual,
    /// Match if the left-hand value contains or is equal to the right-hand
    /// value.
    Contains,
}


/// Factory trait for conditions
///
/// This trait allows issue states parsers to create conditions from a string
/// representation. Implementers need not implement the actual parsing. Instead,
/// the function `make_condition()` will be supplied with the components of a
/// condition.
///
pub trait ConditionFactory<C>
    where C: Condition + Sized
    /// Create a condition from bits and pieces
    ///
    /// The condition will be assembled from the "metadata identifier" (e.g. the
    /// name of the piece of metadata), a flag indicating whether the condition
    /// is negated or not and, optionally, the matching operator and a string
    /// representation of the right-hand side value.
    ///
    /// If the operator and value are not present, the resulting condition is
    /// expected to yield true if the piece of metadata denoted by the metadata
    /// identifier is present, e.g. non-null.
    ///
    fn make_condition(
        &self,
        name: &str,
        neg: bool,
        val_op: Option<(MatchOp, &str)>
    ) -> RResult<C, Self::Error>;

    /// Parse a condition directly from a string
    ///
    /// This function parses a `Condition` directly from a string using the
    /// `make_condition()` function.
    ///
    fn parse_condition(
        &self,
        string: &str,
    ) -> RResult<C, Self::Error> {
        parse_condition(string)
            .map_err(From::from)
            .and_then(|(name, neg, op_val)| self.make_condition(name, neg, op_val))
    }
}


/// Parse the bits of a condition atom
///
/// This method parses a condition atom. It returns the "metadata identifier"
/// (e.g. the name of the piece of metadata), a flag indicating whether the
/// condition is negated or not and, optionally, the matching operator and a
/// string representation of the right-hand side value.
///
/// The matching operator and value may be `None`. In this case, the condition
/// parsed is expected to check for the existence of a piece of metadata.
///
pub fn parse_condition(string: &str) -> Result<(&str, bool, Option<(MatchOp, &str)>)> {
    if let Some(pos) = string.find(|ref c| reserved_char(c)) {
        if pos == 0 {
            // The condition is either a negated existence (e.g. starts with
            // `!`) or invalid.
            let (neg, name) = string.split_at(1);
            return if neg == "!" && !name.contains(|ref c| reserved_char(c)) {
                Ok((name, true, None))
            } else {
                Err(Error::from(ErrorKind::ConditionParseError))
            }
        }

        let (name, mut op_val) = string.split_at(pos);

        let negated = op_val.starts_with('!');
        if negated {
            op_val = op_val.split_at(1).1;
        }

        Ok((name, negated, parse_op_val(op_val)?.into()))
    } else {
        // If the string representation does not contain any reserved
        // characters, this condition is the existence of the piece of metadata.
        Ok((string, false, None))
    }
}


/// Check whether a character is a reserved character
///
fn reserved_char(c: &char) -> bool {
    ['!', '=', '<', '>', '~'].contains(c)
}


/// Parse and extract the match operator and value from the compound
///
fn parse_op_val(string: &str) -> Result<(MatchOp, &str)> {
    let mut chars = string.chars();
    let (op, pos) = match chars.next() {
        Some('=') => (MatchOp::Equivalence, 1),
        Some('<') => match chars.next() {
            Some('=') => (MatchOp::LowerThanOrEqual, 2),
            _ => (MatchOp::LowerThan, 1),
        },
        Some('>') => match chars.next() {
            Some('=') => (MatchOp::GreaterThanOrEqual, 2),
            _ => (MatchOp::GreaterThan, 1),
        },
        Some('~') => (MatchOp::Contains, 1),
        _ => return Err(Error::from(ErrorKind::ConditionParseError)),
    };
    Ok((op, string.split_at(pos).1))
}


#[cfg(test)]
mod tests {
    use super::*;

    fn parse(string: &str) -> (&str, bool, Option<(MatchOp, &str)>) {
        parse_condition(string).expect("Failed to parse condition atom!")
    }

    #[test]
    fn smoke() {
        assert_eq!(parse("foo"), ("foo", false, None));
        assert_eq!(parse("!foo"), ("foo", true, None));
        assert_eq!(parse("foo=bar"), ("foo", false, Some((MatchOp::Equivalence, "bar"))));
        assert_eq!(parse("foo<bar"), ("foo", false, Some((MatchOp::LowerThan, "bar"))));
        assert_eq!(parse("foo>bar"), ("foo", false, Some((MatchOp::GreaterThan, "bar"))));
        assert_eq!(parse("foo<=bar"), ("foo", false, Some((MatchOp::LowerThanOrEqual, "bar"))));
        assert_eq!(parse("foo>=bar"), ("foo", false, Some((MatchOp::GreaterThanOrEqual, "bar"))));
        assert_eq!(parse("foo!=bar"), ("foo", true, Some((MatchOp::Equivalence, "bar"))));
        assert_eq!(parse("foo!<bar"), ("foo", true, Some((MatchOp::LowerThan, "bar"))));
        assert_eq!(parse("foo!>bar"), ("foo", true, Some((MatchOp::GreaterThan, "bar"))));
        assert_eq!(parse("foo!<=bar"), ("foo", true, Some((MatchOp::LowerThanOrEqual, "bar"))));
        assert_eq!(parse("foo!>=bar"), ("foo", true, Some((MatchOp::GreaterThanOrEqual, "bar"))));
        assert_eq!(parse("foo!~bar"), ("foo", true, Some((MatchOp::Contains, "bar"))));
    }
}
{ type Error : From<Error> + EError;
runner_mockingjay.py
# -*- coding: utf-8 -*- #
"""*********************************************************************************************"""
#   FileName     [ runner_mockingjay.py ]
#   Synopsis     [ runner for the mockingjay model ]
#   Author       [ Andy T. Liu (Andi611) ]
#   Copyright    [ Copyleft(c), Speech Lab, NTU, Taiwan ]
"""*********************************************************************************************"""


###############
# IMPORTATION #
###############
import yaml
import torch
import random
import argparse
import numpy as np
from utility.timer import Timer


#############################
# MOCKINGJAY CONFIGURATIONS #
#############################
def get_mockingjay_args():

    parser = argparse.ArgumentParser(description='Argument Parser for the mockingjay project.')

    # setting
    parser.add_argument('--config', default='config/mockingjay_libri.yaml', type=str, help='Path to experiment config.')
    parser.add_argument('--seed', default=1337, type=int, help='Random seed for reproducible results.', required=False)

    # Logging
    parser.add_argument('--logdir', default='log/log_mockingjay/', type=str, help='Logging path.', required=False)
    parser.add_argument('--name', default=None, type=str, help='Name for logging.', required=False)

    # model ckpt
    parser.add_argument('--load', action='store_true', help='Load pre-trained model to restore training, no need to specify this during testing.')
    parser.add_argument('--ckpdir', default='result/result_mockingjay/', type=str, help='Checkpoint/Result path.', required=False)
    parser.add_argument('--ckpt', default='mockingjay_libri_sd1337_LinearLarge/mockingjay-500000.ckpt', type=str, help='path to mockingjay model checkpoint.', required=False)
    # parser.add_argument('--ckpt', default='mockingjay_libri_sd1337_MelBase/mockingjay-500000.ckpt', type=str, help='path to mockingjay model checkpoint.', required=False)
    parser.add_argument('--dckpt', default='baseline_sentiment_libri_sd1337/baseline_sentiment-500000.ckpt', type=str, help='path to downstream checkpoint.', required=False)
    parser.add_argument('--apc_path', default='./result/result_apc/apc_libri_sd1337_standard/apc-500000.ckpt', type=str, help='path to the apc model checkpoint.', required=False)

    # mockingjay
    parser.add_argument('--train', action='store_true', help='Train the model.')
    parser.add_argument('--run_mockingjay', action='store_true', help='train and test the downstream tasks using mockingjay representations.')
    parser.add_argument('--run_apc', action='store_true', help='train and test the downstream tasks using apc representations.')
    parser.add_argument('--fine_tune', action='store_true', help='fine tune the mockingjay model with downstream task.')
    parser.add_argument('--plot', action='store_true', help='Plot model generated results during testing.')

    # phone task
    parser.add_argument('--train_phone', action='store_true', help='Train the phone classifier on mel or mockingjay representations.')
    parser.add_argument('--test_phone', action='store_true', help='Test mel or mockingjay representations using the trained phone classifier.')

    # sentiment task
    parser.add_argument('--train_sentiment', action='store_true', help='Train the sentiment classifier on mel or mockingjay representations.')
    parser.add_argument('--test_sentiment', action='store_true', help='Test mel or mockingjay representations using the trained sentiment classifier.')

    # speaker verification task
    parser.add_argument('--train_speaker', action='store_true', help='Train the speaker classifier on mel or mockingjay representations.')
    parser.add_argument('--test_speaker',
action='store_true', help='Test mel or mockingjay representations using the trained speaker classifier.')

    # Options
    parser.add_argument('--with_head', action='store_true', help='inference with the spectrogram head, the model outputs spectrogram.')
    parser.add_argument('--output_attention', action='store_true', help='plot attention')
    parser.add_argument('--load_ws', default='result/result_mockingjay_sentiment/10111754-10170300-weight_sum/best_val.ckpt', help='load weighted-sum weights from trained downstream model')
    parser.add_argument('--cpu', action='store_true', help='Disable GPU training.')
    parser.add_argument('--no-msg', action='store_true', help='Hide all messages.')

    args = parser.parse_args()
    setattr(args, 'gpu', not args.cpu)
    setattr(args, 'verbose', not args.no_msg)
    # An explicit Loader keeps newer PyYAML versions from warning or erroring on yaml.load
    config = yaml.load(open(args.config, 'r'), Loader=yaml.FullLoader)
    config['timer'] = Timer()

    return config, args


########
# MAIN #
########
def main():

    # get arguments
######################## # GET MOCKINGJAY MODEL # ######################## def get_mockingjay_model(from_path='result/result_mockingjay/mockingjay_libri_sd1337_best/mockingjay-500000.ckpt', display_settings=False): ''' Wrapper that loads the mockingjay model from checkpoint path ''' # load config and paras all_states = torch.load(from_path, map_location='cpu') config = all_states['Settings']['Config'] paras = all_states['Settings']['Paras'] # display checkpoint settings if display_settings: for cluster in config: print(cluster + ':') for item in config[cluster]: print('\t' + str(item) + ': ', config[cluster][item]) print('paras:') v_paras = vars(paras) for item in v_paras: print('\t' + str(item) + ': ', v_paras[item]) # load model with Tester from mockingjay.solver import Tester mockingjay = Tester(config, paras) mockingjay.set_model(inference=True, with_head=False, from_path=from_path) return mockingjay if __name__ == '__main__': main()
    config, args = get_mockingjay_args()

    # Fix seed and make backends deterministic
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    if torch.cuda.is_available(): torch.cuda.manual_seed_all(args.seed)
    torch.backends.cudnn.deterministic = True
    # cudnn benchmarking auto-selects algorithms and can be non-deterministic,
    # so it stays off when reproducibility is the goal
    torch.backends.cudnn.benchmark = False

    # Train Mockingjay
    if args.train:
        from mockingjay.solver import Trainer
        trainer = Trainer(config, args)
        trainer.load_data(split='train')
        trainer.set_model(inference=False)
        trainer.exec()

    ##################################################################################

    # Train Phone Task
    elif args.train_phone:
        from downstream.solver import Downstream_Trainer
        task = 'mockingjay_phone' if args.run_mockingjay \
                else 'apc_phone' if args.run_apc else 'baseline_phone'
        trainer = Downstream_Trainer(config, args, task=task)
        trainer.load_data(split='train', load='phone')
        trainer.set_model(inference=False)
        trainer.exec()

    # Test Phone Task
    elif args.test_phone:
        from downstream.solver import Downstream_Tester
        task = 'mockingjay_phone' if args.run_mockingjay \
                else 'apc_phone' if args.run_apc else 'baseline_phone'
        tester = Downstream_Tester(config, args, task=task)
        tester.load_data(split='test', load='phone')
        tester.set_model(inference=True)
        tester.exec()

    ##################################################################################

    # Train Sentiment Task
    elif args.train_sentiment:
        from downstream.solver import Downstream_Trainer
        task = 'mockingjay_sentiment' if args.run_mockingjay \
                else 'apc_sentiment' if args.run_apc else 'baseline_sentiment'
        trainer = Downstream_Trainer(config, args, task=task)
        trainer.load_data(split='train', load='sentiment')
        trainer.set_model(inference=False)
        trainer.exec()

    # Test Sentiment Task
    elif args.test_sentiment:
        from downstream.solver import Downstream_Tester
        task = 'mockingjay_sentiment' if args.run_mockingjay \
                else 'apc_sentiment' if args.run_apc else 'baseline_sentiment'
        tester = Downstream_Tester(config, args, task=task)
        tester.load_data(split='test', load='sentiment')
        tester.set_model(inference=True)
        tester.exec()

    ##################################################################################

    # Train Speaker Task
    elif args.train_speaker:
        from downstream.solver import Downstream_Trainer
        task = 'mockingjay_speaker' if args.run_mockingjay \
                else 'apc_speaker' if args.run_apc else 'baseline_speaker'
        trainer = Downstream_Trainer(config, args, task=task)
        trainer.load_data(split='train', load='speaker')
        # trainer.load_data(split='train', load='speaker_large') # Deprecated
        trainer.set_model(inference=False)
        trainer.exec()

    # Test Speaker Task
    elif args.test_speaker:
        from downstream.solver import Downstream_Tester
        task = 'mockingjay_speaker' if args.run_mockingjay \
                else 'apc_speaker' if args.run_apc else 'baseline_speaker'
        tester = Downstream_Tester(config, args, task=task)
        tester.load_data(split='test', load='speaker')
        # tester.load_data(split='test', load='speaker_large') # Deprecated
        tester.set_model(inference=True)
        tester.exec()

    ##################################################################################

    # Visualize Mockingjay
    elif args.plot:
        from mockingjay.solver import Tester
        tester = Tester(config, args)
        tester.load_data(split='test', load_mel_only=True)
        tester.set_model(inference=True, with_head=args.with_head, output_attention=args.output_attention)
        tester.plot(with_head=args.with_head)

    config['timer'].report()
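get_mockingjay_model above is the public entry point for loading a trained checkpoint into a Tester; a minimal loading sketch, assuming a trained checkpoint actually exists at the default path used by the wrapper:

# Illustrative only: requires a real checkpoint at this path and the
# mockingjay package on the import path.
mockingjay = get_mockingjay_model(
    from_path='result/result_mockingjay/mockingjay_libri_sd1337_best/mockingjay-500000.ckpt',
    display_settings=True,  # print the config and paras stored in the checkpoint
)
# `mockingjay` is a Tester whose model was loaded with
# set_model(inference=True, with_head=False), per the wrapper above.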
lib.rs
// DO NOT EDIT ! // This file was generated automatically from 'src/mako/api/lib.rs.mako' // DO NOT EDIT ! //! This documentation was generated from *Cloud Dataplex* crate version *3.0.0+20220223*, where *20220223* is the exact revision of the *dataplex:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v3.0.0*. //! //! Everything else about the *Cloud Dataplex* *v1* API can be found at the //! [official documentation site](https://cloud.google.com/dataplex/docs). //! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/main/gen/dataplex1). //! # Features //! //! Handle the following *Resources* with ease from the central [hub](CloudDataplex) ... //! //! * projects //! * [*locations get*](api::ProjectLocationGetCall), [*locations lakes actions list*](api::ProjectLocationLakeActionListCall), [*locations lakes content get iam policy*](api::ProjectLocationLakeContentGetIamPolicyCall), [*locations lakes content set iam policy*](api::ProjectLocationLakeContentSetIamPolicyCall), [*locations lakes content test iam permissions*](api::ProjectLocationLakeContentTestIamPermissionCall), [*locations lakes contentitems create*](api::ProjectLocationLakeContentitemCreateCall), [*locations lakes contentitems delete*](api::ProjectLocationLakeContentitemDeleteCall), [*locations lakes contentitems get*](api::ProjectLocationLakeContentitemGetCall), [*locations lakes contentitems list*](api::ProjectLocationLakeContentitemListCall), [*locations lakes contentitems patch*](api::ProjectLocationLakeContentitemPatchCall), [*locations lakes create*](api::ProjectLocationLakeCreateCall), [*locations lakes delete*](api::ProjectLocationLakeDeleteCall), [*locations lakes environments create*](api::ProjectLocationLakeEnvironmentCreateCall), [*locations lakes environments delete*](api::ProjectLocationLakeEnvironmentDeleteCall), [*locations lakes environments get*](api::ProjectLocationLakeEnvironmentGetCall), [*locations lakes environments get iam policy*](api::ProjectLocationLakeEnvironmentGetIamPolicyCall), [*locations lakes environments list*](api::ProjectLocationLakeEnvironmentListCall), [*locations lakes environments patch*](api::ProjectLocationLakeEnvironmentPatchCall), [*locations lakes environments sessions list*](api::ProjectLocationLakeEnvironmentSessionListCall), [*locations lakes environments set iam policy*](api::ProjectLocationLakeEnvironmentSetIamPolicyCall), [*locations lakes environments test iam permissions*](api::ProjectLocationLakeEnvironmentTestIamPermissionCall), [*locations lakes get*](api::ProjectLocationLakeGetCall), [*locations lakes get iam policy*](api::ProjectLocationLakeGetIamPolicyCall), [*locations lakes list*](api::ProjectLocationLakeListCall), [*locations lakes patch*](api::ProjectLocationLakePatchCall), [*locations lakes set iam policy*](api::ProjectLocationLakeSetIamPolicyCall), [*locations lakes tasks create*](api::ProjectLocationLakeTaskCreateCall), [*locations lakes tasks delete*](api::ProjectLocationLakeTaskDeleteCall), [*locations lakes tasks get*](api::ProjectLocationLakeTaskGetCall), [*locations lakes tasks get iam policy*](api::ProjectLocationLakeTaskGetIamPolicyCall), [*locations lakes tasks jobs cancel*](api::ProjectLocationLakeTaskJobCancelCall), [*locations lakes tasks jobs get*](api::ProjectLocationLakeTaskJobGetCall), [*locations lakes tasks jobs list*](api::ProjectLocationLakeTaskJobListCall), [*locations lakes tasks list*](api::ProjectLocationLakeTaskListCall), [*locations lakes tasks 
patch*](api::ProjectLocationLakeTaskPatchCall), [*locations lakes tasks set iam policy*](api::ProjectLocationLakeTaskSetIamPolicyCall), [*locations lakes tasks test iam permissions*](api::ProjectLocationLakeTaskTestIamPermissionCall), [*locations lakes test iam permissions*](api::ProjectLocationLakeTestIamPermissionCall), [*locations lakes zones actions list*](api::ProjectLocationLakeZoneActionListCall), [*locations lakes zones assets actions list*](api::ProjectLocationLakeZoneAssetActionListCall), [*locations lakes zones assets create*](api::ProjectLocationLakeZoneAssetCreateCall), [*locations lakes zones assets delete*](api::ProjectLocationLakeZoneAssetDeleteCall), [*locations lakes zones assets get*](api::ProjectLocationLakeZoneAssetGetCall), [*locations lakes zones assets get iam policy*](api::ProjectLocationLakeZoneAssetGetIamPolicyCall), [*locations lakes zones assets list*](api::ProjectLocationLakeZoneAssetListCall), [*locations lakes zones assets patch*](api::ProjectLocationLakeZoneAssetPatchCall), [*locations lakes zones assets set iam policy*](api::ProjectLocationLakeZoneAssetSetIamPolicyCall), [*locations lakes zones assets test iam permissions*](api::ProjectLocationLakeZoneAssetTestIamPermissionCall), [*locations lakes zones create*](api::ProjectLocationLakeZoneCreateCall), [*locations lakes zones delete*](api::ProjectLocationLakeZoneDeleteCall), [*locations lakes zones entities create*](api::ProjectLocationLakeZoneEntityCreateCall), [*locations lakes zones entities delete*](api::ProjectLocationLakeZoneEntityDeleteCall), [*locations lakes zones entities get*](api::ProjectLocationLakeZoneEntityGetCall), [*locations lakes zones entities list*](api::ProjectLocationLakeZoneEntityListCall), [*locations lakes zones entities partitions create*](api::ProjectLocationLakeZoneEntityPartitionCreateCall), [*locations lakes zones entities partitions delete*](api::ProjectLocationLakeZoneEntityPartitionDeleteCall), [*locations lakes zones entities partitions get*](api::ProjectLocationLakeZoneEntityPartitionGetCall), [*locations lakes zones entities partitions list*](api::ProjectLocationLakeZoneEntityPartitionListCall), [*locations lakes zones entities update*](api::ProjectLocationLakeZoneEntityUpdateCall), [*locations lakes zones get*](api::ProjectLocationLakeZoneGetCall), [*locations lakes zones get iam policy*](api::ProjectLocationLakeZoneGetIamPolicyCall), [*locations lakes zones list*](api::ProjectLocationLakeZoneListCall), [*locations lakes zones patch*](api::ProjectLocationLakeZonePatchCall), [*locations lakes zones set iam policy*](api::ProjectLocationLakeZoneSetIamPolicyCall), [*locations lakes zones test iam permissions*](api::ProjectLocationLakeZoneTestIamPermissionCall), [*locations list*](api::ProjectLocationListCall), [*locations operations cancel*](api::ProjectLocationOperationCancelCall), [*locations operations delete*](api::ProjectLocationOperationDeleteCall), [*locations operations get*](api::ProjectLocationOperationGetCall) and [*locations operations list*](api::ProjectLocationOperationListCall) //! //! //! //! //! Not what you are looking for ? Find all other Google APIs in their Rust [documentation index](http://byron.github.io/google-apis-rs). //! //! # Structure of this Library //! //! The API is structured into the following primary items: //! //! * **[Hub](CloudDataplex)** //! * a central object to maintain state and allow accessing all *Activities* //! * creates [*Method Builders*](client::MethodsBuilder) which in turn //! 
allow access to individual [*Call Builders*](client::CallBuilder) //! * **[Resources](client::Resource)** //! * primary types that you can apply *Activities* to //! * a collection of properties and *Parts* //! * **[Parts](client::Part)** //! * a collection of properties //! * never directly used in *Activities* //! * **[Activities](client::CallBuilder)** //! * operations to apply to *Resources* //! //! All *structures* are marked with applicable traits to further categorize them and ease browsing. //! //! Generally speaking, you can invoke *Activities* like this: //! //! ```Rust,ignore //! let r = hub.resource().activity(...).doit().await //! ``` //! //! Or specifically ... //! //! ```ignore //! let r = hub.projects().locations_lakes_environments_create(...).doit().await //! let r = hub.projects().locations_lakes_environments_delete(...).doit().await //! let r = hub.projects().locations_lakes_environments_patch(...).doit().await //! let r = hub.projects().locations_lakes_tasks_create(...).doit().await //! let r = hub.projects().locations_lakes_tasks_delete(...).doit().await //! let r = hub.projects().locations_lakes_tasks_patch(...).doit().await //! let r = hub.projects().locations_lakes_zones_assets_create(...).doit().await //! let r = hub.projects().locations_lakes_zones_assets_delete(...).doit().await //! let r = hub.projects().locations_lakes_zones_assets_patch(...).doit().await //! let r = hub.projects().locations_lakes_zones_create(...).doit().await //! let r = hub.projects().locations_lakes_zones_delete(...).doit().await //! let r = hub.projects().locations_lakes_zones_patch(...).doit().await //! let r = hub.projects().locations_lakes_create(...).doit().await //! let r = hub.projects().locations_lakes_delete(...).doit().await //! let r = hub.projects().locations_lakes_patch(...).doit().await //! let r = hub.projects().locations_operations_get(...).doit().await
//!
//! The `resource()` and `activity(...)` calls create [builders][builder-pattern]. The second one dealing with `Activities`
//! supports various methods to configure the impending operation (not shown here). It is made such that all required arguments have to be
//! specified right away (i.e. `(...)`), whereas all optional ones can be [built up][builder-pattern] as desired.
//! The `doit()` method performs the actual communication with the server and returns the respective result.
//!
//! # Usage
//!
//! ## Setting up your Project
//!
//! To use this library, you would put the following lines into your `Cargo.toml` file:
//!
//! ```toml
//! [dependencies]
//! google-dataplex1 = "*"
//! serde = "^1.0"
//! serde_json = "^1.0"
//! ```
//!
//! ## A complete example
//!
//! ```test_harness,no_run
//! extern crate hyper;
//! extern crate hyper_rustls;
//! extern crate google_dataplex1 as dataplex1;
//! use dataplex1::api::GoogleCloudDataplexV1Environment;
//! use dataplex1::{Result, Error};
//! # async fn dox() {
//! use std::default::Default;
//! use dataplex1::{CloudDataplex, oauth2, hyper, hyper_rustls};
//!
//! // Get an ApplicationSecret instance by some means. It contains the `client_id` and
//! // `client_secret`, among other things.
//! let secret: oauth2::ApplicationSecret = Default::default();
//! // Instantiate the authenticator. It will choose a suitable authentication flow for you,
//! // unless you replace `None` with the desired Flow.
//! // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about
//! // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and
//! // retrieve them from storage.
//! let auth = oauth2::InstalledFlowAuthenticator::builder(
//!     secret,
//!     oauth2::InstalledFlowReturnMethod::HTTPRedirect,
//! ).build().await.unwrap();
//! let mut hub = CloudDataplex::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
//! // As the method needs a request, you would usually fill it with the desired information
//! // into the respective structure. Some of the parts shown here might not be applicable !
//! // Values shown here are possibly random and not representative !
//! let mut req = GoogleCloudDataplexV1Environment::default();
//!
//! // You can configure optional parameters by calling the respective setters at will, and
//! // execute the final call using `doit()`.
//! // Values shown here are possibly random and not representative !
//! let result = hub.projects().locations_lakes_environments_create(req, "parent")
//!              .validate_only(false)
//!              .environment_id("amet.")
//!              .doit().await;
//!
//! match result {
//!     Err(e) => match e {
//!         // The Error enum provides details about what exactly happened.
//!         // You can also just use its `Debug`, `Display` or `Error` traits
//!         Error::HttpError(_)
//!         |Error::Io(_)
//!         |Error::MissingAPIKey
//!         |Error::MissingToken(_)
//!         |Error::Cancelled
//!         |Error::UploadSizeLimitExceeded(_, _)
//!         |Error::Failure(_)
//!         |Error::BadRequest(_)
//!         |Error::FieldClash(_)
//!         |Error::JsonDecodeError(_, _) => println!("{}", e),
//!     },
//!     Ok(res) => println!("Success: {:?}", res),
//! }
//! # }
//! ```
//! ## Handling Errors
//!
//! All errors produced by the system are provided either as the [Result](client::Result) enumeration returned by
//! the doit() methods, or handed as possibly intermediate results to either the
//! [Hub Delegate](client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html).
//!
//! When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This
//! makes the system potentially resilient to all kinds of errors.
//!
//! ## Uploads and Downloads
//! If a method supports downloads, the response body, which is part of the [Result](client::Result), should be
//! read by you to obtain the media.
//! If such a method also supports a [Response Result](client::ResponseResult), it will return that by default.
//! You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making
//! this call: `.param("alt", "media")`.
//!
//! Methods supporting uploads can do so using up to 2 different protocols:
//! *simple* and *resumable*. The distinctiveness of each is represented by customized
//! `doit(...)` methods, which are then named `upload(...)` and `upload_resumable(...)` respectively.
//!
//! ## Customization and Callbacks
//!
//! You may alter the way a `doit()` method is called by providing a [delegate](client::Delegate) to the
//! [Method Builder](client::CallBuilder) before making the final `doit()` call.
//! Respective methods will be called to provide progress information, as well as determine whether the system should
//! retry on failure.
//!
//! The [delegate trait](client::Delegate) is default-implemented, allowing you to customize it with minimal effort.
//!
//! ## Optional Parts in Server-Requests
//!
//! All structures provided by this library are made to be [encodable](client::RequestValue) and
//! [decodable](client::ResponseResult) via *json*. Optionals are used to indicate that partial requests or responses
//! are valid.
//! Most optionals are considered [Parts](client::Part) which are identifiable by name, which will be sent to
//! the server to indicate either the set parts of the request or the desired parts in the response.
//!
//! ## Builder Arguments
//!
//! Using [method builders](client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods.
//! These will always take a single argument, for which the following statements are true.
//!
//! * [PODs][wiki-pod] are handed by copy
//! * strings are passed as `&str`
//! * [request values](client::RequestValue) are moved
//!
//! Arguments will always be copied or cloned into the builder, to make them independent of their original lifetimes.
//!
//! [wiki-pod]: http://en.wikipedia.org/wiki/Plain_old_data_structure
//! [builder-pattern]: http://en.wikipedia.org/wiki/Builder_pattern
//! [google-go-api]: https://github.com/google/google-api-go-client
//!

// Unused attributes happen thanks to defined, but unused structures
// We don't warn about this, as depending on the API, some data structures or facilities are never used.
// Instead of pre-determining this, we just disable the lint. It's manually tuned to not have any
// unused imports in fully featured APIs. Same with unused_mut ... .
#![allow(unused_imports, unused_mut, dead_code)]

// DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
#[macro_use]
extern crate serde_derive;

// Re-export the hyper and hyper_rustls crates; they are required to build the hub.
pub extern crate hyper;
pub extern crate hyper_rustls;
extern crate serde;
extern crate serde_json;
// Re-export the yup_oauth2 crate; it is required to call some methods of the hub and the client.
pub extern crate yup_oauth2 as oauth2;
extern crate mime;
extern crate url;

pub mod api;
pub mod client;

// Re-export the hub type and some basic client structs.
pub use api::CloudDataplex;
pub use client::{Result, Error, Delegate};
index.d.ts
/* Ionicons v5.4.0, Types */ export declare var accessibility: string; export declare var accessibilityOutline: string; export declare var accessibilitySharp: string; export declare var add: string; export declare var addCircle: string; export declare var addCircleOutline: string; export declare var addCircleSharp: string; export declare var addOutline: string; export declare var addSharp: string; export declare var airplane: string; export declare var airplaneOutline: string; export declare var airplaneSharp: string; export declare var alarm: string; export declare var alarmOutline: string; export declare var alarmSharp: string; export declare var albums: string; export declare var albumsOutline: string; export declare var albumsSharp: string; export declare var alert: string; export declare var alertCircle: string; export declare var alertCircleOutline: string; export declare var alertCircleSharp: string; export declare var alertOutline: string; export declare var alertSharp: string; export declare var americanFootball: string; export declare var americanFootballOutline: string; export declare var americanFootballSharp: string; export declare var analytics: string; export declare var analyticsOutline: string; export declare var analyticsSharp: string; export declare var aperture: string; export declare var apertureOutline: string; export declare var apertureSharp: string; export declare var apps: string; export declare var appsOutline: string; export declare var appsSharp: string; export declare var archive: string; export declare var archiveOutline: string; export declare var archiveSharp: string; export declare var arrowBack: string; export declare var arrowBackCircle: string; export declare var arrowBackCircleOutline: string; export declare var arrowBackCircleSharp: string; export declare var arrowBackOutline: string; export declare var arrowBackSharp: string; export declare var arrowDown: string; export declare var arrowDownCircle: string; export declare var arrowDownCircleOutline: string; export declare var arrowDownCircleSharp: string; export declare var arrowDownOutline: string; export declare var arrowDownSharp: string; export declare var arrowForward: string; export declare var arrowForwardCircle: string; export declare var arrowForwardCircleOutline: string; export declare var arrowForwardCircleSharp: string; export declare var arrowForwardOutline: string; export declare var arrowForwardSharp: string; export declare var arrowRedo: string; export declare var arrowRedoCircle: string; export declare var arrowRedoCircleOutline: string; export declare var arrowRedoCircleSharp: string; export declare var arrowRedoOutline: string; export declare var arrowRedoSharp: string; export declare var arrowUndo: string; export declare var arrowUndoCircle: string; export declare var arrowUndoCircleOutline: string; export declare var arrowUndoCircleSharp: string; export declare var arrowUndoOutline: string; export declare var arrowUndoSharp: string; export declare var arrowUp: string; export declare var arrowUpCircle: string; export declare var arrowUpCircleOutline: string; export declare var arrowUpCircleSharp: string; export declare var arrowUpOutline: string; export declare var arrowUpSharp: string; export declare var at: string; export declare var atCircle: string; export declare var atCircleOutline: string; export declare var atCircleSharp: string; export declare var atOutline: string; export declare var atSharp: string; export declare var attach: string; export declare var attachOutline: 
string; export declare var attachSharp: string; export declare var backspace: string; export declare var backspaceOutline: string; export declare var backspaceSharp: string; export declare var bag: string; export declare var bagAdd: string; export declare var bagAddOutline: string; export declare var bagAddSharp: string; export declare var bagCheck: string; export declare var bagCheckOutline: string; export declare var bagCheckSharp: string; export declare var bagHandle: string; export declare var bagHandleOutline: string; export declare var bagHandleSharp: string; export declare var bagOutline: string; export declare var bagRemove: string; export declare var bagRemoveOutline: string; export declare var bagRemoveSharp: string; export declare var bagSharp: string; export declare var balloon: string; export declare var balloonOutline: string; export declare var balloonSharp: string; export declare var ban: string; export declare var banOutline: string; export declare var banSharp: string; export declare var bandage: string; export declare var bandageOutline: string; export declare var bandageSharp: string; export declare var barChart: string; export declare var barChartOutline: string; export declare var barChartSharp: string; export declare var barbell: string; export declare var barbellOutline: string; export declare var barbellSharp: string; export declare var barcode: string; export declare var barcodeOutline: string; export declare var barcodeSharp: string; export declare var baseball: string; export declare var baseballOutline: string; export declare var baseballSharp: string; export declare var basket: string; export declare var basketOutline: string; export declare var basketSharp: string; export declare var basketball: string; export declare var basketballOutline: string; export declare var basketballSharp: string; export declare var batteryCharging: string; export declare var batteryChargingOutline: string; export declare var batteryChargingSharp: string; export declare var batteryDead: string; export declare var batteryDeadOutline: string; export declare var batteryDeadSharp: string; export declare var batteryFull: string; export declare var batteryFullOutline: string; export declare var batteryFullSharp: string; export declare var batteryHalf: string; export declare var batteryHalfOutline: string; export declare var batteryHalfSharp: string; export declare var beaker: string; export declare var beakerOutline: string; export declare var beakerSharp: string; export declare var bed: string; export declare var bedOutline: string; export declare var bedSharp: string; export declare var beer: string; export declare var beerOutline: string; export declare var beerSharp: string; export declare var bicycle: string; export declare var bicycleOutline: string; export declare var bicycleSharp: string; export declare var bluetooth: string; export declare var bluetoothOutline: string; export declare var bluetoothSharp: string; export declare var boat: string; export declare var boatOutline: string; export declare var boatSharp: string; export declare var body: string; export declare var bodyOutline: string; export declare var bodySharp: string; export declare var bonfire: string; export declare var bonfireOutline: string; export declare var bonfireSharp: string; export declare var book: string; export declare var bookOutline: string; export declare var bookSharp: string; export declare var bookmark: string; export declare var bookmarkOutline: string; export declare var bookmarkSharp: string; 
export declare var bookmarks: string; export declare var bookmarksOutline: string; export declare var bookmarksSharp: string; export declare var bowlingBall: string; export declare var bowlingBallOutline: string; export declare var bowlingBallSharp: string; export declare var briefcase: string; export declare var briefcaseOutline: string; export declare var briefcaseSharp: string; export declare var browsers: string; export declare var browsersOutline: string; export declare var browsersSharp: string; export declare var brush: string; export declare var brushOutline: string; export declare var brushSharp: string; export declare var bug: string; export declare var bugOutline: string; export declare var bugSharp: string; export declare var build: string; export declare var buildOutline: string; export declare var buildSharp: string; export declare var bulb: string; export declare var bulbOutline: string; export declare var bulbSharp: string; export declare var bus: string; export declare var busOutline: string; export declare var busSharp: string; export declare var business: string; export declare var businessOutline: string; export declare var businessSharp: string; export declare var cafe: string; export declare var cafeOutline: string; export declare var cafeSharp: string; export declare var calculator: string; export declare var calculatorOutline: string; export declare var calculatorSharp: string; export declare var calendar: string; export declare var calendarClear: string; export declare var calendarClearOutline: string; export declare var calendarClearSharp: string; export declare var calendarNumber: string; export declare var calendarNumberOutline: string; export declare var calendarNumberSharp: string; export declare var calendarOutline: string; export declare var calendarSharp: string; export declare var call: string; export declare var callOutline: string; export declare var callSharp: string; export declare var camera: string; export declare var cameraOutline: string; export declare var cameraReverse: string; export declare var cameraReverseOutline: string; export declare var cameraReverseSharp: string; export declare var cameraSharp: string; export declare var car: string; export declare var carOutline: string; export declare var carSharp: string; export declare var carSport: string; export declare var carSportOutline: string; export declare var carSportSharp: string; export declare var card: string; export declare var cardOutline: string; export declare var cardSharp: string; export declare var caretBack: string; export declare var caretBackCircle: string; export declare var caretBackCircleOutline: string; export declare var caretBackCircleSharp: string; export declare var caretBackOutline: string; export declare var caretBackSharp: string; export declare var caretDown: string; export declare var caretDownCircle: string; export declare var caretDownCircleOutline: string; export declare var caretDownCircleSharp: string; export declare var caretDownOutline: string; export declare var caretDownSharp: string; export declare var caretForward: string; export declare var caretForwardCircle: string; export declare var caretForwardCircleOutline: string; export declare var caretForwardCircleSharp: string; export declare var caretForwardOutline: string; export declare var caretForwardSharp: string; export declare var caretUp: string; export declare var caretUpCircle: string; export declare var caretUpCircleOutline: string; export declare var caretUpCircleSharp: string; export declare 
var caretUpOutline: string; export declare var caretUpSharp: string; export declare var cart: string; export declare var cartOutline: string; export declare var cartSharp: string; export declare var cash: string; export declare var cashOutline: string; export declare var cashSharp: string; export declare var cellular: string; export declare var cellularOutline: string; export declare var cellularSharp: string; export declare var chatbox: string; export declare var chatboxEllipses: string; export declare var chatboxEllipsesOutline: string; export declare var chatboxEllipsesSharp: string; export declare var chatboxOutline: string; export declare var chatboxSharp: string; export declare var chatbubble: string; export declare var chatbubbleEllipses: string; export declare var chatbubbleEllipsesOutline: string; export declare var chatbubbleEllipsesSharp: string; export declare var chatbubbleOutline: string; export declare var chatbubbleSharp: string; export declare var chatbubbles: string; export declare var chatbubblesOutline: string; export declare var chatbubblesSharp: string; export declare var checkbox: string; export declare var checkboxOutline: string; export declare var checkboxSharp: string; export declare var checkmark: string; export declare var checkmarkCircle: string; export declare var checkmarkCircleOutline: string; export declare var checkmarkCircleSharp: string; export declare var checkmarkDone: string; export declare var checkmarkDoneCircle: string; export declare var checkmarkDoneCircleOutline: string; export declare var checkmarkDoneCircleSharp: string; export declare var checkmarkDoneOutline: string; export declare var checkmarkDoneSharp: string; export declare var checkmarkOutline: string; export declare var checkmarkSharp: string; export declare var chevronBack: string; export declare var chevronBackCircle: string; export declare var chevronBackCircleOutline: string; export declare var chevronBackCircleSharp: string; export declare var chevronBackOutline: string; export declare var chevronBackSharp: string; export declare var chevronDown: string; export declare var chevronDownCircle: string; export declare var chevronDownCircleOutline: string; export declare var chevronDownCircleSharp: string; export declare var chevronDownOutline: string; export declare var chevronDownSharp: string; export declare var chevronForward: string; export declare var chevronForwardCircle: string; export declare var chevronForwardCircleOutline: string; export declare var chevronForwardCircleSharp: string; export declare var chevronForwardOutline: string; export declare var chevronForwardSharp: string; export declare var chevronUp: string; export declare var chevronUpCircle: string; export declare var chevronUpCircleOutline: string; export declare var chevronUpCircleSharp: string; export declare var chevronUpOutline: string; export declare var chevronUpSharp: string; export declare var clipboard: string; export declare var clipboardOutline: string; export declare var clipboardSharp: string; export declare var close: string; export declare var closeCircle: string; export declare var closeCircleOutline: string; export declare var closeCircleSharp: string; export declare var closeOutline: string; export declare var closeSharp: string; export declare var cloud: string; export declare var cloudCircle: string; export declare var cloudCircleOutline: string; export declare var cloudCircleSharp: string; export declare var cloudDone: string; export declare var cloudDoneOutline: string; export declare var 
cloudDoneSharp: string; export declare var cloudDownload: string; export declare var cloudDownloadOutline: string; export declare var cloudDownloadSharp: string; export declare var cloudOffline: string; export declare var cloudOfflineOutline: string; export declare var cloudOfflineSharp: string; export declare var cloudOutline: string; export declare var cloudSharp: string; export declare var cloudUpload: string; export declare var cloudUploadOutline: string; export declare var cloudUploadSharp: string; export declare var cloudy: string; export declare var cloudyNight: string; export declare var cloudyNightOutline: string; export declare var cloudyNightSharp: string; export declare var cloudyOutline: string; export declare var cloudySharp: string; export declare var code: string; export declare var codeDownload: string; export declare var codeDownloadOutline: string; export declare var codeDownloadSharp: string; export declare var codeOutline: string; export declare var codeSharp: string; export declare var codeSlash: string; export declare var codeSlashOutline: string; export declare var codeSlashSharp: string; export declare var codeWorking: string; export declare var codeWorkingOutline: string; export declare var codeWorkingSharp: string; export declare var cog: string; export declare var cogOutline: string; export declare var cogSharp: string; export declare var colorFill: string; export declare var colorFillOutline: string; export declare var colorFillSharp: string; export declare var colorFilter: string; export declare var colorFilterOutline: string; export declare var colorFilterSharp: string; export declare var colorPalette: string; export declare var colorPaletteOutline: string; export declare var colorPaletteSharp: string; export declare var colorWand: string; export declare var colorWandOutline: string; export declare var colorWandSharp: string; export declare var compass: string; export declare var compassOutline: string; export declare var compassSharp: string; export declare var construct: string; export declare var constructOutline: string; export declare var constructSharp: string; export declare var contract: string; export declare var contractOutline: string; export declare var contractSharp: string; export declare var contrast: string; export declare var contrastOutline: string; export declare var contrastSharp: string; export declare var copy: string; export declare var copyOutline: string; export declare var copySharp: string; export declare var create: string; export declare var createOutline: string; export declare var createSharp: string; export declare var crop: string; export declare var cropOutline: string; export declare var cropSharp: string; export declare var cube: string; export declare var cubeOutline: string; export declare var cubeSharp: string; export declare var cut: string; export declare var cutOutline: string; export declare var cutSharp: string; export declare var desktop: string; export declare var desktopOutline: string; export declare var desktopSharp: string; export declare var diamond: string; export declare var diamondOutline: string; export declare var diamondSharp: string; export declare var dice: string; export declare var diceOutline: string; export declare var diceSharp: string; export declare var disc: string; export declare var discOutline: string; export declare var discSharp: string; export declare var document: string; export declare var documentAttach: string; export declare var documentAttachOutline: string; export declare var 
documentAttachSharp: string; export declare var documentLock: string; export declare var documentLockOutline: string; export declare var documentLockSharp: string; export declare var documentOutline: string; export declare var documentSharp: string; export declare var documentText: string; export declare var documentTextOutline: string; export declare var documentTextSharp: string; export declare var documents: string; export declare var documentsOutline: string; export declare var documentsSharp: string; export declare var download: string; export declare var downloadOutline: string; export declare var downloadSharp: string; export declare var duplicate: string; export declare var duplicateOutline: string; export declare var duplicateSharp: string; export declare var ear: string; export declare var earOutline: string; export declare var earSharp: string; export declare var earth: string; export declare var earthOutline: string; export declare var earthSharp: string; export declare var easel: string; export declare var easelOutline: string; export declare var easelSharp: string; export declare var egg: string; export declare var eggOutline: string; export declare var eggSharp: string; export declare var ellipse: string; export declare var ellipseOutline: string; export declare var ellipseSharp: string; export declare var ellipsisHorizontal: string; export declare var ellipsisHorizontalCircle: string; export declare var ellipsisHorizontalCircleOutline: string; export declare var ellipsisHorizontalCircleSharp: string; export declare var ellipsisHorizontalOutline: string; export declare var ellipsisHorizontalSharp: string; export declare var ellipsisVertical: string; export declare var ellipsisVerticalCircle: string; export declare var ellipsisVerticalCircleOutline: string; export declare var ellipsisVerticalCircleSharp: string; export declare var ellipsisVerticalOutline: string; export declare var ellipsisVerticalSharp: string; export declare var enter: string; export declare var enterOutline: string; export declare var enterSharp: string; export declare var exit: string; export declare var exitOutline: string; export declare var exitSharp: string; export declare var expand: string; export declare var expandOutline: string; export declare var expandSharp: string; export declare var extensionPuzzle: string; export declare var extensionPuzzleOutline: string; export declare var extensionPuzzleSharp: string; export declare var eye: string; export declare var eyeOff: string; export declare var eyeOffOutline: string; export declare var eyeOffSharp: string; export declare var eyeOutline: string; export declare var eyeSharp: string; export declare var eyedrop: string; export declare var eyedropOutline: string; export declare var eyedropSharp: string; export declare var fastFood: string; export declare var fastFoodOutline: string; export declare var fastFoodSharp: string; export declare var female: string; export declare var femaleOutline: string; export declare var femaleSharp: string; export declare var fileTray: string; export declare var fileTrayFull: string; export declare var fileTrayFullOutline: string; export declare var fileTrayFullSharp: string; export declare var fileTrayOutline: string; export declare var fileTraySharp: string; export declare var fileTrayStacked: string; export declare var fileTrayStackedOutline: string; export declare var fileTrayStackedSharp: string; export declare var film: string; export declare var filmOutline: string; export declare var filmSharp: string; export 
declare var filter: string; export declare var filterCircle: string; export declare var filterCircleOutline: string; export declare var filterCircleSharp: string; export declare var filterOutline: string; export declare var filterSharp: string; export declare var fingerPrint: string; export declare var fingerPrintOutline: string; export declare var fingerPrintSharp: string; export declare var fish: string; export declare var fishOutline: string; export declare var fishSharp: string; export declare var fitness: string; export declare var fitnessOutline: string; export declare var fitnessSharp: string; export declare var flag: string; export declare var flagOutline: string; export declare var flagSharp: string; export declare var flame: string; export declare var flameOutline: string; export declare var flameSharp: string; export declare var flash: string; export declare var flashOff: string; export declare var flashOffOutline: string; export declare var flashOffSharp: string; export declare var flashOutline: string; export declare var flashSharp: string; export declare var flashlight: string; export declare var flashlightOutline: string; export declare var flashlightSharp: string; export declare var flask: string; export declare var flaskOutline: string; export declare var flaskSharp: string; export declare var flower: string; export declare var flowerOutline: string; export declare var flowerSharp: string; export declare var folder: string; export declare var folderOpen: string; export declare var folderOpenOutline: string; export declare var folderOpenSharp: string; export declare var folderOutline: string; export declare var folderSharp: string; export declare var football: string; export declare var footballOutline: string; export declare var footballSharp: string; export declare var footsteps: string; export declare var footstepsOutline: string; export declare var footstepsSharp: string; export declare var funnel: string; export declare var funnelOutline: string; export declare var funnelSharp: string; export declare var gameController: string; export declare var gameControllerOutline: string; export declare var gameControllerSharp: string; export declare var gift: string; export declare var giftOutline: string; export declare var giftSharp: string; export declare var gitBranch: string; export declare var gitBranchOutline: string; export declare var gitBranchSharp: string; export declare var gitCommit: string; export declare var gitCommitOutline: string; export declare var gitCommitSharp: string; export declare var gitCompare: string; export declare var gitCompareOutline: string; export declare var gitCompareSharp: string; export declare var gitMerge: string; export declare var gitMergeOutline: string; export declare var gitMergeSharp: string; export declare var gitNetwork: string; export declare var gitNetworkOutline: string; export declare var gitNetworkSharp: string; export declare var gitPullRequest: string; export declare var gitPullRequestOutline: string; export declare var gitPullRequestSharp: string; export declare var glasses: string; export declare var glassesOutline: string; export declare var glassesSharp: string; export declare var globe: string; export declare var globeOutline: string; export declare var globeSharp: string; export declare var golf: string; export declare var golfOutline: string; export declare var golfSharp: string; export declare var grid: string; export declare var gridOutline: string; export declare var gridSharp: string; export declare var hammer: 
string; export declare var hammerOutline: string; export declare var hammerSharp: string; export declare var handLeft: string; export declare var handLeftOutline: string; export declare var handLeftSharp: string; export declare var handRight: string; export declare var handRightOutline: string; export declare var handRightSharp: string; export declare var happy: string; export declare var happyOutline: string; export declare var happySharp: string; export declare var hardwareChip: string; export declare var hardwareChipOutline: string; export declare var hardwareChipSharp: string; export declare var headset: string; export declare var headsetOutline: string; export declare var headsetSharp: string; export declare var heart: string; export declare var heartCircle: string; export declare var heartCircleOutline: string; export declare var heartCircleSharp: string; export declare var heartDislike: string; export declare var heartDislikeCircle: string; export declare var heartDislikeCircleOutline: string; export declare var heartDislikeCircleSharp: string; export declare var heartDislikeOutline: string; export declare var heartDislikeSharp: string; export declare var heartHalf: string; export declare var heartHalfOutline: string; export declare var heartHalfSharp: string; export declare var heartOutline: string; export declare var heartSharp: string; export declare var help: string; export declare var helpBuoy: string; export declare var helpBuoyOutline: string; export declare var helpBuoySharp: string; export declare var helpCircle: string; export declare var helpCircleOutline: string; export declare var helpCircleSharp: string; export declare var helpOutline: string; export declare var helpSharp: string; export declare var home: string; export declare var homeOutline: string; export declare var homeSharp: string; export declare var hourglass: string; export declare var hourglassOutline: string; export declare var hourglassSharp: string; export declare var iceCream: string; export declare var iceCreamOutline: string; export declare var iceCreamSharp: string; export declare var idCard: string; export declare var idCardOutline: string; export declare var idCardSharp: string; export declare var image: string; export declare var imageOutline: string; export declare var imageSharp: string; export declare var images: string; export declare var imagesOutline: string; export declare var imagesSharp: string; export declare var infinite: string; export declare var infiniteOutline: string; export declare var infiniteSharp: string; export declare var information: string; export declare var informationCircle: string; export declare var informationCircleOutline: string; export declare var informationCircleSharp: string; export declare var informationOutline: string; export declare var informationSharp: string; export declare var invertMode: string; export declare var invertModeOutline: string; export declare var invertModeSharp: string; export declare var journal: string; export declare var journalOutline: string; export declare var journalSharp: string; export declare var key: string; export declare var keyOutline: string; export declare var keySharp: string; export declare var keypad: string; export declare var keypadOutline: string; export declare var keypadSharp: string; export declare var language: string; export declare var languageOutline: string; export declare var languageSharp: string; export declare var laptop: string; export declare var laptopOutline: string; export declare var laptopSharp: 
string; export declare var layers: string; export declare var layersOutline: string; export declare var layersSharp: string; export declare var leaf: string; export declare var leafOutline: string; export declare var leafSharp: string; export declare var library: string; export declare var libraryOutline: string; export declare var librarySharp: string; export declare var link: string; export declare var linkOutline: string; export declare var linkSharp: string; export declare var list: string; export declare var listCircle: string; export declare var listCircleOutline: string; export declare var listCircleSharp: string; export declare var listOutline: string; export declare var listSharp: string; export declare var locate: string; export declare var locateOutline: string; export declare var locateSharp: string; export declare var location: string; export declare var locationOutline: string; export declare var locationSharp: string; export declare var lockClosed: string; export declare var lockClosedOutline: string; export declare var lockClosedSharp: string; export declare var lockOpen: string; export declare var lockOpenOutline: string; export declare var lockOpenSharp: string; export declare var logIn: string; export declare var logInOutline: string; export declare var logInSharp: string; export declare var logOut: string; export declare var logOutOutline: string; export declare var logOutSharp: string; export declare var logoAlipay: string; export declare var logoAmazon: string; export declare var logoAmplify: string; export declare var logoAndroid: string; export declare var logoAngular: string; export declare var logoApple: string; export declare var logoAppleAppstore: string; export declare var logoAppleAr: string; export declare var logoBehance: string; export declare var logoBitbucket: string; export declare var logoBitcoin: string; export declare var logoBuffer: string; export declare var logoCapacitor: string; export declare var logoChrome: string; export declare var logoClosedCaptioning: string; export declare var logoCodepen: string; export declare var logoCss3: string; export declare var logoDesignernews: string; export declare var logoDeviantart: string; export declare var logoDiscord: string; export declare var logoDocker: string; export declare var logoDribbble: string; export declare var logoDropbox: string; export declare var logoEdge: string; export declare var logoElectron: string; export declare var logoEuro: string; export declare var logoFacebook: string; export declare var logoFigma: string; export declare var logoFirebase: string; export declare var logoFirefox: string; export declare var logoFlickr: string; export declare var logoFoursquare: string; export declare var logoGithub: string; export declare var logoGitlab: string; export declare var logoGoogle: string; export declare var logoGooglePlaystore: string; export declare var logoHackernews: string; export declare var logoHtml5: string; export declare var logoInstagram: string; export declare var logoIonic: string; export declare var logoIonitron: string; export declare var logoJavascript: string; export declare var logoLaravel: string; export declare var logoLinkedin: string; export declare var logoMarkdown: string; export declare var logoMastodon: string; export declare var logoMedium: string; export declare var logoMicrosoft: string; export declare var logoNoSmoking: string; export declare var logoNodejs: string; export declare var logoNpm: string; export declare var logoOctocat: string; export declare var 
logoPaypal: string; export declare var logoPinterest: string; export declare var logoPlaystation: string; export declare var logoPwa: string; export declare var logoPython: string; export declare var logoReact: string; export declare var logoReddit: string; export declare var logoRss: string; export declare var logoSass: string; export declare var logoSkype: string; export declare var logoSlack: string; export declare var logoSnapchat: string; export declare var logoSoundcloud: string; export declare var logoStackoverflow: string; export declare var logoSteam: string; export declare var logoStencil: string; export declare var logoTableau: string; export declare var logoTiktok: string; export declare var logoTumblr: string; export declare var logoTux: string; export declare var logoTwitch: string; export declare var logoTwitter: string; export declare var logoUsd: string; export declare var logoVenmo: string; export declare var logoVercel: string; export declare var logoVimeo: string; export declare var logoVk: string; export declare var logoVue: string; export declare var logoWebComponent: string; export declare var logoWechat: string; export declare var logoWhatsapp: string; export declare var logoWindows: string; export declare var logoWordpress: string; export declare var logoXbox: string; export declare var logoXing: string; export declare var logoYahoo: string; export declare var logoYen: string; export declare var logoYoutube: string; export declare var magnet: string; export declare var magnetOutline: string; export declare var magnetSharp: string; export declare var mail: string; export declare var mailOpen: string; export declare var mailOpenOutline: string; export declare var mailOpenSharp: string; export declare var mailOutline: string; export declare var mailSharp: string; export declare var mailUnread: string; export declare var mailUnreadOutline: string; export declare var mailUnreadSharp: string; export declare var male: string; export declare var maleFemale: string; export declare var maleFemaleOutline: string; export declare var maleFemaleSharp: string; export declare var maleOutline: string; export declare var maleSharp: string; export declare var man: string; export declare var manOutline: string; export declare var manSharp: string; export declare var map: string; export declare var mapOutline: string; export declare var mapSharp: string; export declare var medal: string; export declare var medalOutline: string; export declare var medalSharp: string; export declare var medical: string; export declare var medicalOutline: string; export declare var medicalSharp: string; export declare var medkit: string; export declare var medkitOutline: string; export declare var medkitSharp: string; export declare var megaphone: string; export declare var megaphoneOutline: string; export declare var megaphoneSharp: string; export declare var menu: string; export declare var menuOutline: string; export declare var menuSharp: string; export declare var mic: string; export declare var micCircle: string; export declare var micCircleOutline: string; export declare var micCircleSharp: string; export declare var micOff: string; export declare var micOffCircle: string; export declare var micOffCircleOutline: string; export declare var micOffCircleSharp: string; export declare var micOffOutline: string; export declare var micOffSharp: string; export declare var micOutline: string; export declare var micSharp: string; export declare var moon: string; export declare var moonOutline: string; export 
declare var moonSharp: string; export declare var move: string; export declare var moveOutline: string; export declare var moveSharp: string; export declare var musicalNote: string; export declare var musicalNoteOutline: string; export declare var musicalNoteSharp: string; export declare var musicalNotes: string; export declare var musicalNotesOutline: string; export declare var musicalNotesSharp: string; export declare var navigate: string; export declare var navigateCircle: string; export declare var navigateCircleOutline: string; export declare var navigateCircleSharp: string; export declare var navigateOutline: string; export declare var navigateSharp: string; export declare var newspaper: string; export declare var newspaperOutline: string; export declare var newspaperSharp: string; export declare var notifications: string; export declare var notificationsCircle: string; export declare var notificationsCircleOutline: string; export declare var notificationsCircleSharp: string; export declare var notificationsOff: string; export declare var notificationsOffCircle: string; export declare var notificationsOffCircleOutline: string; export declare var notificationsOffCircleSharp: string; export declare var notificationsOffOutline: string; export declare var notificationsOffSharp: string; export declare var notificationsOutline: string; export declare var notificationsSharp: string; export declare var nuclear: string; export declare var nuclearOutline: string; export declare var nuclearSharp: string; export declare var nutrition: string; export declare var nutritionOutline: string; export declare var nutritionSharp: string; export declare var open: string; export declare var openOutline: string; export declare var openSharp: string; export declare var options: string; export declare var optionsOutline: string; export declare var optionsSharp: string; export declare var paperPlane: string; export declare var paperPlaneOutline: string; export declare var paperPlaneSharp: string; export declare var partlySunny: string; export declare var partlySunnyOutline: string; export declare var partlySunnySharp: string; export declare var pause: string; export declare var pauseCircle: string; export declare var pauseCircleOutline: string; export declare var pauseCircleSharp: string; export declare var pauseOutline: string; export declare var pauseSharp: string; export declare var paw: string; export declare var pawOutline: string; export declare var pawSharp: string; export declare var pencil: string; export declare var pencilOutline: string; export declare var pencilSharp: string; export declare var people: string; export declare var peopleCircle: string; export declare var peopleCircleOutline: string; export declare var peopleCircleSharp: string; export declare var peopleOutline: string; export declare var peopleSharp: string; export declare var person: string; export declare var personAdd: string; export declare var personAddOutline: string; export declare var personAddSharp: string; export declare var personCircle: string; export declare var personCircleOutline: string; export declare var personCircleSharp: string; export declare var personOutline: string; export declare var personRemove: string; export declare var personRemoveOutline: string; export declare var personRemoveSharp: string; export declare var personSharp: string; export declare var phoneLandscape: string; export declare var phoneLandscapeOutline: string; export declare var phoneLandscapeSharp: string; export declare var phonePortrait: 
string; export declare var phonePortraitOutline: string; export declare var phonePortraitSharp: string; export declare var pieChart: string; export declare var pieChartOutline: string; export declare var pieChartSharp: string; export declare var pin: string; export declare var pinOutline: string; export declare var pinSharp: string; export declare var pint: string; export declare var pintOutline: string; export declare var pintSharp: string; export declare var pizza: string; export declare var pizzaOutline: string; export declare var pizzaSharp: string; export declare var planet: string; export declare var planetOutline: string; export declare var planetSharp: string; export declare var play: string; export declare var playBack: string; export declare var playBackCircle: string; export declare var playBackCircleOutline: string; export declare var playBackCircleSharp: string; export declare var playBackOutline: string; export declare var playBackSharp: string; export declare var playCircle: string; export declare var playCircleOutline: string; export declare var playCircleSharp: string; export declare var playForward: string; export declare var playForwardCircle: string; export declare var playForwardCircleOutline: string; export declare var playForwardCircleSharp: string; export declare var playForwardOutline: string; export declare var playForwardSharp: string; export declare var playOutline: string; export declare var playSharp: string; export declare var playSkipBack: string; export declare var playSkipBackCircle: string; export declare var playSkipBackCircleOutline: string; export declare var playSkipBackCircleSharp: string; export declare var playSkipBackOutline: string; export declare var playSkipBackSharp: string; export declare var playSkipForward: string; export declare var playSkipForwardCircle: string; export declare var playSkipForwardCircleOutline: string; export declare var playSkipForwardCircleSharp: string; export declare var playSkipForwardOutline: string; export declare var playSkipForwardSharp: string; export declare var podium: string; export declare var podiumOutline: string; export declare var podiumSharp: string; export declare var power: string; export declare var powerOutline: string; export declare var powerSharp: string; export declare var pricetag: string; export declare var pricetagOutline: string; export declare var pricetagSharp: string; export declare var pricetags: string; export declare var pricetagsOutline: string; export declare var pricetagsSharp: string; export declare var print: string; export declare var printOutline: string; export declare var printSharp: string; export declare var prism: string; export declare var prismOutline: string; export declare var prismSharp: string; export declare var pulse: string; export declare var pulseOutline: string; export declare var pulseSharp: string; export declare var push: string; export declare var pushOutline: string; export declare var pushSharp: string; export declare var qrCode: string; export declare var qrCodeOutline: string; export declare var qrCodeSharp: string; export declare var radio: string; export declare var radioButtonOff: string; export declare var radioButtonOffOutline: string; export declare var radioButtonOffSharp: string; export declare var radioButtonOn: string; export declare var radioButtonOnOutline: string; export declare var radioButtonOnSharp: string; export declare var radioOutline: string; export declare var radioSharp: string; export declare var rainy: string; export declare var 
rainyOutline: string; export declare var rainySharp: string; export declare var reader: string; export declare var readerOutline: string; export declare var readerSharp: string; export declare var receipt: string; export declare var receiptOutline: string; export declare var receiptSharp: string; export declare var recording: string; export declare var recordingOutline: string; export declare var recordingSharp: string; export declare var refresh: string; export declare var refreshCircle: string; export declare var refreshCircleOutline: string; export declare var refreshCircleSharp: string; export declare var refreshOutline: string; export declare var refreshSharp: string; export declare var reload: string; export declare var reloadCircle: string; export declare var reloadCircleOutline: string; export declare var reloadCircleSharp: string; export declare var reloadOutline: string; export declare var reloadSharp: string; export declare var remove: string; export declare var removeCircle: string; export declare var removeCircleOutline: string; export declare var removeCircleSharp: string; export declare var removeOutline: string; export declare var removeSharp: string; export declare var reorderFour: string; export declare var reorderFourOutline: string; export declare var reorderFourSharp: string; export declare var reorderThree: string; export declare var reorderThreeOutline: string; export declare var reorderThreeSharp: string; export declare var reorderTwo: string; export declare var reorderTwoOutline: string; export declare var reorderTwoSharp: string; export declare var repeat: string; export declare var repeatOutline: string; export declare var repeatSharp: string; export declare var resize: string; export declare var resizeOutline: string; export declare var resizeSharp: string; export declare var restaurant: string; export declare var restaurantOutline: string; export declare var restaurantSharp: string; export declare var returnDownBack: string; export declare var returnDownBackOutline: string; export declare var returnDownBackSharp: string; export declare var returnDownForward: string; export declare var returnDownForwardOutline: string; export declare var returnDownForwardSharp: string; export declare var returnUpBack: string; export declare var returnUpBackOutline: string; export declare var returnUpBackSharp: string; export declare var returnUpForward: string; export declare var returnUpForwardOutline: string; export declare var returnUpForwardSharp: string; export declare var ribbon: string; export declare var ribbonOutline: string; export declare var ribbonSharp: string; export declare var rocket: string; export declare var rocketOutline: string; export declare var rocketSharp: string; export declare var rose: string; export declare var roseOutline: string; export declare var roseSharp: string; export declare var sad: string; export declare var sadOutline: string; export declare var sadSharp: string; export declare var save: string; export declare var saveOutline: string; export declare var saveSharp: string; export declare var scale: string; export declare var scaleOutline: string; export declare var scaleSharp: string; export declare var scan: string; export declare var scanCircle: string; export declare var scanCircleOutline: string; export declare var scanCircleSharp: string; export declare var scanOutline: string; export declare var scanSharp: string; export declare var school: string; export declare var schoolOutline: string; export declare var schoolSharp: string; 
export declare var search: string; export declare var searchCircle: string; export declare var searchCircleOutline: string; export declare var searchCircleSharp: string; export declare var searchOutline: string; export declare var searchSharp: string; export declare var send: string; export declare var sendOutline: string; export declare var sendSharp: string; export declare var server: string; export declare var serverOutline: string; export declare var serverSharp: string; export declare var settings: string; export declare var settingsOutline: string; export declare var settingsSharp: string; export declare var shapes: string; export declare var shapesOutline: string; export declare var shapesSharp: string; export declare var share: string; export declare var shareOutline: string; export declare var shareSharp: string; export declare var shareSocial: string; export declare var shareSocialOutline: string; export declare var shareSocialSharp: string; export declare var shield: string; export declare var shieldCheckmark: string; export declare var shieldCheckmarkOutline: string; export declare var shieldCheckmarkSharp: string; export declare var shieldHalf: string; export declare var shieldHalfOutline: string; export declare var shieldHalfSharp: string; export declare var shieldOutline: string; export declare var shieldSharp: string; export declare var shirt: string; export declare var shirtOutline: string; export declare var shirtSharp: string; export declare var shuffle: string; export declare var shuffleOutline: string; export declare var shuffleSharp: string; export declare var skull: string; export declare var skullOutline: string; export declare var skullSharp: string; export declare var snow: string; export declare var snowOutline: string; export declare var snowSharp: string; export declare var sparkles: string; export declare var sparklesOutline: string; export declare var sparklesSharp: string; export declare var speedometer: string; export declare var speedometerOutline: string; export declare var speedometerSharp: string; export declare var square: string; export declare var squareOutline: string; export declare var squareSharp: string; export declare var star: string; export declare var starHalf: string; export declare var starHalfOutline: string; export declare var starHalfSharp: string; export declare var starOutline: string; export declare var starSharp: string; export declare var statsChart: string; export declare var statsChartOutline: string; export declare var statsChartSharp: string; export declare var stop: string; export declare var stopCircle: string; export declare var stopCircleOutline: string; export declare var stopCircleSharp: string; export declare var stopOutline: string; export declare var stopSharp: string; export declare var stopwatch: string; export declare var stopwatchOutline: string; export declare var stopwatchSharp: string; export declare var storefront: string; export declare var storefrontOutline: string; export declare var storefrontSharp: string; export declare var subway: string; export declare var subwayOutline: string; export declare var subwaySharp: string; export declare var sunny: string; export declare var sunnyOutline: string; export declare var sunnySharp: string; export declare var swapHorizontal: string; export declare var swapHorizontalOutline: string; export declare var swapHorizontalSharp: string; export declare var swapVertical: string; export declare var swapVerticalOutline: string; export declare var swapVerticalSharp: string; 
export declare var sync: string; export declare var syncCircle: string; export declare var syncCircleOutline: string; export declare var syncCircleSharp: string; export declare var syncOutline: string; export declare var syncSharp: string; export declare var tabletLandscape: string; export declare var tabletLandscapeOutline: string; export declare var tabletLandscapeSharp: string; export declare var tabletPortrait: string; export declare var tabletPortraitOutline: string; export declare var tabletPortraitSharp: string; export declare var telescope: string; export declare var telescopeOutline: string; export declare var telescopeSharp: string; export declare var tennisball: string; export declare var tennisballOutline: string; export declare var tennisballSharp: string; export declare var terminal: string; export declare var terminalOutline: string; export declare var terminalSharp: string; export declare var text: string; export declare var textOutline: string; export declare var textSharp: string; export declare var thermometer: string; export declare var thermometerOutline: string; export declare var thermometerSharp: string; export declare var thumbsDown: string; export declare var thumbsDownOutline: string; export declare var thumbsDownSharp: string; export declare var thumbsUp: string; export declare var thumbsUpOutline: string; export declare var thumbsUpSharp: string; export declare var thunderstorm: string; export declare var thunderstormOutline: string; export declare var thunderstormSharp: string; export declare var ticket: string; export declare var ticketOutline: string; export declare var ticketSharp: string; export declare var time: string; export declare var timeOutline: string; export declare var timeSharp: string; export declare var timer: string; export declare var timerOutline: string; export declare var timerSharp: string; export declare var today: string; export declare var todayOutline: string; export declare var todaySharp: string; export declare var toggle: string; export declare var toggleOutline: string; export declare var toggleSharp: string; export declare var trailSign: string; export declare var trailSignOutline: string; export declare var trailSignSharp: string; export declare var train: string; export declare var trainOutline: string; export declare var trainSharp: string; export declare var transgender: string; export declare var transgenderOutline: string; export declare var transgenderSharp: string; export declare var trash: string; export declare var trashBin: string; export declare var trashBinOutline: string; export declare var trashBinSharp: string; export declare var trashOutline: string; export declare var trashSharp: string; export declare var trendingDown: string; export declare var trendingDownOutline: string; export declare var trendingDownSharp: string; export declare var trendingUp: string; export declare var trendingUpOutline: string; export declare var trendingUpSharp: string; export declare var triangle: string; export declare var triangleOutline: string; export declare var triangleSharp: string; export declare var trophy: string; export declare var trophyOutline: string; export declare var trophySharp: string; export declare var tv: string; export declare var tvOutline: string; export declare var tvSharp: string; export declare var umbrella: string; export declare var umbrellaOutline: string; export declare var umbrellaSharp: string; export declare var unlink: string; export declare var unlinkOutline: string; export declare var 
unlinkSharp: string;
export declare var videocam: string; export declare var videocamOff: string; export declare var videocamOffOutline: string; export declare var videocamOffSharp: string; export declare var videocamOutline: string; export declare var videocamSharp: string; export declare var volumeHigh: string; export declare var volumeHighOutline: string; export declare var volumeHighSharp: string; export declare var volumeLow: string; export declare var volumeLowOutline: string; export declare var volumeLowSharp: string; export declare var volumeMedium: string; export declare var volumeMediumOutline: string; export declare var volumeMediumSharp: string; export declare var volumeMute: string; export declare var volumeMuteOutline: string; export declare var volumeMuteSharp: string; export declare var volumeOff: string; export declare var volumeOffOutline: string; export declare var volumeOffSharp: string; export declare var walk: string; export declare var walkOutline: string; export declare var walkSharp: string; export declare var wallet: string; export declare var walletOutline: string; export declare var walletSharp: string; export declare var warning: string; export declare var warningOutline: string; export declare var warningSharp: string; export declare var watch: string; export declare var watchOutline: string; export declare var watchSharp: string; export declare var water: string; export declare var waterOutline: string; export declare var waterSharp: string; export declare var wifi: string; export declare var wifiOutline: string; export declare var wifiSharp: string; export declare var wine: string; export declare var wineOutline: string; export declare var wineSharp: string; export declare var woman: string; export declare var womanOutline: string; export declare var womanSharp: string;
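// Usage sketch (hedged): each export above is an SVG icon data string. In an app
// that bundles ionicons, individual icons are typically imported by name, e.g.:
//
//   import { addCircleOutline, trashOutline } from 'ionicons/icons';
//   // ...and the string is then handed to an <ion-icon icon={...}> or equivalent.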
_dir.py
import os
from pathlib import Path

# Project root: the parent of the directory containing this file.
project_dir = os.path.split(os.path.dirname(__file__))[0]
project_dir_path = Path(project_dir)

# The src/ directory under the project root.
src_dir = os.path.join(project_dir, "src")
src_dir_path = Path(src_dir)

# Convenience callable that switches the working directory to src/.
ch_src_dir = lambda: os.chdir(src_dir)
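
# A minimal self-check sketch (hedged: assumes this file lives one level below
# the project root); run the module directly to verify the resolved paths.
if __name__ == "__main__":
    print(f"project root: {project_dir_path}")
    print(f"src dir:      {src_dir_path}")
    ch_src_dir()
    print(f"cwd is now:   {os.getcwd()}")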
db.ts
import { Sequelize } from 'sequelize'
import { sequelizeOptions } from './config'
import { UserFactory } from './models/Users'

// https://sequelize.org/master/index.html
// A single shared Sequelize instance, configured from ./config.
const sequelize = new Sequelize(sequelizeOptions)

// Model factories bind each model to this connection.
export const Users = UserFactory(sequelize)

export default sequelize
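
// A minimal startup sketch using the standard Sequelize API: verify the
// connection, then create any missing tables for the registered models.
// (Hedged: whether `sync()` is appropriate depends on your migration strategy.)
export async function initDb(): Promise<void> {
  await sequelize.authenticate() // throws if the connection cannot be established
  await sequelize.sync()         // no-op for tables that already exist
}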
__init__.py
"""Nintendo Wishlist integration.""" import logging import voluptuous as vol from homeassistant import core from homeassistant.const import CONF_SCAN_INTERVAL from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import CONF_COUNTRY, CONF_WISHLIST, DEFAULT_SCAN_INTERVAL, DOMAIN from .eshop import Country, EShop _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( { vol.Required(CONF_WISHLIST): cv.ensure_list, vol.Required(CONF_COUNTRY): cv.enum(Country), vol.Optional( CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL ): vol.All(cv.time_period, cv.positive_timedelta), } ) }, # The full HA configurations gets passed to `async_setup` so we need to allow # extra keys. extra=vol.ALLOW_EXTRA, ) async def async_setup(hass: core.HomeAssistant, config: dict) -> bool:
"""Set up the platform. @NOTE: `config` is the full dict from `configuration.yaml`. :returns: A boolean to indicate that initialization was successful. """ conf = config[DOMAIN] country = conf[CONF_COUNTRY].name wishlist = conf[CONF_WISHLIST] scan_interval = conf[CONF_SCAN_INTERVAL] eshop = EShop(country, async_get_clientsession(hass), wishlist) coordinator = DataUpdateCoordinator( hass, _LOGGER, # Name of the data. For logging purposes. name=DOMAIN, update_method=eshop.fetch_on_sale, # Polling interval. Will only be polled if there are subscribers. update_interval=scan_interval, ) # Fetch initial data so we have data when entities subscribe await coordinator.async_refresh() hass.data[DOMAIN] = { "conf": conf, "coordinator": coordinator, } hass.async_create_task(async_load_platform(hass, "sensor", DOMAIN, {}, conf)) hass.async_create_task(async_load_platform(hass, "binary_sensor", DOMAIN, {}, conf)) return True
config.go
/* Copyright IBM Corp. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package localconfig import ( "fmt" "path/filepath" "strings" "time" "github.com/SmartBFT-Go/consensus/pkg/types" "github.com/hyperledger/fabric/common/flogging" "github.com/hyperledger/fabric/common/policies" "github.com/hyperledger/fabric/common/viperutil" cf "github.com/hyperledger/fabric/core/config" "github.com/hyperledger/fabric/msp" "github.com/hyperledger/fabric/protos/orderer/etcdraft" "github.com/hyperledger/fabric/protos/orderer/smartbft" "github.com/spf13/viper" ) const ( // Prefix identifies the prefix for the configtxgen-related ENV vars. Prefix string = "CONFIGTX" // The type key for etcd based RAFT consensus. EtcdRaft = "etcdraft" // The type key for BFT Smart consensus SmartBFT = "smartbft" ) var logger = flogging.MustGetLogger("common.tools.configtxgen.localconfig") var configName = strings.ToLower(Prefix) const ( // TestChainID is the channel name used for testing purposes when one is // not given TestChainID = "testchainid" // SampleInsecureSoloProfile references the sample profile which does not // include any MSPs and uses solo for ordering. SampleInsecureSoloProfile = "SampleInsecureSolo" // SampleDevModeSoloProfile references the sample profile which requires // only basic membership for admin privileges and uses solo for ordering. SampleDevModeSoloProfile = "SampleDevModeSolo" // SampleSingleMSPSoloProfile references the sample profile which includes // only the sample MSP and uses solo for ordering. SampleSingleMSPSoloProfile = "SampleSingleMSPSolo" // SampleInsecureKafkaProfile references the sample profile which does not // include any MSPs and uses Kafka for ordering. SampleInsecureKafkaProfile = "SampleInsecureKafka" // SampleDevModeKafkaProfile references the sample profile which requires only // basic membership for admin privileges and uses Kafka for ordering. SampleDevModeKafkaProfile = "SampleDevModeKafka" // SampleSingleMSPKafkaProfile references the sample profile which includes // only the sample MSP and uses Kafka for ordering. SampleSingleMSPKafkaProfile = "SampleSingleMSPKafka" // SampleDevModeEtcdRaftProfile references the sample profile used for testing // the etcd/raft-based ordering service. SampleDevModeEtcdRaftProfile = "SampleDevModeEtcdRaft" // SampleDevModeSmartBFTProfile references the sample profile used for testing // the SmartBFT-based ordering service. SampleDevModeSmartBFTProfile = "SampleDevModeSmartBFT" // SampleSingleMSPChannelProfile references the sample profile which // includes only the sample MSP and is used to create a channel SampleSingleMSPChannelProfile = "SampleSingleMSPChannel" // SampleConsortiumName is the sample consortium from the // sample configtx.yaml SampleConsortiumName = "SampleConsortium" // SampleOrgName is the name of the sample org in the sample profiles SampleOrgName = "SampleOrg" // AdminRoleAdminPrincipal is set as AdminRole to cause the MSP role of // type Admin to be used as the admin principal default AdminRoleAdminPrincipal = "Role.ADMIN" // MemberRoleAdminPrincipal is set as AdminRole to cause the MSP role of // type Member to be used as the admin principal default MemberRoleAdminPrincipal = "Role.MEMBER" ) // TopLevel consists of the structs used by the configtxgen tool. 
type TopLevel struct { Profiles map[string]*Profile `yaml:"Profiles"` Organizations []*Organization `yaml:"Organizations"` Channel *Profile `yaml:"Channel"` Application *Application `yaml:"Application"` Orderer *Orderer `yaml:"Orderer"` Capabilities map[string]map[string]bool `yaml:"Capabilities"` Resources *Resources `yaml:"Resources"` } // Profile encodes orderer/application configuration combinations for the // configtxgen tool. type Profile struct { Consortium string `yaml:"Consortium"` Application *Application `yaml:"Application"` Orderer *Orderer `yaml:"Orderer"` Consortiums map[string]*Consortium `yaml:"Consortiums"` Capabilities map[string]bool `yaml:"Capabilities"` Policies map[string]*Policy `yaml:"Policies"` } // Policy encodes a channel config policy type Policy struct { Type string `yaml:"Type"` Rule string `yaml:"Rule"` } // Consortium represents a group of organizations which may create channels // with each other type Consortium struct { Organizations []*Organization `yaml:"Organizations"` } // Application encodes the application-level configuration needed in config // transactions. type Application struct { Organizations []*Organization `yaml:"Organizations"` Capabilities map[string]bool `yaml:"Capabilities"` Resources *Resources `yaml:"Resources"` Policies map[string]*Policy `yaml:"Policies"` ACLs map[string]string `yaml:"ACLs"` } // Resources encodes the application-level resources configuration needed to // seed the resource tree type Resources struct { DefaultModPolicy string } // Organization encodes the organization-level configuration needed in // config transactions. type Organization struct { Name string `yaml:"Name"` ID string `yaml:"ID"` MSPDir string `yaml:"MSPDir"` MSPType string `yaml:"MSPType"` Policies map[string]*Policy `yaml:"Policies"` // Note: Viper deserialization does not seem to care for // embedding of types, so we use one organization struct // for both orderers and applications. AnchorPeers []*AnchorPeer `yaml:"AnchorPeers"` OrdererEndpoints []string `yaml:"OrdererEndpoints"` // AdminPrincipal is deprecated and may be removed in a future release // it was used for modifying the default policy generation, but policies // may now be specified explicitly so it is redundant and unnecessary AdminPrincipal string `yaml:"AdminPrincipal"` } // AnchorPeer encodes the necessary fields to identify an anchor peer. type AnchorPeer struct { Host string `yaml:"Host"` Port int `yaml:"Port"` } // Orderer contains configuration which is used for the // bootstrapping of an orderer by the provisional bootstrapper. type Orderer struct { OrdererType string `yaml:"OrdererType"` Addresses []string `yaml:"Addresses"` BatchTimeout time.Duration `yaml:"BatchTimeout"` BatchSize BatchSize `yaml:"BatchSize"` Kafka Kafka `yaml:"Kafka"` EtcdRaft *etcdraft.ConfigMetadata `yaml:"EtcdRaft"` SmartBFT *smartbft.ConfigMetadata `yaml:"SmartBFT"` Organizations []*Organization `yaml:"Organizations"` MaxChannels uint64 `yaml:"MaxChannels"` Capabilities map[string]bool `yaml:"Capabilities"` Policies map[string]*Policy `yaml:"Policies"` } // BatchSize contains configuration affecting the size of batches. type BatchSize struct { MaxMessageCount uint32 `yaml:"MaxMessageCount"` AbsoluteMaxBytes uint32 `yaml:"AbsoluteMaxBytes"` PreferredMaxBytes uint32 `yaml:"PreferredMaxBytes"` } // Kafka contains configuration for the Kafka-based orderer. 
type Kafka struct { Brokers []string `yaml:"Brokers"` } var genesisDefaults = TopLevel{ Orderer: &Orderer{ OrdererType: "solo", Addresses: []string{"127.0.0.1:7050"}, BatchTimeout: 2 * time.Second, BatchSize: BatchSize{ MaxMessageCount: 500, AbsoluteMaxBytes: 10 * 1024 * 1024, PreferredMaxBytes: 2 * 1024 * 1024, }, Kafka: Kafka{ Brokers: []string{"127.0.0.1:9092"}, }, EtcdRaft: &etcdraft.ConfigMetadata{ Options: &etcdraft.Options{ TickInterval: "500ms", ElectionTick: 10, HeartbeatTick: 1, MaxInflightBlocks: 5, SnapshotIntervalSize: 20 * 1024 * 1024, // 20 MB }, }, SmartBFT: &smartbft.ConfigMetadata{ Options: &smartbft.Options{ RequestBatchMaxCount: uint64(types.DefaultConfig.RequestBatchMaxCount), RequestBatchMaxBytes: uint64(types.DefaultConfig.RequestBatchMaxBytes), RequestBatchMaxInterval: types.DefaultConfig.RequestBatchMaxInterval.String(), IncomingMessageBufferSize: uint64(types.DefaultConfig.IncomingMessageBufferSize), RequestPoolSize: uint64(types.DefaultConfig.RequestPoolSize), RequestForwardTimeout: types.DefaultConfig.RequestForwardTimeout.String(), RequestComplainTimeout: types.DefaultConfig.RequestComplainTimeout.String(), RequestAutoRemoveTimeout: types.DefaultConfig.RequestAutoRemoveTimeout.String(), ViewChangeResendInterval: types.DefaultConfig.ViewChangeResendInterval.String(), ViewChangeTimeout: types.DefaultConfig.ViewChangeTimeout.String(), LeaderHeartbeatTimeout: types.DefaultConfig.LeaderHeartbeatTimeout.String(), LeaderHeartbeatCount: uint64(types.DefaultConfig.LeaderHeartbeatCount), CollectTimeout: types.DefaultConfig.CollectTimeout.String(), SyncOnStart: types.DefaultConfig.SyncOnStart, SpeedUpViewChange: types.DefaultConfig.SpeedUpViewChange, }, }, }, } // LoadTopLevel simply loads the configtx.yaml file into the structs above and // completes their initialization. Config paths may optionally be provided and // will be used in place of the FABRIC_CFG_PATH env variable. // // Note, for environment overrides to work properly within a profile, Load // should be used instead. func LoadTopLevel(configPaths ...string) *TopLevel { config := viper.New() if len(configPaths) > 0 { for _, p := range configPaths { config.AddConfigPath(p) } config.SetConfigName(configName) } else { cf.InitViper(config, configName) } // For environment variables config.SetEnvPrefix(Prefix) config.AutomaticEnv() replacer := strings.NewReplacer(".", "_") config.SetEnvKeyReplacer(replacer) err := config.ReadInConfig() if err != nil { logger.Panic("Error reading configuration: ", err) } logger.Debugf("Using config file: %s", config.ConfigFileUsed()) var uconf TopLevel err = viperutil.EnhancedExactUnmarshal(config, &uconf) if err != nil { logger.Panic("Error unmarshaling config into struct: ", err) } (&uconf).completeInitialization(filepath.Dir(config.ConfigFileUsed())) logger.Infof("Loaded configuration: %s", config.ConfigFileUsed()) return &uconf } // Load returns the orderer/application config combination that corresponds to // a given profile. Config paths may optionally be provided and will be used // in place of the FABRIC_CFG_PATH env variable. 
func Load(profile string, configPaths ...string) *Profile { config := viper.New() if len(configPaths) > 0 { for _, p := range configPaths { config.AddConfigPath(p) } config.SetConfigName(configName) } else { cf.InitViper(config, configName) } // For environment variables config.SetEnvPrefix(Prefix) config.AutomaticEnv() // This replacer allows substitution within the particular profile without // having to fully qualify the name replacer := strings.NewReplacer(strings.ToUpper(fmt.Sprintf("profiles.%s.", profile)), "", ".", "_") config.SetEnvKeyReplacer(replacer) err := config.ReadInConfig() if err != nil { logger.Panic("Error reading configuration: ", err) } logger.Debugf("Using config file: %s", config.ConfigFileUsed()) var uconf TopLevel err = viperutil.EnhancedExactUnmarshal(config, &uconf) if err != nil { logger.Panic("Error unmarshaling config into struct: ", err) } result, ok := uconf.Profiles[profile] if !ok { logger.Panic("Could not find profile: ", profile) } result.completeInitialization(filepath.Dir(config.ConfigFileUsed())) logger.Infof("Loaded configuration: %s", config.ConfigFileUsed()) return result } func (t *TopLevel) completeInitialization(configDir string) { for _, org := range t.Organizations { org.completeInitialization(configDir) } if t.Orderer != nil { t.Orderer.completeInitialization(configDir) } } func (p *Profile) completeInitialization(configDir string) { if p.Application != nil { for _, org := range p.Application.Organizations { org.completeInitialization(configDir) } if p.Application.Resources != nil { p.Application.Resources.completeInitialization() } } if p.Consortiums != nil { for _, consortium := range p.Consortiums { for _, org := range consortium.Organizations { org.completeInitialization(configDir) } } } if p.Orderer != nil { for _, org := range p.Orderer.Organizations { org.completeInitialization(configDir) } // Some profiles will not define orderer parameters p.Orderer.completeInitialization(configDir) } } func (r *Resources) completeInitialization() { for { switch { case r.DefaultModPolicy == "": r.DefaultModPolicy = policies.ChannelApplicationAdmins default: return } } } func (org *Organization) completeInitialization(configDir string) { // set the MSP type; if none is specified we assume BCCSP if org.MSPType == "" { org.MSPType = msp.ProviderTypeToString(msp.FABRIC) } if org.AdminPrincipal == "" { org.AdminPrincipal = AdminRoleAdminPrincipal } translatePaths(configDir, org) } func (ord *Orderer) completeInitialization(configDir string) { loop: for { switch { case ord.OrdererType == "": logger.Infof("Orderer.OrdererType unset, setting to %v", genesisDefaults.Orderer.OrdererType) ord.OrdererType = genesisDefaults.Orderer.OrdererType case ord.Addresses == nil: logger.Infof("Orderer.Addresses unset, setting to %s", genesisDefaults.Orderer.Addresses) ord.Addresses = genesisDefaults.Orderer.Addresses case ord.BatchTimeout == 0: logger.Infof("Orderer.BatchTimeout unset, setting to %s", genesisDefaults.Orderer.BatchTimeout) ord.BatchTimeout = genesisDefaults.Orderer.BatchTimeout case ord.BatchSize.MaxMessageCount == 0: logger.Infof("Orderer.BatchSize.MaxMessageCount unset, setting to %v", genesisDefaults.Orderer.BatchSize.MaxMessageCount) ord.BatchSize.MaxMessageCount = genesisDefaults.Orderer.BatchSize.MaxMessageCount case ord.BatchSize.AbsoluteMaxBytes == 0: logger.Infof("Orderer.BatchSize.AbsoluteMaxBytes unset, setting to %v", genesisDefaults.Orderer.BatchSize.AbsoluteMaxBytes) ord.BatchSize.AbsoluteMaxBytes = 
genesisDefaults.Orderer.BatchSize.AbsoluteMaxBytes case ord.BatchSize.PreferredMaxBytes == 0: logger.Infof("Orderer.BatchSize.PreferredMaxBytes unset, setting to %v", genesisDefaults.Orderer.BatchSize.PreferredMaxBytes) ord.BatchSize.PreferredMaxBytes = genesisDefaults.Orderer.BatchSize.PreferredMaxBytes default: break loop } } logger.Infof("orderer type: %s", ord.OrdererType) // Additional, consensus type-dependent initialization goes here // Also using this to panic on unknown orderer type. switch ord.OrdererType { case "solo": // nothing to be done here case "kafka": if ord.Kafka.Brokers == nil { logger.Infof("Orderer.Kafka unset, setting to %v", genesisDefaults.Orderer.Kafka.Brokers) ord.Kafka.Brokers = genesisDefaults.Orderer.Kafka.Brokers } case etcdraft.TypeKey: if ord.EtcdRaft == nil { logger.Panicf("%s raft configuration missing", etcdraft.TypeKey) } if ord.EtcdRaft.Options == nil { logger.Infof("Orderer.EtcdRaft.Options unset, setting to %v", genesisDefaults.Orderer.EtcdRaft.Options) ord.EtcdRaft.Options = genesisDefaults.Orderer.EtcdRaft.Options } second_loop: for { switch { case ord.EtcdRaft.Options.TickInterval == "": logger.Infof("Orderer.EtcdRaft.Options.TickInterval unset, setting to %v", genesisDefaults.Orderer.EtcdRaft.Options.TickInterval) ord.EtcdRaft.Options.TickInterval = genesisDefaults.Orderer.EtcdRaft.Options.TickInterval case ord.EtcdRaft.Options.ElectionTick == 0: logger.Infof("Orderer.EtcdRaft.Options.ElectionTick unset, setting to %v", genesisDefaults.Orderer.EtcdRaft.Options.ElectionTick) ord.EtcdRaft.Options.ElectionTick = genesisDefaults.Orderer.EtcdRaft.Options.ElectionTick case ord.EtcdRaft.Options.HeartbeatTick == 0: logger.Infof("Orderer.EtcdRaft.Options.HeartbeatTick unset, setting to %v", genesisDefaults.Orderer.EtcdRaft.Options.HeartbeatTick) ord.EtcdRaft.Options.HeartbeatTick = genesisDefaults.Orderer.EtcdRaft.Options.HeartbeatTick case ord.EtcdRaft.Options.MaxInflightBlocks == 0: logger.Infof("Orderer.EtcdRaft.Options.MaxInflightBlocks unset, setting to %v", genesisDefaults.Orderer.EtcdRaft.Options.MaxInflightBlocks) ord.EtcdRaft.Options.MaxInflightBlocks = genesisDefaults.Orderer.EtcdRaft.Options.MaxInflightBlocks case ord.EtcdRaft.Options.SnapshotIntervalSize == 0: logger.Infof("Orderer.EtcdRaft.Options.SnapshotIntervalSize unset, setting to %v", genesisDefaults.Orderer.EtcdRaft.Options.SnapshotIntervalSize) ord.EtcdRaft.Options.SnapshotIntervalSize = genesisDefaults.Orderer.EtcdRaft.Options.SnapshotIntervalSize case len(ord.EtcdRaft.Consenters) == 0: logger.Panicf("%s configuration did not specify any consenter", etcdraft.TypeKey) default: break second_loop } } if _, err := time.ParseDuration(ord.EtcdRaft.Options.TickInterval); err != nil { logger.Panicf("Etcdraft TickInterval (%s) must be in time duration format", ord.EtcdRaft.Options.TickInterval) } // validate the specified members for Options if ord.EtcdRaft.Options.ElectionTick <= ord.EtcdRaft.Options.HeartbeatTick { logger.Panicf("election tick must be greater than heartbeat tick") } for _, c := range ord.EtcdRaft.GetConsenters() { if c.Host == "" { logger.Panicf("consenter info in %s configuration did not specify host", etcdraft.TypeKey) } if c.Port == 0 { logger.Panicf("consenter info in %s configuration did not specify port", etcdraft.TypeKey) } if c.ClientTlsCert == nil { logger.Panicf("consenter info in %s configuration did not specify client TLS cert", etcdraft.TypeKey) } if c.ServerTlsCert == nil { logger.Panicf("consenter info in %s configuration did not specify server TLS cert", 
etcdraft.TypeKey) } clientCertPath := string(c.GetClientTlsCert()) cf.TranslatePathInPlace(configDir, &clientCertPath) c.ClientTlsCert = []byte(clientCertPath) serverCertPath := string(c.GetServerTlsCert()) cf.TranslatePathInPlace(configDir, &serverCertPath) c.ServerTlsCert = []byte(serverCertPath) } case SmartBFT: if ord.SmartBFT == nil { logger.Panicf("%s configuration missing", SmartBFT) } if ord.SmartBFT.Options == nil { logger.Infof("Orderer.SmartBFT.Options unset, setting to %v", genesisDefaults.Orderer.SmartBFT.Options) ord.SmartBFT.Options = genesisDefaults.Orderer.SmartBFT.Options } if len(ord.SmartBFT.Consenters) == 0 { logger.Panicf("%s configuration did not specify any consenter", SmartBFT) } for _, c := range ord.SmartBFT.GetConsenters() { if c.Host == "" { logger.Panicf("consenter info in %s configuration did not specify host", SmartBFT) } if c.Port == 0 { logger.Panicf("consenter info in %s configuration did not specify port", SmartBFT) } if c.ClientTlsCert == nil { logger.Panicf("consenter info in %s configuration did not specify client TLS cert", SmartBFT) } if c.ServerTlsCert == nil { logger.Panicf("consenter info in %s configuration did not specify server TLS cert", SmartBFT) } if len(c.MspId) == 0 { logger.Panicf("consenter info in %s configuration did not specify MSP ID", SmartBFT) } if len(c.Identity) == 0 { logger.Panicf("consenter info in %s configuration did not specify identity certificate", SmartBFT) } // Path to the client TLS cert clientCertPath := string(c.GetClientTlsCert()) cf.TranslatePathInPlace(configDir, &clientCertPath) c.ClientTlsCert = []byte(clientCertPath) // Path to the server TLS cert serverCertPath := string(c.GetServerTlsCert()) cf.TranslatePathInPlace(configDir, &serverCertPath) c.ServerTlsCert = []byte(serverCertPath) // Path to the identity cert identityCertPath := string(c.GetIdentity()) cf.TranslatePathInPlace(configDir, &identityCertPath) c.Identity = []byte(identityCertPath) } default: logger.Panicf("unknown orderer type: %s", ord.OrdererType) } } func translatePaths(configDir string, org *Organization)
{ cf.TranslatePathInPlace(configDir, &org.MSPDir) }
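To illustrate the environment-override naming that the replacer in `Load` sets up (the `CONFIGTX` prefix, the profile segment stripped, and dots turned into underscores): a hypothetical Python sketch of the mapping; the profile and key names are made up, and this simplifies what viper actually does internally:

def env_var_for(profile: str, key: str, prefix: str = "CONFIGTX") -> str:
    # Mirrors Load(): strip "profiles.<name>." from the key, then "." -> "_".
    key = key.replace("profiles.%s." % profile, "")
    return ("%s_%s" % (prefix, key.replace(".", "_"))).upper()

print(env_var_for("SampleDevModeSolo", "profiles.SampleDevModeSolo.Orderer.OrdererType"))
# -> CONFIGTX_ORDERER_ORDERERTYPE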
GetPipeDriveToBlingOpportinities.ts
import GetOpportunitiesWithStatusWon from '../providers/PipeDrive/GetOpportunitiesWithStatusWon';
import CreateOrderService from '../providers/Bling/CreateOrderService';
import GetNewOpportunities from '../providers/Bling/GetNewOpportunities';
import BuildOrderXML from '../providers/XmlBuilder/buildOrderXML';

class GetPipeDriveToBlingOpportunitiesAction {
  public async handle() {
    const getOpportunitiesWithStatusWon = new GetOpportunitiesWithStatusWon();
    const buildOrderXML = new BuildOrderXML();
    const createOrderService = new CreateOrderService();
    const getNewOpportunities = new GetNewOpportunities();

    try {
      // Fetch every opportunity whose status is "won".
      const opportunities = await getOpportunitiesWithStatusWon.execute();

      if (!opportunities) {
        throw new Error('unable to request opportunities');
      }

      // Keep only the opportunities that do not have an order yet.
      const newOpportunities = await getNewOpportunities.execute(opportunities);

      if (newOpportunities.length <= 0) {
        throw new Error('There are no new opportunities in the Bling');
      }

      const xmls = newOpportunities.map(opportunity =>
        buildOrderXML.execute(opportunity),
      );

      if (xmls.length <= 0) {
        throw new Error('Failed to generate XML');
      }

      // Send the XMLs to create the orders; await them all so failures
      // surface inside this try/catch instead of as unhandled rejections.
      await Promise.all(xmls.map(xml => createOrderService.execute(xml)));
    } catch (err) {
      console.error(err.message);
    }
  }

  public key = 'GetPipeDriveToBlingOpportunitiesAction';
}

export default GetPipeDriveToBlingOpportunitiesAction;
noise.py
# Copyright (c) 2019-2020 Simons Observatory. # Full license can be found in the top level "LICENSE" file. import numpy as np from toast.timing import function_timer, Timer from toast.tod import AnalyticNoise from toast.utils import Logger import toast.qarray as qa from ...sim_hardware import get_example def add_so_noise_args(parser): parser.add_argument( "--common-mode-noise", required=False, help="String defining analytical parameters of a per-tube " "common mode that is co-added with every detector: " "'fmin[Hz],fknee[Hz],alpha,NET[K]'", ) return @function_timer def get_elevation_noise(args, comm, data, key="noise"): """ Insert elevation-dependent noise """ timer = Timer() timer.start() # fsample = args.sample_rate for obs in data.obs: tod = obs["tod"] fp = obs["focalplane"] noise = obs[key] for det in tod.local_dets: if det not in noise.keys: raise RuntimeError( 'Detector "{}" does not have a PSD in the noise object'.format(det) ) A = fp[det]["A"] C = fp[det]["C"] psd = noise.psd(det) try: # Some TOD classes provide a shortcut to Az/El _, el = tod.read_azel(detector=det) except Exception: azelquat = tod.read_pntg(detector=det, azel=True) # Convert Az/El quaternion of the detector back into # angles for the simulation. theta, _ = qa.to_position(azelquat) el = np.pi / 2 - theta el = np.median(el) # Scale the analytical noise PSD. Pivot is at el = 50 deg. psd[:] *= (A / np.sin(el) + C) ** 2 timer.stop() if comm.world_rank == 0: timer.report("Elevation noise") return @function_timer def get_analytic_noise(args, comm, focalplane, verbose=True): """ Create a TOAST noise object. Create a noise object from the 1/f noise parameters contained in the focalplane database. """ timer = Timer() timer.start() detectors = sorted(focalplane.keys()) fmins = {} fknees = {} alphas = {} NETs = {} rates = {} indices = {} for d in detectors: rates[d] = args.sample_rate fmins[d] = focalplane[d]["fmin"] fknees[d] = focalplane[d]["fknee"] alphas[d] = focalplane[d]["alpha"] NETs[d] = focalplane[d]["NET"] indices[d] = focalplane[d]["index"] if args.common_mode_noise: # Add an extra "virtual" detector for common mode noise for # every optics tube fmin, fknee, alpha, net = np.array(args.common_mode_noise.split(",")).astype( np.float64 ) hw = get_example() for itube, tube in enumerate(sorted(hw.data["tubes"].keys())): d = "common_mode_{}".format(tube) detectors.append(d) rates[d] = args.sample_rate fmins[d] = fmin fknees[d] = fknee alphas[d] = alpha NETs[d] = net indices[d] = 100000 + itube noise = AnalyticNoise( rate=rates, fmin=fmins, detectors=detectors, fknee=fknees, alpha=alphas,
        NET=NETs,
        indices=indices,
    )

    if args.common_mode_noise:
        # Update the mixing matrix in the noise operator
        mixmatrix = {}
        keys = set()
        for det in focalplane.keys():
            tube = focalplane[det]["tube"]
            common = "common_mode_{}".format(tube)
            mixmatrix[det] = {det: 1, common: 1}
            keys.add(det)
            keys.add(common)
        # There should probably be an accessor method to update the
        # mixmatrix in the TOAST Noise object.
        if noise._mixmatrix is not None:
            raise RuntimeError("Did not expect non-empty mixing matrix")
        noise._mixmatrix = mixmatrix
        noise._keys = list(sorted(keys))

    timer.stop()
    if comm.world_rank == 0 and verbose:
        timer.report("Creating noise model")
    return noise
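The elevation scaling in `get_elevation_noise` reduces to `psd *= (A / sin(el) + C) ** 2` (pivot at el = 50 deg), and `--common-mode-noise` is a comma-separated float list. A self-contained numpy sketch of both pieces; the A, C, and parameter values are invented for illustration:

import numpy as np

# Parse a --common-mode-noise style string: 'fmin[Hz],fknee[Hz],alpha,NET[K]'
fmin, fknee, alpha, net = np.array("1e-5,0.1,2.0,3e-4".split(",")).astype(np.float64)

# Elevation-dependent PSD scaling.
A, C = 0.3, 0.7        # made-up per-detector coefficients
el = np.radians(35.0)  # median elevation of one observation
psd = np.ones(4)       # stand-in for noise.psd(det)
psd *= (A / np.sin(el) + C) ** 2
print(psd)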
database.go
package database

import (
	"github.com/Unknwon/goconfig"
	_ "github.com/go-sql-driver/mysql"
	"github.com/tobycroft/gorose-pro"
	"log"
)

var Database *gorose.Engin

func init() {
	var err error
	Database, err = gorose.Open(DbConfig())
	if err != nil {
		log.Panic(err)
	}
}

func DbConfig() *gorose.Config {
	var conf gorose.Config
	conf.Driver = "mysql"
	conf.SetMaxIdleConns = 90
	conf.SetMaxOpenConns = 300
	conf.Prefix = ""
	conf.Dsn = dsn_local()
	return &conf
}

func dsn_local() string {
	cfg, err := goconfig.LoadConfigFile("conf.ini")
	if err != nil {
		return dsn()
	}
	value, err := cfg.GetSection("database")
	if err != nil {
		return dsn()
	} else {
		dbname := value["dbname"]
		dbuser := value["dbuser"]
		dbpass := value["dbpass"]
		dbhost := value["dbhost"]
		dbport := value["dbport"]
		conntype := "tcp"
		charset := "utf8mb4"
		return dbuser + ":" + dbpass + "@" + conntype + "(" + dbhost + ":" + dbport + ")/" + dbname + "?charset=" + charset + "&parseTime=true"
	}
}

func dsn() string {
	dbname := "GobotQ2"
	dbuser := "GobotQ"
	dbpass := "123456"
	dbhost := "10.0.0.170"
	conntype := "tcp"
	dbport := "3306"
	charset := "utf8mb4"
	return dbuser + ":" + dbpass + "@" + conntype + "(" + dbhost + ":" + dbport + ")/" + dbname + "?charset=" + charset + "&parseTime=true"
}
main.go
package main

import (
	"bufio"
	"fmt"
	"image"
	"image/jpeg"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	// 1) Read the files from file system, decode them into in-memory images.
	pics := readPictures()

	// 2) Flip the images vertically.
	// Choose your weapon!
	flipped := flipAllA(pics)
	// flipped := flipAllB(pics)
	// flipped := flipAllC(pics)
	// flipped := flipAllD(pics)
	// flipped := flipAllE(pics)
	// flipped := flipAllF(pics)
	// flipped := flipAllG(pics)

	// 3) Encode and save the flipped images to the file system.
	for i, f := range flipped {
		dstpath := fmt.Sprintf("result/result_%d.jpg", i)
		_ = save(f, dstpath)
	}
}

func readPictures() (pics []image.Image) {
	err := filepath.Walk("./original", func(path string, info os.FileInfo, err error) error {
		if !strings.HasSuffix(path, ".jpg") {
			// Not a JPG
			return nil
		}
		img, err := load(path)
		if err != nil {
			return err
		}
		pics = append(pics, img)
		return nil
	})
	if err != nil {
		panic(err)
	}
	return pics
}

//
// Flip vertically all JPG files in current folder and subfolders.
//

func load(path string) (image.Image, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	r := bufio.NewReader(f)
	img, _, err := image.Decode(r)
	return img, err
}

func save(img image.Image, path string) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer f.Close()
	return jpeg.Encode(f, img, nil)
}
series_trie_tree_test.go
package tblstore import ( "fmt" "math" "sync" "testing" "github.com/RoaringBitmap/roaring" "github.com/stretchr/testify/assert" ) func Test_trie_tree(t *testing.T) { tree := newTrieTree() assert.NotNil(t, tree) tree.Add("football", nil) tree.Add("football", nil) tree.Add("football", nil) assert.Equal(t, 1, tree.KeyNum()) assert.Equal(t, 8, tree.NodeNum()) tree.Add("foo", nil) assert.Equal(t, 2, tree.KeyNum()) assert.Equal(t, 8, tree.NodeNum()) tree.Add("f", 1) tree.Add("fo", 2) assert.Equal(t, 4, tree.KeyNum()) assert.Equal(t, 8, tree.NodeNum()) tree.Add("feet", 3) assert.Equal(t, 5, tree.KeyNum()) assert.Equal(t, 11, tree.NodeNum()) tree.Add("bike", 4) tree.Add("bike.bke", 5) tree.Add("a", 6) tree.Add("ab", 7) tree.Add("abcd", 8) assert.Equal(t, 10, tree.KeyNum()) assert.Equal(t, 23, tree.NodeNum()) tree.Add("", 323333) assert.Equal(t, 10, tree.KeyNum()) assert.Equal(t, 23, tree.NodeNum()) tree.Reset() assert.Zero(t, tree.KeyNum()) assert.Zero(t, tree.NodeNum()) } func
Test_trie_MarshalBinary
(t *testing.T) { tree := newTrieTree() tree.Add("hello", 9) tree.Add("world", 12) tree.Reset() trie := tree.(*trieTree) assert.Len(t, trie.nodesBuf1, 0) assert.Len(t, trie.nodesBuf2, 0) assert.Len(t, trie.root.children, 0) tree.Add("eleme", 1) tree.Add("eleme", 1) tree.Add("eleme", 3) tree.Add("eleme", 2) tree.Add("eleme.ci", 2) tree.Add("eleme.ci.etrace", 3) tree.Add("eleme.bdi", 4) tree.Add("eleme.other", 5) tree.Add("etrace", 6) tree.Add("java", 7) tree.Add("javascript", 8) tree.Add("j", 9) bin := tree.MarshalBinary() assert.NotNil(t, bin) assert.Equal(t, "ejltaervmaaecs.ecbcorditii.hpeettrrace", string(bin.labels)[2:]) assert.Len(t, bin.values, 9) tree.Reset() } func Benchmark_trie_MarshalBinary(b *testing.B) { tree := newTrieTree() for i := 0; i < b.N; i++ { tree.Add("eleme", 1) tree.Add("eleme.ci", 2) tree.Add("eleme.ci.etrace", 3) tree.Add("eleme.bdi", 4) tree.Add("eleme.other", 5) tree.Add("etrace", 6) tree.Add("java", 7) tree.Add("javascript", 8) tree.Add("j", 9) tree.MarshalBinary() tree.Reset() } } func buildTestTrieTreeData() *trieTreeData { /* c5 e f d6 l t i e c r r m d2 a e e1 c f e3 o x4 values : 5,2,1,3,4 indexes: 0,1,2,3,4 */ tree := newTrieTree() tree.Add("eleme", 1) // index: 3 tree.Add("etcd", 2) // index: 2 tree.Add("etrace", 3) // index: 4 tree.Add("firefox", 4) // index: 5 tree.Add("c", 5) // index: 0 tree.Add("cd", 6) // index: 1 return tree.MarshalBinary() } func Test_trieTree_FindOffsetsByEqual(t *testing.T) { data := buildTestTrieTreeData() // test FindOffsetsByEqual assert.Equal(t, []int{3}, data.FindOffsetsByEqual("eleme")) assert.Equal(t, []int{2}, data.FindOffsetsByEqual("etcd")) assert.Equal(t, []int{4}, data.FindOffsetsByEqual("etrace")) assert.Equal(t, []int{5}, data.FindOffsetsByEqual("firefox")) assert.Equal(t, []int{0}, data.FindOffsetsByEqual("c")) assert.Equal(t, []int{1}, data.FindOffsetsByEqual("cd")) assert.Len(t, data.FindOffsetsByEqual("d"), 0) assert.Len(t, data.FindOffsetsByEqual("et"), 0) assert.Len(t, data.FindOffsetsByEqual("etcd1"), 0) assert.Len(t, data.FindOffsetsByEqual("fire"), 0) assert.Len(t, data.FindOffsetsByEqual("etrac"), 0) } func Test_trieTree_walkTreeByValue(t *testing.T) { data := buildTestTrieTreeData() expects := []struct { prefixValue string exhausted bool nodeNumber uint64 }{ {"", true, 1}, {"e", true, 3}, {"z", false, 22}, {"ellme", false, 22}, {"elome", false, 22}, {"elemee", false, 22}, } for _, testCase := range expects { exhausted, nodeNumber := data.walkTreeByValue(testCase.prefixValue) assert.Equal(t, testCase.exhausted, exhausted) assert.Equal(t, testCase.nodeNumber, nodeNumber) } } func Test_trieTree_FindOffsetsByIn(t *testing.T) { data := buildTestTrieTreeData() // test FindOffsetsByIn assert.Len(t, data.FindOffsetsByIn([]string{"d", "c"}), 1) assert.Equal(t, []int{0}, data.FindOffsetsByIn([]string{"d", "c"})) assert.Equal(t, []int{3, 2}, data.FindOffsetsByIn([]string{"eleme", "etcd"})) assert.Equal(t, []int{4, 5}, data.FindOffsetsByIn([]string{"etrace", "etrace1", "firefox"})) } func Test_trieTree_FindOffsetsByLike(t *testing.T) { data := buildTestTrieTreeData() // test FindOffsetsByLike assert.Equal(t, []int{0, 1}, data.FindOffsetsByLike("c")) assert.Equal(t, []int{1}, data.FindOffsetsByLike("cd")) assert.Equal(t, []int{4, 2}, data.FindOffsetsByLike("et")) assert.Equal(t, []int{5}, data.FindOffsetsByLike("fire")) assert.Nil(t, data.FindOffsetsByLike("")) assert.Len(t, data.FindOffsetsByLike("*"), 6) assert.Nil(t, data.FindOffsetsByLike("etrace1")) } func Test_trieTree_FindOffsetsByRegex(t *testing.T) { data := 
buildTestTrieTreeData() // test FindOffsetsByRegex assert.Len(t, data.FindOffsetsByRegex("et"), 2) assert.Len(t, data.FindOffsetsByRegex("cd"), 1) assert.Len(t, data.FindOffsetsByRegex("^c[a-d]?"), 2) // bad pattern assert.Nil(t, data.FindOffsetsByRegex("[a^-#](")) } func Test_trieTree_PrefixSearch(t *testing.T) { data := buildTestTrieTreeData() // test PrefixSearch assert.Len(t, data.PrefixSearch("e", 3), 3) assert.Len(t, data.PrefixSearch("e", 1), 1) assert.Len(t, data.PrefixSearch("etcd1", 1), 0) } func Test_trieTree_Iterator(t *testing.T) { data := buildTestTrieTreeData() // test iterator with prefix itr1 := data.Iterator("e") assert.True(t, itr1.HasNext()) value, offset := itr1.Next() assert.Equal(t, "etrace", value) assert.Equal(t, 4, offset) assert.True(t, itr1.HasNext()) value, offset = itr1.Next() assert.Equal(t, "etcd", value) assert.Equal(t, 2, offset) assert.True(t, itr1.HasNext()) value, offset = itr1.Next() assert.Equal(t, "eleme", value) assert.Equal(t, 3, offset) assert.False(t, itr1.HasNext()) // test iterator with no-prefix itr2 := data.Iterator("") var count = 0 for itr2.HasNext() { count++ } assert.Equal(t, 6, count) // has Error itr3 := data.Iterator("not-exist") assert.False(t, itr3.HasNext()) assert.False(t, itr3.HasNext()) } var ( once4TestTrieTree sync.Once testTrieTree *trieTreeData ) func prepareTrieTreeData() *trieTreeData { once4TestTrieTree.Do( func() { tree := newTrieTree() for x := 0; x < math.MaxUint8; x++ { for y := 0; y < math.MaxUint8; y++ { // build ip seriesID := uint32(x*math.MaxUint8 + y) ip := fmt.Sprintf("192.168.%d.%d", x, y) r := roaring.New() r.Add(seriesID) tree.Add(ip, r) } } testTrieTree = tree.MarshalBinary() }) return testTrieTree } func BenchmarkTrieTree_LikeSearch(b *testing.B) { data := prepareTrieTreeData() for i := 0; i < b.N; i++ { data.FindOffsetsByLike("192.168.1.1") } } func BenchmarkTrieTree_EqualSearch(b *testing.B) { data := prepareTrieTreeData() for i := 0; i < b.N; i++ { data.FindOffsetsByEqual("192.168.1.1") } } func BenchmarkTrieTree_InSearch(b *testing.B) { data := prepareTrieTreeData() for i := 0; i < b.N; i++ { data.FindOffsetsByIn([]string{"192.168.1.1", "192.168.3.2", "192.168.2.2"}) } } func BenchmarkTrieTree_RegexSearch(b *testing.B) { data := prepareTrieTreeData() for i := 0; i < b.N; i++ { data.FindOffsetsByRegex("192\\.168") } } func BenchmarkTrieTree_PrefixSearch(b *testing.B) { data := prepareTrieTreeData() for i := 0; i < b.N; i++ { data.PrefixSearch("192.168", 200000) } } func BenchmarkTrieTree_Iterator(b *testing.B) { data := prepareTrieTreeData() for i := 0; i < b.N; i++ { itr := data.Iterator("192.168") for itr.HasNext() { itr.Next() } } }
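For anyone puzzling over the node-count assertions in Test_trie_tree (e.g. "football" alone yields 8 nodes, and adding "foo" adds none because it is a prefix of an existing key): an illustrative Python sketch of the same character-trie counting, not the Go implementation itself:

class Node(dict):
    """A trie node: maps a character to its child node."""

root = Node()

def add(key):
    node = root
    for ch in key:
        node = node.setdefault(ch, Node())

def node_num(node=None):
    node = root if node is None else node
    return sum(1 + node_num(child) for child in node.values())

add("football")
print(node_num())  # 8: one node per character
add("foo")
print(node_num())  # still 8: "foo" reuses the existing path
add("feet")
print(node_num())  # 11: shares "f", adds "e", "e", "t"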
Fragments.py
import numpy class Fragments: """ Stores arrays of intensities and M/z values, with some checks on their internal consistency. For example .. testcode:: import numpy as np from matchms import Fragments mz = np.array([10, 20, 30], dtype="float") intensities = np.array([100, 20, 300], dtype="float") peaks = Fragments(mz=mz, intensities=intensities) print(peaks[2]) Should output .. testoutput:: [ 30. 300.] Attributes ---------- mz: Numpy array of m/z values. intensities: Numpy array of peak intensity values. """ def __init__(self, mz=None, intensities=None): assert isinstance(mz, numpy.ndarray), "Input argument 'mz' should be a numpy.array." assert isinstance(intensities, numpy.ndarray), "Input argument 'intensities' should be a numpy.array." assert mz.shape == intensities.shape, "Input arguments 'mz' and 'intensities' should be the same shape." assert mz.dtype == "float", "Input argument 'mz' should be an array of type float." assert intensities.dtype == "float", "Input argument 'intensities' should be an array of type float." self._mz = mz self._intensities = intensities assert self._is_sorted(), "mz values are out of order." def __eq__(self, other): return \ self.mz.shape == other.mz.shape and \ numpy.allclose(self.mz, other.mz) and \ self.intensities.shape == other.intensities.shape and \ numpy.allclose(self.intensities, other.intensities) def __len__(self):
        return self._mz.size

    def __getitem__(self, item):
        return numpy.asarray([self.mz[item], self.intensities[item]])

    def _is_sorted(self):
        return numpy.all(self.mz[:-1] <= self.mz[1:])

    def clone(self):
        return Fragments(self.mz, self.intensities)

    @property
    def mz(self):
        """getter method for mz private variable"""
        return self._mz.copy()

    @property
    def intensities(self):
        """getter method for intensities private variable"""
        return self._intensities.copy()

    @property
    def to_numpy(self):
        """getter method to return stacked numpy array of both peak mz and intensities"""
        return numpy.vstack((self.mz, self.intensities)).T
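One detail of Fragments worth highlighting: the `mz` and `intensities` getters return copies, so callers cannot corrupt the internal arrays. A short usage sketch (matchms is the package the class docstring itself imports from):

import numpy as np
from matchms import Fragments

peaks = Fragments(
    mz=np.array([10.0, 20.0, 30.0]),
    intensities=np.array([100.0, 20.0, 300.0]),
)
print(len(peaks))   # 3, via __len__ -> self._mz.size

mz = peaks.mz       # a copy, not a view
mz[0] = -1.0        # mutating the copy...
print(peaks.mz[0])  # ...leaves the stored values untouched: 10.0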
construct.rs
use crate::cfg::*; use crate::middle::region; use rustc_data_structures::graph::implementation as graph; use syntax::ptr::P; use crate::ty::{self, TyCtxt}; use crate::hir::{self, PatKind}; use crate::hir::def_id::DefId; struct CFGBuilder<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId, tables: &'a ty::TypeckTables<'tcx>, graph: CFGGraph, fn_exit: CFGIndex, loop_scopes: Vec<LoopScope>, breakable_block_scopes: Vec<BlockScope>, } #[derive(Copy, Clone)] struct BlockScope { block_expr_id: hir::ItemLocalId, // id of breakable block expr node break_index: CFGIndex, // where to go on `break` } #[derive(Copy, Clone)] struct LoopScope { loop_id: hir::ItemLocalId, // id of loop/while node continue_index: CFGIndex, // where to go on a `loop` break_index: CFGIndex, // where to go on a `break` } pub fn
construct
<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body: &hir::Body) -> CFG { let mut graph = graph::Graph::new(); let entry = graph.add_node(CFGNodeData::Entry); // `fn_exit` is target of return exprs, which lies somewhere // outside input `body`. (Distinguishing `fn_exit` and `body_exit` // also resolves chicken-and-egg problem that arises if you try to // have return exprs jump to `body_exit` during construction.) let fn_exit = graph.add_node(CFGNodeData::Exit); let body_exit; // Find the tables for this body. let owner_def_id = tcx.hir().local_def_id(tcx.hir().body_owner(body.id())); let tables = tcx.typeck_tables_of(owner_def_id); let mut cfg_builder = CFGBuilder { tcx, owner_def_id, tables, graph, fn_exit, loop_scopes: Vec::new(), breakable_block_scopes: Vec::new(), }; body_exit = cfg_builder.expr(&body.value, entry); cfg_builder.add_contained_edge(body_exit, fn_exit); let CFGBuilder { graph, .. } = cfg_builder; CFG { owner_def_id, graph, entry, exit: fn_exit, } } impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn block(&mut self, blk: &hir::Block, pred: CFGIndex) -> CFGIndex { if blk.targeted_by_break { let expr_exit = self.add_ast_node(blk.hir_id.local_id, &[]); self.breakable_block_scopes.push(BlockScope { block_expr_id: blk.hir_id.local_id, break_index: expr_exit, }); let mut stmts_exit = pred; for stmt in &blk.stmts { stmts_exit = self.stmt(stmt, stmts_exit); } let blk_expr_exit = self.opt_expr(&blk.expr, stmts_exit); self.add_contained_edge(blk_expr_exit, expr_exit); self.breakable_block_scopes.pop(); expr_exit } else { let mut stmts_exit = pred; for stmt in &blk.stmts { stmts_exit = self.stmt(stmt, stmts_exit); } let expr_exit = self.opt_expr(&blk.expr, stmts_exit); self.add_ast_node(blk.hir_id.local_id, &[expr_exit]) } } fn stmt(&mut self, stmt: &hir::Stmt, pred: CFGIndex) -> CFGIndex { let exit = match stmt.node { hir::StmtKind::Local(ref local) => { let init_exit = self.opt_expr(&local.init, pred); self.pat(&local.pat, init_exit) } hir::StmtKind::Item(_) => { pred } hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => { self.expr(&expr, pred) } }; self.add_ast_node(stmt.hir_id.local_id, &[exit]) } fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex { match pat.node { PatKind::Binding(.., None) | PatKind::Path(_) | PatKind::Lit(..) | PatKind::Range(..) | PatKind::Wild => self.add_ast_node(pat.hir_id.local_id, &[pred]), PatKind::Box(ref subpat) | PatKind::Ref(ref subpat, _) | PatKind::Binding(.., Some(ref subpat)) => { let subpat_exit = self.pat(&subpat, pred); self.add_ast_node(pat.hir_id.local_id, &[subpat_exit]) } PatKind::TupleStruct(_, ref subpats, _) | PatKind::Tuple(ref subpats, _) => { let pats_exit = self.pats_all(subpats.iter(), pred); self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) } PatKind::Struct(_, ref subpats, _) => { let pats_exit = self.pats_all(subpats.iter().map(|f| &f.node.pat), pred); self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) } PatKind::Slice(ref pre, ref vec, ref post) => { let pre_exit = self.pats_all(pre.iter(), pred); let vec_exit = self.pats_all(vec.iter(), pre_exit); let post_exit = self.pats_all(post.iter(), vec_exit); self.add_ast_node(pat.hir_id.local_id, &[post_exit]) } } } fn pats_all<'b, I: Iterator<Item=&'b P<hir::Pat>>>(&mut self, pats: I, pred: CFGIndex) -> CFGIndex { //! Handles case where all of the patterns must match. 
pats.fold(pred, |pred, pat| self.pat(&pat, pred)) } fn expr(&mut self, expr: &hir::Expr, pred: CFGIndex) -> CFGIndex { match expr.node { hir::ExprKind::Block(ref blk, _) => { let blk_exit = self.block(&blk, pred); self.add_ast_node(expr.hir_id.local_id, &[blk_exit]) } hir::ExprKind::If(ref cond, ref then, None) => { // // [pred] // | // v 1 // [cond] // | // / \ // / \ // v 2 * // [then] | // | | // v 3 v 4 // [..expr..] // let cond_exit = self.expr(&cond, pred); // 1 let then_exit = self.expr(&then, cond_exit); // 2 self.add_ast_node(expr.hir_id.local_id, &[cond_exit, then_exit]) // 3,4 } hir::ExprKind::If(ref cond, ref then, Some(ref otherwise)) => { // // [pred] // | // v 1 // [cond] // | // / \ // / \ // v 2 v 3 // [then][otherwise] // | | // v 4 v 5 // [..expr..] // let cond_exit = self.expr(&cond, pred); // 1 let then_exit = self.expr(&then, cond_exit); // 2 let else_exit = self.expr(&otherwise, cond_exit); // 3 self.add_ast_node(expr.hir_id.local_id, &[then_exit, else_exit]) // 4, 5 } hir::ExprKind::While(ref cond, ref body, _) => { // // [pred] // | // v 1 // [loopback] <--+ 5 // | | // v 2 | // +-----[cond] | // | | | // | v 4 | // | [body] -----+ // v 3 // [expr] // // Note that `break` and `continue` statements // may cause additional edges. let loopback = self.add_dummy_node(&[pred]); // 1 // Create expr_exit without pred (cond_exit) let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 3 // The LoopScope needs to be on the loop_scopes stack while evaluating the // condition and the body of the loop (both can break out of the loop) self.loop_scopes.push(LoopScope { loop_id: expr.hir_id.local_id, continue_index: loopback, break_index: expr_exit }); let cond_exit = self.expr(&cond, loopback); // 2 // Add pred (cond_exit) to expr_exit self.add_contained_edge(cond_exit, expr_exit); let body_exit = self.block(&body, cond_exit); // 4 self.add_contained_edge(body_exit, loopback); // 5 self.loop_scopes.pop(); expr_exit } hir::ExprKind::Loop(ref body, _, _) => { // // [pred] // | // v 1 // [loopback] <---+ // | 4 | // v 3 | // [body] ------+ // // [expr] 2 // // Note that `break` and `loop` statements // may cause additional edges. let loopback = self.add_dummy_node(&[pred]); // 1 let expr_exit = self.add_ast_node(expr.hir_id.local_id, &[]); // 2 self.loop_scopes.push(LoopScope { loop_id: expr.hir_id.local_id, continue_index: loopback, break_index: expr_exit, }); let body_exit = self.block(&body, loopback); // 3 self.add_contained_edge(body_exit, loopback); // 4 self.loop_scopes.pop(); expr_exit } hir::ExprKind::Match(ref discr, ref arms, _) => { self.match_(expr.hir_id.local_id, &discr, &arms, pred) } hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => { // // [pred] // | // v 1 // [l] // | // / \ // / \ // v 2 * // [r] | // | | // v 3 v 4 // [..exit..] 
// let l_exit = self.expr(&l, pred); // 1 let r_exit = self.expr(&r, l_exit); // 2 self.add_ast_node(expr.hir_id.local_id, &[l_exit, r_exit]) // 3,4 } hir::ExprKind::Ret(ref v) => { let v_exit = self.opt_expr(v, pred); let b = self.add_ast_node(expr.hir_id.local_id, &[v_exit]); self.add_returning_edge(expr, b); self.add_unreachable_node() } hir::ExprKind::Break(destination, ref opt_expr) => { let v = self.opt_expr(opt_expr, pred); let (target_scope, break_dest) = self.find_scope_edge(expr, destination, ScopeCfKind::Break); let b = self.add_ast_node(expr.hir_id.local_id, &[v]); self.add_exiting_edge(expr, b, target_scope, break_dest); self.add_unreachable_node() } hir::ExprKind::Continue(destination) => { let (target_scope, cont_dest) = self.find_scope_edge(expr, destination, ScopeCfKind::Continue); let a = self.add_ast_node(expr.hir_id.local_id, &[pred]); self.add_exiting_edge(expr, a, target_scope, cont_dest); self.add_unreachable_node() } hir::ExprKind::Array(ref elems) => { self.straightline(expr, pred, elems.iter().map(|e| &*e)) } hir::ExprKind::Call(ref func, ref args) => { self.call(expr, pred, &func, args.iter().map(|e| &*e)) } hir::ExprKind::MethodCall(.., ref args) => { self.call(expr, pred, &args[0], args[1..].iter().map(|e| &*e)) } hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) if self.tables.is_method_call(expr) => { self.call(expr, pred, &l, Some(&**r).into_iter()) } hir::ExprKind::Unary(_, ref e) if self.tables.is_method_call(expr) => { self.call(expr, pred, &e, None::<hir::Expr>.iter()) } hir::ExprKind::Tup(ref exprs) => { self.straightline(expr, pred, exprs.iter().map(|e| &*e)) } hir::ExprKind::Struct(_, ref fields, ref base) => { let field_cfg = self.straightline(expr, pred, fields.iter().map(|f| &*f.expr)); self.opt_expr(base, field_cfg) } hir::ExprKind::Assign(ref l, ref r) | hir::ExprKind::AssignOp(_, ref l, ref r) => { self.straightline(expr, pred, [r, l].iter().map(|&e| &**e)) } hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => { // N.B., && and || handled earlier self.straightline(expr, pred, [l, r].iter().map(|&e| &**e)) } hir::ExprKind::Box(ref e) | hir::ExprKind::AddrOf(_, ref e) | hir::ExprKind::Cast(ref e, _) | hir::ExprKind::Type(ref e, _) | hir::ExprKind::Unary(_, ref e) | hir::ExprKind::Field(ref e, _) | hir::ExprKind::Yield(ref e) | hir::ExprKind::Repeat(ref e, _) => { self.straightline(expr, pred, Some(&**e).into_iter()) } hir::ExprKind::InlineAsm(_, ref outputs, ref inputs) => { let post_outputs = self.exprs(outputs.iter().map(|e| &*e), pred); let post_inputs = self.exprs(inputs.iter().map(|e| &*e), post_outputs); self.add_ast_node(expr.hir_id.local_id, &[post_inputs]) } hir::ExprKind::Closure(..) | hir::ExprKind::Lit(..) | hir::ExprKind::Path(_) | hir::ExprKind::Err => { self.straightline(expr, pred, None::<hir::Expr>.iter()) } } } fn call<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self, call_expr: &hir::Expr, pred: CFGIndex, func_or_rcvr: &hir::Expr, args: I) -> CFGIndex { let func_or_rcvr_exit = self.expr(func_or_rcvr, pred); let ret = self.straightline(call_expr, func_or_rcvr_exit, args); let m = self.tcx.hir().get_module_parent(call_expr.id); if self.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(call_expr)) { self.add_unreachable_node() } else { ret } } fn exprs<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self, exprs: I, pred: CFGIndex) -> CFGIndex { //! 
Constructs graph for `exprs` evaluated in order exprs.fold(pred, |p, e| self.expr(e, p)) } fn opt_expr(&mut self, opt_expr: &Option<P<hir::Expr>>, pred: CFGIndex) -> CFGIndex { //! Constructs graph for `opt_expr` evaluated, if Some opt_expr.iter().fold(pred, |p, e| self.expr(&e, p)) } fn straightline<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self, expr: &hir::Expr, pred: CFGIndex, subexprs: I) -> CFGIndex { //! Handles case of an expression that evaluates `subexprs` in order let subexprs_exit = self.exprs(subexprs, pred); self.add_ast_node(expr.hir_id.local_id, &[subexprs_exit]) } fn match_(&mut self, id: hir::ItemLocalId, discr: &hir::Expr, arms: &[hir::Arm], pred: CFGIndex) -> CFGIndex { // The CFG for match expression is quite complex, so no ASCII // art for it (yet). // // The CFG generated below matches roughly what MIR contains. // Each pattern and guard is visited in parallel, with // arms containing multiple patterns generating multiple nodes // for the same guard expression. The guard expressions chain // into each other from top to bottom, with a specific // exception to allow some additional valid programs // (explained below). MIR differs slightly in that the // pattern matching may continue after a guard but the visible // behaviour should be the same. // // What is going on is explained in further comments. // Visit the discriminant expression let discr_exit = self.expr(discr, pred); // Add a node for the exit of the match expression as a whole. let expr_exit = self.add_ast_node(id, &[]); // Keep track of the previous guard expressions let mut prev_guards = Vec::new(); for arm in arms { // Add an exit node for when we've visited all the // patterns and the guard (if there is one) in the arm. let arm_exit = self.add_dummy_node(&[]); for pat in &arm.pats { // Visit the pattern, coming from the discriminant exit let mut pat_exit = self.pat(&pat, discr_exit); // If there is a guard expression, handle it here if let Some(ref guard) = arm.guard { // Add a dummy node for the previous guard // expression to target let guard_start = self.add_dummy_node(&[pat_exit]); // Visit the guard expression let guard_exit = match guard { hir::Guard::If(ref e) => self.expr(e, guard_start), }; // #47295: We used to have very special case code // here for when a pair of arms are both formed // solely from constants, and if so, not add these // edges. But this was not actually sound without // other constraints that we stopped enforcing at // some point. 
while let Some(prev) = prev_guards.pop() { self.add_contained_edge(prev, guard_start); } // Push the guard onto the list of previous guards prev_guards.push(guard_exit); // Update the exit node for the pattern pat_exit = guard_exit; } // Add an edge from the exit of this pattern to the // exit of the arm self.add_contained_edge(pat_exit, arm_exit); } // Visit the body of this arm let body_exit = self.expr(&arm.body, arm_exit); // Link the body to the exit of the expression self.add_contained_edge(body_exit, expr_exit); } expr_exit } fn add_dummy_node(&mut self, preds: &[CFGIndex]) -> CFGIndex { self.add_node(CFGNodeData::Dummy, preds) } fn add_ast_node(&mut self, id: hir::ItemLocalId, preds: &[CFGIndex]) -> CFGIndex { self.add_node(CFGNodeData::AST(id), preds) } fn add_unreachable_node(&mut self) -> CFGIndex { self.add_node(CFGNodeData::Unreachable, &[]) } fn add_node(&mut self, data: CFGNodeData, preds: &[CFGIndex]) -> CFGIndex { let node = self.graph.add_node(data); for &pred in preds { self.add_contained_edge(pred, node); } node } fn add_contained_edge(&mut self, source: CFGIndex, target: CFGIndex) { let data = CFGEdgeData {exiting_scopes: vec![] }; self.graph.add_edge(source, target, data); } fn add_exiting_edge(&mut self, from_expr: &hir::Expr, from_index: CFGIndex, target_scope: region::Scope, to_index: CFGIndex) { let mut data = CFGEdgeData { exiting_scopes: vec![] }; let mut scope = region::Scope { id: from_expr.hir_id.local_id, data: region::ScopeData::Node }; let region_scope_tree = self.tcx.region_scope_tree(self.owner_def_id); while scope != target_scope { data.exiting_scopes.push(scope.item_local_id()); scope = region_scope_tree.encl_scope(scope); } self.graph.add_edge(from_index, to_index, data); } fn add_returning_edge(&mut self, _from_expr: &hir::Expr, from_index: CFGIndex) { let data = CFGEdgeData { exiting_scopes: self.loop_scopes.iter() .rev() .map(|&LoopScope { loop_id: id, .. }| id) .collect() }; self.graph.add_edge(from_index, self.fn_exit, data); } fn find_scope_edge(&self, expr: &hir::Expr, destination: hir::Destination, scope_cf_kind: ScopeCfKind) -> (region::Scope, CFGIndex) { match destination.target_id { Ok(loop_id) => { for b in &self.breakable_block_scopes { if b.block_expr_id == self.tcx.hir().node_to_hir_id(loop_id).local_id { let scope = region::Scope { id: self.tcx.hir().node_to_hir_id(loop_id).local_id, data: region::ScopeData::Node }; return (scope, match scope_cf_kind { ScopeCfKind::Break => b.break_index, ScopeCfKind::Continue => bug!("can't continue to block"), }); } } for l in &self.loop_scopes { if l.loop_id == self.tcx.hir().node_to_hir_id(loop_id).local_id { let scope = region::Scope { id: self.tcx.hir().node_to_hir_id(loop_id).local_id, data: region::ScopeData::Node }; return (scope, match scope_cf_kind { ScopeCfKind::Break => l.break_index, ScopeCfKind::Continue => l.continue_index, }); } } span_bug!(expr.span, "no scope for id {}", loop_id); } Err(err) => span_bug!(expr.span, "scope error: {}", err), } } } #[derive(Copy, Clone, Eq, PartialEq)] enum ScopeCfKind { Break, Continue, }
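The ASCII diagrams in `expr` above all follow one recipe: visit the subexpressions in evaluation order, then add a node for the expression itself whose predecessors are the possible exits. A tiny Python sketch of that recipe for `if cond { then } else { otherwise }` (the node ids and edge list are purely illustrative):

edges = []
fresh = iter(range(100))

def add_node(preds):
    node = next(fresh)
    edges.extend((p, node) for p in preds)
    return node

pred = add_node([])                           # incoming control flow
cond_exit = add_node([pred])                  # 1: evaluate the condition
then_exit = add_node([cond_exit])             # 2: then-branch
else_exit = add_node([cond_exit])             # 3: else-branch
expr_exit = add_node([then_exit, else_exit])  # 4,5: the join point
print(edges)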
user.component.ts
import { ChangeDetectionStrategy, Component } from '@angular/core';
import { Router } from '@angular/router';
import { SettingsService, User } from '@delon/theme';

import { LocalStorageService } from '../../../service/local-storage.service';

@Component({
  selector: 'header-user',
  template: `
    <div
      class="alain-default__nav-item d-flex align-items-center px-sm"
      nz-dropdown
      nzPlacement="bottomRight"
      [nzDropdownMenu]="userMenu"
    >
      <nz-avatar [nzSrc]="user.avatar" nzSize="small" class="mr-sm"></nz-avatar>
      {{ user.name }}
    </div>
    <nz-dropdown-menu #userMenu="nzDropdownMenu">
      <div nz-menu class="width-sm">
        <li nz-menu-divider></li>
        <div nz-menu-item (click)="logout()">
          <i nz-icon nzType="logout" class="mr-sm"></i>
          {{ 'menu.account.logout' | i18n }}
        </div>
      </div>
    </nz-dropdown-menu>
  `,
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class HeaderUserComponent {
  get user(): User {
    return this.settings.user;
  }

  constructor(private settings: SettingsService, private router: Router, private localStorageSvc: LocalStorageService) {}

  logout(): void {
    this.localStorageSvc.clear();
    this.router.navigateByUrl('/passport/login');
  }
}
Title.js
import React from 'react';
import styled from 'styled-components';

const Title = styled.div`
  font-family: 'Pacifico', cursive;
  font-size: 3.5em;
  color: chocolate;
`;

export default Title;
cabtool.rs
extern crate cab; extern crate chrono; extern crate clap; use cab::{Cabinet, CabinetBuilder, CompressionType, FileEntry, FolderEntry}; use chrono::NaiveDateTime; use clap::{App, Arg, SubCommand}; use std::fs::{self, File}; use std::io; use std::path::PathBuf; use std::time::UNIX_EPOCH; // ========================================================================= // fn main() { let matches = App::new("cabtool") .version("0.1") .author("Matthew D. Steele <[email protected]>") .about("Manipulates CAB files") .subcommand( SubCommand::with_name("cat") .about("Concatenates and prints streams") .arg(Arg::with_name("cab").required(true)) .arg(Arg::with_name("file").multiple(true)), ) .subcommand( SubCommand::with_name("create") .about("Creates a new cabinet") .arg( Arg::with_name("compress") .takes_value(true) .value_name("TYPE") .short("c") .long("compress") .help("Sets compression type"), ) .arg( Arg::with_name("output") .takes_value(true) .value_name("PATH") .short("o") .long("output") .help("Sets output path"), ) .arg(Arg::with_name("file").multiple(true)), ) .subcommand( SubCommand::with_name("ls") .about("Lists files in the cabinet") .arg( Arg::with_name("long") .short("l") .help("Lists in long format"), ) .arg(Arg::with_name("cab").required(true)), ) .get_matches(); if let Some(submatches) = matches.subcommand_matches("cat") { let mut cabinet = open_cab(submatches.value_of("cab").unwrap()).unwrap(); if let Some(filenames) = submatches.values_of("file") { for filename in filenames { let mut file_reader = cabinet.read_file(filename).unwrap(); io::copy(&mut file_reader, &mut io::stdout()).unwrap(); } } } else if let Some(submatches) = matches.subcommand_matches("create") { let ctype = if let Some(string) = submatches.value_of("compress") { let string = string.to_lowercase(); match string.as_str() { "none" => CompressionType::None, "mszip" => CompressionType::MsZip, _ => panic!("Invalid compression type: {}", string), } } else { CompressionType::MsZip }; let out_path = if let Some(path) = submatches.value_of("output") { PathBuf::from(path) } else { let mut path = PathBuf::from("out.cab"); let mut index: i32 = 0; while path.exists() { index += 1; path = PathBuf::from(format!("out{}.cab", index)); } path }; let mut builder = CabinetBuilder::new(); if let Some(filenames) = submatches.values_of("file") { let filenames: Vec<&str> = filenames.collect(); let mut file_index: usize = 0; while file_index < filenames.len() { let folder = builder.add_folder(ctype); let mut folder_size: u64 = 0; while file_index < filenames.len() && folder_size < 0x8000 { let filename = filenames[file_index]; let metadata = fs::metadata(filename).unwrap(); folder_size += metadata.len(); let file = folder.add_file(filename); if let Ok(time) = metadata.modified() { if let Ok(dur) = time.duration_since(UNIX_EPOCH) { let secs = dur.as_secs() as i64; let ndt = NaiveDateTime::from_timestamp(secs, 0); file.set_datetime(ndt); } } file_index += 1; } } } let file = File::create(&out_path).unwrap(); let mut cabinet = builder.build(file).unwrap(); while let Some(mut writer) = cabinet.next_file().unwrap() { let mut file = File::open(writer.file_name()).unwrap(); io::copy(&mut file, &mut writer).unwrap(); } cabinet.finish().unwrap(); } else if let Some(submatches) = matches.subcommand_matches("ls") { let long = submatches.is_present("long"); let cabinet = open_cab(submatches.value_of("cab").unwrap()).unwrap(); for (index, folder) in cabinet.folder_entries().enumerate() { for file in folder.file_entries() { list_file(index, folder, file, 
long); } } } } // ========================================================================= // fn list_file( folder_index: usize, folder: &FolderEntry, file: &FileEntry, long: bool, )
fn open_cab(path: &str) -> io::Result<Cabinet<File>> { Cabinet::new(File::open(path)?) } // ========================================================================= //
{ if !long { println!("{}", file.name()); return; } let ctype = match folder.compression_type() { CompressionType::None => "None".to_string(), CompressionType::MsZip => "MsZip".to_string(), CompressionType::Quantum(v, m) => format!("Q{}/{}", v, m), CompressionType::Lzx(w) => format!("Lzx{}", w), }; let file_size = if file.uncompressed_size() >= 100_000_000 { format!("{} MB", file.uncompressed_size() / (1 << 20)) } else if file.uncompressed_size() >= 1_000_000 { format!("{} kB", file.uncompressed_size() / (1 << 10)) } else { format!("{} B ", file.uncompressed_size()) }; println!( "{}{}{}{}{}{} {:>2} {:<5} {:>10} {} {}", if file.is_read_only() { 'R' } else { '-' }, if file.is_hidden() { 'H' } else { '-' }, if file.is_system() { 'S' } else { '-' }, if file.is_archive() { 'A' } else { '-' }, if file.is_exec() { 'E' } else { '-' }, if file.is_name_utf() { 'U' } else { '-' }, folder_index, ctype, file_size, file.datetime() .map(|dt| dt.to_string()) .unwrap_or("invalid datetime".to_string()), file.name() ); }
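A minimal round-trip sketch of the cab-crate flow this tool drives, restricted to calls that already appear above; the file names ("notes.txt", "demo.cab") are hypothetical:

use cab::{Cabinet, CabinetBuilder, CompressionType};
use std::fs::File;
use std::io;

fn roundtrip() -> io::Result<()> {
    // Declare one MsZip folder with one file, then stream its bytes in.
    let mut builder = CabinetBuilder::new();
    builder.add_folder(CompressionType::MsZip).add_file("notes.txt");
    let mut cab_writer = builder.build(File::create("demo.cab")?)?;
    // The builder hands back one writer per declared file.
    while let Some(mut writer) = cab_writer.next_file()? {
        let mut input = File::open(writer.file_name())?;
        io::copy(&mut input, &mut writer)?;
    }
    cab_writer.finish()?;
    // Read the same entry back out of the finished cabinet.
    let mut cabinet = Cabinet::new(File::open("demo.cab")?)?;
    let mut reader = cabinet.read_file("notes.txt")?;
    io::copy(&mut reader, &mut io::stdout())?;
    Ok(())
}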
sat1.rs
#[doc = "Register `SAT1` reader"] pub struct R(crate::R<SAT1_SPEC>); impl core::ops::Deref for R { type Target = crate::R<SAT1_SPEC>; #[inline(always)] fn
(&self) -> &Self::Target { &self.0 } } impl core::convert::From<crate::R<SAT1_SPEC>> for R { fn from(reader: crate::R<SAT1_SPEC>) -> Self { R(reader) } } #[doc = "Register `SAT1` writer"] pub struct W(crate::W<SAT1_SPEC>); impl core::ops::Deref for W { type Target = crate::W<SAT1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl core::convert::From<crate::W<SAT1_SPEC>> for W { fn from(writer: crate::W<SAT1_SPEC>) -> Self { W(writer) } } #[doc = "Field `ADDR` reader - Specific Address 1"] pub struct ADDR_R(crate::FieldReader<u16, u16>); impl ADDR_R { pub(crate) fn new(bits: u16) -> Self { ADDR_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ADDR_R { type Target = crate::FieldReader<u16, u16>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `ADDR` writer - Specific Address 1"] pub struct ADDR_W<'a> { w: &'a mut W, } impl<'a> ADDR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff) | (value as u32 & 0xffff); self.w } } impl R { #[doc = "Bits 0:15 - Specific Address 1"] #[inline(always)] pub fn addr(&self) -> ADDR_R { ADDR_R::new((self.bits & 0xffff) as u16) } } impl W { #[doc = "Bits 0:15 - Specific Address 1"] #[inline(always)] pub fn addr(&mut self) -> ADDR_W { ADDR_W { w: self } } #[doc = "Writes raw bits to the register."] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Specific Address 1 Top Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [sat1](index.html) module"] pub struct SAT1_SPEC; impl crate::RegisterSpec for SAT1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [sat1::R](R) reader structure"] impl crate::Readable for SAT1_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [sat1::W](W) writer structure"] impl crate::Writable for SAT1_SPEC { type Writer = W; } #[doc = "`reset()` method sets SAT1 to value 0"] impl crate::Resettable for SAT1_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
deref
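A hedged sketch of driving this generated register from application code; the `pac::Peripherals` entry point and the `MAC` block path are hypothetical and depend on the surrounding peripheral access crate:

// Hypothetical PAC entry point; real code would normally use Peripherals::take().
let periph = unsafe { pac::Peripherals::steal() };
// Write the low 16 bits of a station address through the generated writer
// (the raw bits method is unsafe, as declared above).
periph.MAC.sat1.write(|w| unsafe { w.addr().bits(0x5634) });
// Read the ADDR field back through the generated reader.
let addr: u16 = periph.MAC.sat1.read().addr().bits();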
execute_in_process_result.py
from typing import Any, Dict, List, Optional, cast from dagster import DagsterEvent, check from dagster.core.definitions import NodeDefinition, NodeHandle from dagster.core.definitions.utils import DEFAULT_OUTPUT from dagster.core.errors import DagsterInvariantViolationError from dagster.core.execution.plan.outputs import StepOutputHandle class ExecuteInProcessResult: def __init__( self, node_def: NodeDefinition, all_events: List[DagsterEvent], run_id: str, output_capture: Optional[Dict[StepOutputHandle, Any]], ): self._node_def = node_def # If top-level result, no handle will be provided self._handle = NodeHandle(node_def.name, parent=None) self._event_list = all_events self._run_id = run_id self._output_capture = check.opt_dict_param( output_capture, "output_capture", key_type=StepOutputHandle ) @property def success(self) -> bool: """bool: Whether execution was successful.""" return all([not event.is_failure for event in self._event_list]) @property def all_node_events(self) -> List[DagsterEvent]: """List[DagsterEvent]: All dagster events from the in-process execution.""" step_events = [] for node_name in self._node_def.ensure_graph_def().node_dict.keys(): handle = NodeHandle(node_name, None) step_events += _filter_events_by_handle(self._event_list, handle) return step_events @property def run_id(self) -> str: """str: The run id for the executed run""" return self._run_id def events_for_node(self, node_name: str) -> List[DagsterEvent]: """Retrieves all dagster events for a specific node. Args: node_name (str): The name of the node for which outputs should be retrieved. Returns: List[DagsterEvent]: A list of all dagster events associated with provided node name. """ check.str_param(node_name, "node_name") return _filter_events_by_handle(self._event_list, NodeHandle.from_string(node_name)) def
(self, output_name: str = DEFAULT_OUTPUT) -> Any: """Retrieves output of top-level job, if an output is returned. If the top-level job has no output, calling this method will result in a DagsterInvariantViolationError. Args: output_name (Optional[str]): The name of the output to retrieve. Defaults to `result`, the default output name in dagster. Returns: Any: The value of the retrieved output. """ check.str_param(output_name, "output_name") graph_def = self._node_def.ensure_graph_def() if not graph_def.has_output(output_name) and len(graph_def.output_mappings) == 0: raise DagsterInvariantViolationError( f"Attempted to retrieve top-level outputs for '{graph_def.name}', which has no outputs." ) elif not graph_def.has_output(output_name): raise DagsterInvariantViolationError( f"Could not find top-level output '{output_name}' in '{graph_def.name}'." ) # Resolve the first layer of mapping output_mapping = graph_def.get_output_mapping(output_name) mapped_node = graph_def.solid_named(output_mapping.maps_from.solid_name) origin_output_def, origin_handle = mapped_node.definition.resolve_output_to_origin( output_mapping.maps_from.output_name, NodeHandle(mapped_node.name, None), ) # Get output from origin node return _filter_outputs_by_handle( self._output_capture, origin_handle, origin_output_def.name ) def output_for_node(self, node_str: str, output_name: Optional[str] = DEFAULT_OUTPUT) -> Any: """Retrieves output value with a particular name from the in-process run of the job. Args: node_str (str): Name of the op/graph whose output should be retrieved. If the intended graph/op is nested within another graph, the syntax is `outer_graph.inner_node`. output_name (Optional[str]): Name of the output on the op/graph to retrieve. Defaults to `result`, the default output name in dagster. Returns: Any: The value of the retrieved output. """ # resolve handle of node that node_str is referring to target_handle = NodeHandle.from_string(node_str) target_node_def = self._node_def.ensure_graph_def().get_solid(target_handle).definition origin_output_def, origin_handle = target_node_def.resolve_output_to_origin( output_name, NodeHandle.from_string(node_str) ) # retrieve output value from resolved handle return _filter_outputs_by_handle( self._output_capture, origin_handle, origin_output_def.name ) def _filter_events_by_handle( event_list: List[DagsterEvent], handle: NodeHandle ) -> List[DagsterEvent]: step_events = [] for event in event_list: if event.is_step_event: event_handle = cast( NodeHandle, event.solid_handle ) # step events are guaranteed to have a node handle. if event_handle.is_or_descends_from(handle): step_events.append(event) return step_events def _filter_outputs_by_handle( output_dict: Dict[StepOutputHandle, Any], node_handle: NodeHandle, output_name: str, ) -> Any: mapped_outputs = {} step_key = str(node_handle) output_found = False for step_output_handle, value in output_dict.items(): # For the mapped output case, where step keys are in the format # "step_key[upstream_mapped_output_name]" within the step output handle. if step_output_handle.step_key.startswith(f"{step_key}["): output_found = True key_start = step_output_handle.step_key.find("[") key_end = step_output_handle.step_key.find("]") upstream_mapped_output_name = step_output_handle.step_key[key_start + 1 : key_end] mapped_outputs[upstream_mapped_output_name] = value # For all other cases, search for exact match. 
elif ( step_key == step_output_handle.step_key and step_output_handle.output_name == output_name ): output_found = True if not step_output_handle.mapping_key: return output_dict[step_output_handle] mapped_outputs[step_output_handle.mapping_key] = value if not output_found: raise DagsterInvariantViolationError(f"No outputs found for node '{node_handle}'.") return mapped_outputs
output_value
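A minimal usage sketch, assuming a dagster version where the `@op`/`@job` APIs are available; the job itself is hypothetical:

from dagster import job, op

@op
def emit():
    return 5

@job
def my_job():
    emit()

# execute_in_process returns an ExecuteInProcessResult like the class above.
result = my_job.execute_in_process()
assert result.success
assert result.output_for_node("emit") == 5
events = result.events_for_node("emit")  # all DagsterEvents for that op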
py.py
from ._base import register_app, App @register_app class PurePy(App):
alias = 'py' description = 'Pure WSGI application'
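Following the same registration pattern, a second (hypothetical) app would look like this:

from ._base import register_app, App

@register_app
class HelloPy(App):
    alias = 'hello'
    description = 'Minimal demo application'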
views.py
from flask import render_template, send_from_directory, request, jsonify from app import app import hashlib, uuid import game from app import __config__ as config def
(password, correct_hash): """ Compares a plaintext password against a stored SHA-512 hex digest """ # hashlib.sha512 requires bytes, so encode the password before hashing hashed_password = hashlib.sha512(password.encode('utf-8')).hexdigest() return hashed_password == correct_hash @app.route('/index', methods=['POST']) @app.route('/', methods=['POST']) def index(): """ Main webhook for responses to JSON objects """ json_obj = request.get_json() if "conversation_id" not in json_obj: return jsonify({"error": "400 Bad Request: No conversation ID field."}), 400 if "X-Password" not in request.headers: return jsonify({"error": "400 Bad Request: No X-Password header field."}), 400 conv = json_obj["conversation_id"] command = json_obj["command"] if "command" in json_obj else "" password = request.headers.get('X-Password') if not compare_password(password, config.hashed_password): return jsonify({"error": "401 Unauthorized: Password is invalid"}), 401 if game.contains_conv(conv): if command.strip() == "restart": game.finish_conv(conv) return jsonify({"response": game.create_conv(conv)}) else: return jsonify({"response": game.execute_command_conv(conv, command)}) else: return jsonify({"response": game.create_conv(conv)})
compare_password
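A sketch of a client call against this webhook; the host, password, and conversation id are made-up values:

import requests

resp = requests.post(
    "http://localhost:5000/",
    json={"conversation_id": "abc123", "command": "look"},
    headers={"X-Password": "hunter2"},  # verified against config.hashed_password
)
print(resp.json()["response"])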
helpers.go
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package images import ( "fmt" "k8s.io/kubernetes/pkg/api/v1" kubecontainer "k8s.io/kubernetes/pkg/kubelet/container" "k8s.io/kubernetes/pkg/util/flowcontrol" ) // throttleImagePulling wraps kubecontainer.ImageService to throttle image // pulling based on the given QPS and burst limits. If QPS is zero, defaults // to no throttling. func throttleImagePulling(imageService kubecontainer.ImageService, qps float32, burst int) kubecontainer.ImageService { if qps == 0.0
return &throttledImageService{ ImageService: imageService, limiter: flowcontrol.NewTokenBucketRateLimiter(qps, burst), } } type throttledImageService struct { kubecontainer.ImageService limiter flowcontrol.RateLimiter } func (ts throttledImageService) PullImage(image kubecontainer.ImageSpec, secrets []v1.Secret) (string, error) { if ts.limiter.TryAccept() { return ts.ImageService.PullImage(image, secrets) } return "", fmt.Errorf("pull QPS exceeded") }
{ return imageService }
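A sketch of wiring the throttling wrapper into a caller; `imageService` and `spec` are assumed to come from the surrounding kubelet code:

// demoThrottle is illustrative only: it limits pulls to 5/s with a burst of 10.
func demoThrottle(imageService kubecontainer.ImageService, spec kubecontainer.ImageSpec) {
	throttled := throttleImagePulling(imageService, 5.0, 10)
	if _, err := throttled.PullImage(spec, nil); err != nil {
		fmt.Println(err) // "pull QPS exceeded" once the token bucket is drained
	}
}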
json_deser.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. pub fn parse_http_generic_error( response: &http::Response<bytes::Bytes>, ) -> Result<smithy_types::Error, smithy_json::deserialize::Error>
pub fn deser_structure_invalid_request_exceptionjson_err( input: &[u8], mut builder: crate::error::invalid_request_exception::Builder, ) -> Result<crate::error::invalid_request_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_resource_not_found_exceptionjson_err( input: &[u8], mut builder: crate::error::resource_not_found_exception::Builder, ) -> Result<crate::error::resource_not_found_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_too_many_requests_exceptionjson_err( input: &[u8], mut builder: crate::error::too_many_requests_exception::Builder, ) -> Result<crate::error::too_many_requests_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_unauthorized_exceptionjson_err( input: &[u8], mut builder: crate::error::unauthorized_exception::Builder, ) -> Result<crate::error::unauthorized_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_get_role_credentials( input: &[u8], mut builder: crate::output::get_role_credentials_output::Builder, ) -> Result<crate::output::get_role_credentials_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "roleCredentials" => { builder = builder.set_role_credentials( crate::json_deser::deser_structure_role_credentials(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_account_roles( input: &[u8], mut builder: crate::output::list_account_roles_output::Builder, ) -> Result<crate::output::list_account_roles_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "nextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "roleList" => { builder = builder .set_role_list(crate::json_deser::deser_list_role_list_type(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_accounts( input: &[u8], mut builder: crate::output::list_accounts_output::Builder, ) -> Result<crate::output::list_accounts_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "accountList" => { builder = builder.set_account_list( crate::json_deser::deser_list_account_list_type(tokens)?, ); } "nextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn or_empty_doc(data: &[u8]) -> &[u8] { if data.is_empty() { b"{}" } else { data } } pub fn deser_structure_role_credentials<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::RoleCredentials>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::RoleCredentials::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "accessKeyId" => { builder = builder.set_access_key_id( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "secretAccessKey" => { builder = builder.set_secret_access_key( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "sessionToken" => { builder = builder.set_session_token( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "expiration" => { builder = builder.set_expiration( smithy_json::deserialize::token::expect_number_or_null( tokens.next(), )? 
.map(|v| v.to_i64()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_role_list_type<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::RoleInfo>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_role_info(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_account_list_type<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::AccountInfo>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_account_info(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } pub fn deser_structure_role_info<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::RoleInfo>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::RoleInfo::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "roleName" => { builder = builder.set_role_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "accountId" => { builder = builder.set_account_id( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_account_info<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::AccountInfo>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::AccountInfo::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "accountId" => { builder = builder.set_account_id( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "accountName" => { builder = builder.set_account_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "emailAddress" => { builder = builder.set_email_address( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } }
{ crate::json_errors::parse_generic_error(response.body(), response.headers()) }
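A hedged sketch of driving one of these generated deserializers directly; the JSON bytes are made up, and the snippet assumes a function body whose error type is smithy_json::deserialize::Error:

let input = br#"{"accountId":"123456789012","accountName":"dev"}"#;
// Same token iterator the generated code builds internally.
let mut tokens = smithy_json::deserialize::json_token_iter(input).peekable();
let account = crate::json_deser::deser_structure_account_info(&mut tokens)?
    .expect("object, not null");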
io_xk8s_cluster_addons_v1alpha4_cluster_resource_set_binding_list.py
# coding: utf-8 """ Kubernetes No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 The version of the OpenAPI document: v1.20.7 Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six from kubernetes.client.configuration import Configuration class IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'api_version': 'str', 'items': 'list[IoXK8sClusterAddonsV1alpha4ClusterResourceSetBinding]', 'kind': 'str', 'metadata': 'V1ListMeta' } attribute_map = { 'api_version': 'apiVersion', 'items': 'items', 'kind': 'kind', 'metadata': 'metadata' } def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501 """IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList - a model defined in OpenAPI""" # noqa: E501 if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_version = None self._items = None self._kind = None self._metadata = None self.discriminator = None if api_version is not None: self.api_version = api_version self.items = items if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata @property def api_version(self): """Gets the api_version of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501 :return: The api_version of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501
@api_version.setter def api_version(self, api_version): """Sets the api_version of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501 :param api_version: The api_version of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 :type: str """ self._api_version = api_version @property def items(self): """Gets the items of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 List of clusterresourcesetbindings. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md # noqa: E501 :return: The items of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 :rtype: list[IoXK8sClusterAddonsV1alpha4ClusterResourceSetBinding] """ return self._items @items.setter def items(self, items): """Sets the items of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. List of clusterresourcesetbindings. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md # noqa: E501 :param items: The items of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 :type: list[IoXK8sClusterAddonsV1alpha4ClusterResourceSetBinding] """ if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501 raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501 self._items = items @property def kind(self): """Gets the kind of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501 :return: The kind of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 :rtype: str """ return self._kind @kind.setter def kind(self, kind): """Sets the kind of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501 :param kind: The kind of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 :type: str """ self._kind = kind @property def metadata(self): """Gets the metadata of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 :return: The metadata of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. # noqa: E501 :rtype: V1ListMeta """ return self._metadata @metadata.setter def metadata(self, metadata): """Sets the metadata of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. :param metadata: The metadata of this IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList. 
# noqa: E501 :type: V1ListMeta """ self._metadata = metadata def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList): return True return self.to_dict() != other.to_dict()
:rtype: str """ return self._api_version
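A minimal construction/serialization sketch; the import path assumes the generated kubernetes.client package layout, and the field values are hypothetical:

from kubernetes.client import IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList

binding_list = IoXK8sClusterAddonsV1alpha4ClusterResourceSetBindingList(
    api_version="addons.cluster.x-k8s.io/v1alpha4",
    kind="ClusterResourceSetBindingList",
    items=[],  # required: None fails client-side validation
)
print(binding_list.to_dict())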
video_service.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: google/ads/googleads/v1/services/video_service.proto package services import ( context "context" fmt "fmt" math "math" proto "github.com/golang/protobuf/proto" resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" _ "google.golang.org/genproto/googleapis/api/annotations" grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // Request message for // [VideoService.GetVideo][google.ads.googleads.v1.services.VideoService.GetVideo]. type GetVideoRequest struct { // The resource name of the video to fetch. ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *GetVideoRequest) Reset() { *m = GetVideoRequest{} } func (m *GetVideoRequest) String() string { return proto.CompactTextString(m) } func (*GetVideoRequest) ProtoMessage() {} func (*GetVideoRequest) Descriptor() ([]byte, []int) { return fileDescriptor_7856cda63e5cca90, []int{0} } func (m *GetVideoRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetVideoRequest.Unmarshal(m, b) } func (m *GetVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_GetVideoRequest.Marshal(b, m, deterministic) } func (m *GetVideoRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_GetVideoRequest.Merge(m, src) } func (m *GetVideoRequest) XXX_Size() int { return xxx_messageInfo_GetVideoRequest.Size(m) } func (m *GetVideoRequest) XXX_DiscardUnknown() { xxx_messageInfo_GetVideoRequest.DiscardUnknown(m) } var xxx_messageInfo_GetVideoRequest proto.InternalMessageInfo func (m *GetVideoRequest) GetResourceName() string { if m != nil
return "" } func init() { proto.RegisterType((*GetVideoRequest)(nil), "google.ads.googleads.v1.services.GetVideoRequest") } func init() { proto.RegisterFile("google/ads/googleads/v1/services/video_service.proto", fileDescriptor_7856cda63e5cca90) } var fileDescriptor_7856cda63e5cca90 = []byte{ // 349 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x31, 0x4b, 0xfb, 0x40, 0x18, 0xc6, 0x49, 0xfe, 0xf0, 0x47, 0x43, 0x45, 0xcc, 0x24, 0xc5, 0xa1, 0xd4, 0x0e, 0xa5, 0xe0, 0x9d, 0x51, 0x71, 0x38, 0x71, 0x48, 0x97, 0x3a, 0x49, 0xa9, 0x90, 0x41, 0x02, 0xe5, 0x6c, 0x5e, 0x42, 0xa0, 0xc9, 0x5b, 0xf3, 0x5e, 0xb3, 0x88, 0x8b, 0x5f, 0xc1, 0xc9, 0xd5, 0xb1, 0x1f, 0xc5, 0xd5, 0xaf, 0xe0, 0xe4, 0x27, 0x70, 0x94, 0xe4, 0x7a, 0x41, 0x85, 0xd0, 0xed, 0xc9, 0xe5, 0xf7, 0x3c, 0xf7, 0xbe, 0xcf, 0x39, 0x67, 0x31, 0x62, 0x3c, 0x07, 0x2e, 0x23, 0xe2, 0x5a, 0x96, 0xaa, 0xf0, 0x38, 0x41, 0x5e, 0x24, 0x33, 0x20, 0x5e, 0x24, 0x11, 0xe0, 0x74, 0xfd, 0xc9, 0x16, 0x39, 0x2a, 0x74, 0x3b, 0x1a, 0x65, 0x32, 0x22, 0x56, 0xbb, 0x58, 0xe1, 0x31, 0xe3, 0x6a, 0x1f, 0x35, 0xe5, 0xe6, 0x40, 0xb8, 0xcc, 0xeb, 0x60, 0x1d, 0xd8, 0x3e, 0x30, 0xf8, 0x22, 0xe1, 0x32, 0xcb, 0x50, 0x49, 0x95, 0x60, 0x46, 0xfa, 0x6f, 0xf7, 0xdc, 0xd9, 0x1d, 0x81, 0x0a, 0x4a, 0x7e, 0x02, 0xf7, 0x4b, 0x20, 0xe5, 0x1e, 0x3a, 0x3b, 0x26, 0x69, 0x9a, 0xc9, 0x14, 0xf6, 0xad, 0x8e, 0xd5, 0xdf, 0x9e, 0xb4, 0xcc, 0xe1, 0xb5, 0x4c, 0xe1, 0x64, 0x65, 0x39, 0xad, 0xca, 0x75, 0xa3, 0xc7, 0x72, 0x5f, 0x2c, 0x67, 0xcb, 0x24, 0xb9, 0x1e, 0xdb, 0xb4, 0x05, 0xfb, 0x73, 0x6b, 0xbb, 0xdf, 0x68, 0xa9, 0xd7, 0x62, 0x95, 0xa1, 0x7b, 0xfc, 0xf4, 0xfe, 0xf1, 0x6c, 0x0f, 0xdc, 0x7e, 0xb9, 0xf3, 0xc3, 0xaf, 0x51, 0x2f, 0x67, 0x4b, 0x52, 0x98, 0x42, 0x4e, 0x7c, 0xa0, 0x4b, 0x20, 0x3e, 0x78, 0x1c, 0x7e, 0x59, 0x4e, 0x6f, 0x86, 0xe9, 0xc6, 0xa1, 0x86, 0x7b, 0x3f, 0x57, 0x1a, 0x97, 0x05, 0x8d, 0xad, 0xdb, 0xab, 0xb5, 0x2d, 0xc6, 0xb9, 0xcc, 0x62, 0x86, 0x79, 0xcc, 0x63, 0xc8, 0xaa, 0xfa, 0x4c, 0xff, 0x8b, 0x84, 0x9a, 0x9f, 0xf9, 0xc2, 0x88, 0x57, 0xfb, 0xdf, 0xc8, 0xf7, 0x57, 0x76, 0x67, 0xa4, 0x03, 0xfd, 0x88, 0x98, 0x96, 0xa5, 0x0a, 0x3c, 0xb6, 0xbe, 0x98, 0xde, 0x0c, 0x12, 0xfa, 0x11, 0x85, 0x35, 0x12, 0x06, 0x5e, 0x68, 0x90, 0x4f, 0xbb, 0xa7, 0xcf, 0x85, 0xf0, 0x23, 0x12, 0xa2, 0x86, 0x84, 0x08, 0x3c, 0x21, 0x0c, 0x76, 0xf7, 0xbf, 0x9a, 0xf3, 0xf4, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x24, 0x5c, 0xd6, 0x52, 0x8d, 0x02, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // VideoServiceClient is the client API for VideoService service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type VideoServiceClient interface { // Returns the requested video in full detail. GetVideo(ctx context.Context, in *GetVideoRequest, opts ...grpc.CallOption) (*resources.Video, error) } type videoServiceClient struct { cc *grpc.ClientConn } func NewVideoServiceClient(cc *grpc.ClientConn) VideoServiceClient { return &videoServiceClient{cc} } func (c *videoServiceClient) GetVideo(ctx context.Context, in *GetVideoRequest, opts ...grpc.CallOption) (*resources.Video, error) { out := new(resources.Video) err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.VideoService/GetVideo", in, out, opts...) 
if err != nil { return nil, err } return out, nil } // VideoServiceServer is the server API for VideoService service. type VideoServiceServer interface { // Returns the requested video in full detail. GetVideo(context.Context, *GetVideoRequest) (*resources.Video, error) } func RegisterVideoServiceServer(s *grpc.Server, srv VideoServiceServer) { s.RegisterService(&_VideoService_serviceDesc, srv) } func _VideoService_GetVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetVideoRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { return srv.(VideoServiceServer).GetVideo(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, FullMethod: "/google.ads.googleads.v1.services.VideoService/GetVideo", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(VideoServiceServer).GetVideo(ctx, req.(*GetVideoRequest)) } return interceptor(ctx, in, info, handler) } var _VideoService_serviceDesc = grpc.ServiceDesc{ ServiceName: "google.ads.googleads.v1.services.VideoService", HandlerType: (*VideoServiceServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "GetVideo", Handler: _VideoService_GetVideo_Handler, }, }, Streams: []grpc.StreamDesc{}, Metadata: "google/ads/googleads/v1/services/video_service.proto", }
{ return m.ResourceName }
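A client-side sketch, inside a hypothetical helper in this package; the endpoint, customer id, and video id are placeholders, and a real Google Ads call additionally needs OAuth credentials and a developer token attached to the context:

conn, err := grpc.Dial("googleads.googleapis.com:443", grpc.WithInsecure()) // TLS/auth elided for brevity
if err != nil {
	panic(err)
}
defer conn.Close()
client := NewVideoServiceClient(conn)
video, err := client.GetVideo(context.Background(), &GetVideoRequest{
	ResourceName: "customers/1234567890/videos/abcd", // hypothetical resource name
})
if err != nil {
	panic(err)
}
_ = video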
image_test.go
// Copyright 2016 Hajime Hoshi // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package ebiten_test import ( "bytes" "errors" "image" "image/color" "image/draw" _ "image/png" "math" "os" "testing" . "github.com/hajimehoshi/ebiten" "github.com/hajimehoshi/ebiten/ebitenutil" "github.com/hajimehoshi/ebiten/examples/resources/images" emath "github.com/hajimehoshi/ebiten/internal/math" "github.com/hajimehoshi/ebiten/internal/testflock" ) func TestMain(m *testing.M) { testflock.Lock() defer testflock.Unlock() code := 0 // Run an Ebiten process so that (*Image).At is available. regularTermination := errors.New("regular termination") f := func(screen *Image) error { code = m.Run() return regularTermination } if err := Run(f, 320, 240, 1, "Test"); err != nil && err != regularTermination { panic(err) } os.Exit(code) } func openEbitenImage() (*Image, image.Image, error) { img, _, err := image.Decode(bytes.NewReader(images.Ebiten_png)) if err != nil { return nil, nil, err } eimg, err := NewImageFromImage(img, FilterNearest) if err != nil { return nil, nil, err } return eimg, img, nil } func abs(x int) int { if x < 0 { return -x } return x } // sameColors compares c1 and c2 and returns a boolean value indicating // if the two colors are (almost) same. // // Pixels read from GPU might include errors (#492), and // sameColors considers such errors as delta. 
func sameColors(c1, c2 color.RGBA, delta int) bool { return abs(int(c1.R)-int(c2.R)) <= delta && abs(int(c1.G)-int(c2.G)) <= delta && abs(int(c1.B)-int(c2.B)) <= delta && abs(int(c1.A)-int(c2.A)) <= delta } func TestImagePixels(t *testing.T) { img0, img, err := openEbitenImage() if err != nil { t.Fatal(err) return } if got := img0.Bounds().Size(); got != img.Bounds().Size() { t.Fatalf("img size: got %d; want %d", got, img.Bounds().Size()) } w, h := img0.Bounds().Size().X, img0.Bounds().Size().Y // Check out of range part w2, h2 := emath.NextPowerOf2Int(w), emath.NextPowerOf2Int(h) for j := -100; j < h2+100; j++ { for i := -100; i < w2+100; i++ { got := img0.At(i, j) want := color.RGBAModel.Convert(img.At(i, j)) if got != want { t.Errorf("img0 At(%d, %d): got %#v; want %#v", i, j, got, want) } } } } func TestImageComposition(t *testing.T) { img2Color := color.NRGBA{0x24, 0x3f, 0x6a, 0x88} img3Color := color.NRGBA{0x85, 0xa3, 0x08, 0xd3} // TODO: Rename this to img0 img1, _, err := openEbitenImage() if err != nil { t.Fatal(err) return } w, h := img1.Bounds().Size().X, img1.Bounds().Size().Y img2, err := NewImage(w, h, FilterNearest) if err != nil { t.Fatal(err) return } img3, err := NewImage(w, h, FilterNearest) if err != nil { t.Fatal(err) return } if err := img2.Fill(img2Color); err != nil { t.Fatal(err) return } if err := img3.Fill(img3Color); err != nil { t.Fatal(err) return } img_12_3, err := NewImage(w, h, FilterNearest) if err != nil { t.Fatal(err) return } if err := img2.DrawImage(img1, nil); err != nil { t.Fatal(err) return } if err := img3.DrawImage(img2, nil); err != nil { t.Fatal(err) return } if err := img_12_3.DrawImage(img3, nil); err != nil { t.Fatal(err) return } if err := img2.Fill(img2Color); err != nil { t.Fatal(err) return } if err := img3.Fill(img3Color); err != nil { t.Fatal(err) return } img_1_23, err := NewImage(w, h, FilterNearest) if err != nil { t.Fatal(err) return } if err := img3.DrawImage(img2, nil); err != nil { t.Fatal(err) return } if err := img3.DrawImage(img1, nil); err != nil { t.Fatal(err) return } if err := img_1_23.DrawImage(img3, nil); err != nil { t.Fatal(err) return } for j := 0; j < h; j++ { for i := 0; i < w; i++ { c1 := img_12_3.At(i, j).(color.RGBA) c2 := img_1_23.At(i, j).(color.RGBA) if !sameColors(c1, c2, 1) { t.Errorf("img_12_3.At(%d, %d) = %#v; img_1_23.At(%[1]d, %[2]d) = %#[4]v", i, j, c1, c2) } if c1.A == 0 { t.Fatalf("img_12_3.At(%d, %d).A = 0; nothing is rendered?", i, j) } if c2.A == 0 { t.Fatalf("img_1_23.At(%d, %d).A = 0; nothing is rendered?", i, j) } } } } func TestImageSelf(t *testing.T) { // Note that mutex usages: without defer, unlocking is not called when panicing. 
defer func() { if r := recover(); r == nil { t.Errorf("DrawImage must panic but not") } }() img, _, err := openEbitenImage() if err != nil { t.Fatal(err) return } img.DrawImage(img, nil) } func TestImageScale(t *testing.T) { for _, scale := range []int{2, 3, 4} { img0, _, err := openEbitenImage() if err != nil { t.Fatal(err) return } w, h := img0.Size() img1, err := NewImage(w*scale, h*scale, FilterNearest) if err != nil { t.Fatal(err) return } op := &DrawImageOptions{} op.GeoM.Scale(float64(scale), float64(scale)) if err := img1.DrawImage(img0, op); err != nil { t.Fatal(err) return } for j := 0; j < h*scale; j++ { for i := 0; i < w*scale; i++ { c0 := img0.At(i/scale, j/scale).(color.RGBA) c1 := img1.At(i, j).(color.RGBA) if c0 != c1 { t.Fatalf("img0.At(%[1]d, %[2]d) should equal to img1.At(%[3]d, %[4]d) (with scale %[5]d) but not: %[6]v vs %[7]v", i/2, j/2, i, j, scale, c0, c1) } } } } } func TestImage90DegreeRotate(t *testing.T) { img0, _, err := openEbitenImage() if err != nil { t.Fatal(err) return } w, h := img0.Size() img1, err := NewImage(h, w, FilterNearest) if err != nil { t.Fatal(err) return } op := &DrawImageOptions{} op.GeoM.Rotate(math.Pi / 2) op.GeoM.Translate(float64(h), 0) if err := img1.DrawImage(img0, op); err != nil { t.Fatal(err) return } for j := 0; j < h; j++ { for i := 0; i < w; i++ { c0 := img0.At(i, j).(color.RGBA) c1 := img1.At(h-j-1, i).(color.RGBA) if c0 != c1 { t.Errorf("img0.At(%[1]d, %[2]d) should equal to img1.At(%[3]d, %[4]d) but not: %[5]v vs %[6]v", i, j, h-j-1, i, c0, c1) } } } } func TestImageDotByDotInversion(t *testing.T) { img0, _, err := openEbitenImage() if err != nil { t.Fatal(err) return } w, h := img0.Size() img1, err := NewImage(w, h, FilterNearest) if err != nil { t.Fatal(err) return } op := &DrawImageOptions{} op.GeoM.Rotate(math.Pi) op.GeoM.Translate(float64(w), float64(h)) if err := img1.DrawImage(img0, op); err != nil { t.Fatal(err) return } for j := 0; j < h; j++ { for i := 0; i < w; i++ { c0 := img0.At(i, j).(color.RGBA) c1 := img1.At(w-i-1, h-j-1).(color.RGBA) if c0 != c1 { t.Errorf("img0.At(%[1]d, %[2]d) should equal to img1.At(%[3]d, %[4]d) but not: %[5]v vs %[6]v", i, j, w-i-1, h-j-1, c0, c1) } } } } func TestImageReplacePixels(t *testing.T) { // Create a dummy image so that the shared texture is used and origImg's position is shfited. dummyImg, _ := NewImageFromImage(image.NewRGBA(image.Rect(0, 0, 16, 16)), FilterDefault) defer dummyImg.Dispose() _, origImg, err := openEbitenImage() if err != nil { t.Fatal(err) return } // Convert to *image.RGBA just in case. img := image.NewRGBA(origImg.Bounds()) draw.Draw(img, img.Bounds(), origImg, image.ZP, draw.Src) size := img.Bounds().Size() img0, err := NewImage(size.X, size.Y, FilterNearest) if err != nil { t.Fatal(err) return } if err := img0.ReplacePixels(img.Pix); err != nil { t.Fatal(err) return } for j := 0; j < img0.Bounds().Size().Y; j++ { for i := 0; i < img0.Bounds().Size().X; i++ { got := img0.At(i, j) want := img.At(i, j) if got != want { t.Errorf("img0 At(%d, %d): got %#v; want %#v", i, j, got, want) } } } p := make([]uint8, 4*size.X*size.Y) for i := range p { p[i] = 0x80 } if err := img0.ReplacePixels(p); err != nil { t.Fatal(err) return } // Even if p is changed after calling ReplacePixel, img0 uses the original values. 
for i := range p { p[i] = 0 } for j := 0; j < img0.Bounds().Size().Y; j++ { for i := 0; i < img0.Bounds().Size().X; i++ { got := img0.At(i, j) want := color.RGBA{0x80, 0x80, 0x80, 0x80} if got != want { t.Errorf("img0 At(%d, %d): got %#v; want %#v", i, j, got, want) } } } } func TestImageDispose(t *testing.T) { img, err := NewImage(16, 16, FilterNearest) if err != nil { t.Fatal(err) return } img.Fill(color.White) if err := img.Dispose(); err != nil { t.Errorf("img.Dipose() returns error: %v", err) } // The color is transparent (color.RGBA{}). // Note that the value's type must be color.RGBA. got := img.At(0, 0) want := color.RGBA{} if got != want { t.Errorf("img.At(0, 0) got: %v, want: %v", got, want) } } func min(a, b int) int { if a < b { return a } return b } func TestImageCompositeModeLighter(t *testing.T) { img0, _, err := openEbitenImage() if err != nil { t.Fatal(err) return } w, h := img0.Size() img1, err := NewImage(w, h, FilterNearest) if err != nil { t.Fatal(err) return } if err := img1.Fill(color.RGBA{0x01, 0x02, 0x03, 0x04}); err != nil { t.Fatal(err) return } op := &DrawImageOptions{} op.CompositeMode = CompositeModeLighter if err := img1.DrawImage(img0, op); err != nil { t.Fatal(err) return } for j := 0; j < img1.Bounds().Size().Y; j++ { for i := 0; i < img1.Bounds().Size().X; i++ { got := img1.At(i, j).(color.RGBA) want := img0.At(i, j).(color.RGBA) want.R = uint8(min(0xff, int(want.R)+1)) want.G = uint8(min(0xff, int(want.G)+2)) want.B = uint8(min(0xff, int(want.B)+3)) want.A = uint8(min(0xff, int(want.A)+4)) if got != want { t.Errorf("img1 At(%d, %d): got %#v; want %#v", i, j, got, want) } } } } func TestNewImageFromEbitenImage(t *testing.T) { img, _, err := openEbitenImage() if err != nil { t.Fatal(err) return } if _, err := NewImageFromImage(img, FilterNearest); err != nil { t.Errorf("NewImageFromImage returns error: %v", err) } } func TestNewImageFromSubImage(t *testing.T) { _, img, err := openEbitenImage() if err != nil { t.Fatal(err) return } w, h := img.Bounds().Dx(), img.Bounds().Dy() subImg := img.(*image.NRGBA).SubImage(image.Rect(1, 1, w-1, h-1)) eimg, err := NewImageFromImage(subImg, FilterNearest) if err != nil { t.Fatal(err) return } sw, sh := subImg.Bounds().Dx(), subImg.Bounds().Dy() w2, h2 := eimg.Size() if w2 != sw { t.Errorf("eimg Width: got %#v; want %#v", w2, sw) } if h2 != sh { t.Errorf("eimg Width: got %#v; want %#v", h2, sh) } for j := 0; j < h2; j++ { for i := 0; i < w2; i++ { got := eimg.At(i, j) want := color.RGBAModel.Convert(img.At(i+1, j+1)) if got != want { t.Errorf("img0 At(%d, %d): got %#v; want %#v", i, j, got, want) } } } } type mutableRGBA struct { r, g, b, a uint8 } func (c *mutableRGBA) RGBA() (r, g, b, a uint32) { return uint32(c.r) * 0x101, uint32(c.g) * 0x101, uint32(c.b) * 0x101, uint32(c.a) * 0x101 } func TestImageFill(t *testing.T) { w, h := 10, 10 img, err := NewImage(w, h, FilterNearest) if err != nil { t.Fatal(err) return } clr := &mutableRGBA{0x80, 0x80, 0x80, 0x80} if err := img.Fill(clr); err != nil { t.Fatal(err) return } clr.r = 0 for j := 0; j < h; j++ { for i := 0; i < w; i++ { got := img.At(i, j) want := color.RGBA{0x80, 0x80, 0x80, 0x80} if got != want { t.Errorf("img At(%d, %d): got %#v; want %#v", i, j, got, want) } } } } // Issue #317, #558 func TestImageEdge(t *testing.T) { const ( img0Width = 16 img0Height = 16 img1Width = 32 img1Height = 32 ) img0, _ := NewImage(img0Width, img0Height, FilterNearest) pixels := make([]uint8, 4*img0Width*img0Height) for j := 0; j < img0Height; j++ { for i := 0; i < img0Width; i++ { 
idx := 4 * (i + j*img0Width) switch { case j < img0Height/2: pixels[idx] = 0xff pixels[idx+1] = 0 pixels[idx+2] = 0 pixels[idx+3] = 0xff default: pixels[idx] = 0 pixels[idx+1] = 0xff pixels[idx+2] = 0 pixels[idx+3] = 0xff } } } img0.ReplacePixels(pixels) img1, _ := NewImage(img1Width, img1Height, FilterDefault) red := color.RGBA{0xff, 0, 0, 0xff} transparent := color.RGBA{0, 0, 0, 0} angles := []float64{} for a := 0; a < 1440; a++ { angles = append(angles, float64(a)/1440*2*math.Pi) } for a := 0; a < 4096; a++ { angles = append(angles, float64(a)/4096*2*math.Pi) } for _, f := range []Filter{FilterNearest, FilterLinear} { for _, a := range angles { img1.Clear() op := &DrawImageOptions{} w, h := img0.Size() r := image.Rect(0, 0, w, h/2) op.SourceRect = &r op.GeoM.Translate(-float64(img0Width)/2, -float64(img0Height)/2) op.GeoM.Rotate(a) op.GeoM.Translate(img1Width/2, img1Height/2) op.Filter = f img1.DrawImage(img0, op) for j := 0; j < img1Height; j++ { for i := 0; i < img1Width; i++ { c := img1.At(i, j) if c == transparent { continue } switch f { case FilterNearest: if c == red { continue } case FilterLinear: _, g, b, _ := c.RGBA() if g == 0 && b == 0 { continue } } t.Errorf("img1.At(%d, %d) (filter: %d, angle: %f) want: red or transparent, got: %v", i, j, f, a, c) } } } } } // Issue #419 func TestImageTooManyFill(t *testing.T) { const width = 1024 indexToColor := func(index int) uint8 { return uint8((17*index + 0x40) % 256) } src, _ := NewImage(1, 1, FilterNearest) dst, _ := NewImage(width, 1, FilterNearest) for i := 0; i < width; i++ { c := indexToColor(i) src.Fill(color.RGBA{c, c, c, 0xff}) op := &DrawImageOptions{} op.GeoM.Translate(float64(i), 0) dst.DrawImage(src, op) } for i := 0; i < width; i++ { c := indexToColor(i) got := dst.At(i, 0).(color.RGBA) want := color.RGBA{c, c, c, 0xff} if !sameColors(got, want, 1) { t.Errorf("dst.At(%d, %d): got %#v, want: %#v", i, 0, got, want) } } } func BenchmarkDrawImage(b *testing.B) { img0, _ := NewImage(16, 16, FilterNearest) img1, _ := NewImage(16, 16, FilterNearest) op := &DrawImageOptions{} for i := 0; i < b.N; i++ { img0.DrawImage(img1, op) } } func TestImageLinear(t *testing.T) { src, _ := NewImage(32, 32, FilterDefault) dst, _ := NewImage(64, 64, FilterDefault) src.Fill(color.RGBA{0, 0xff, 0, 0xff}) ebitenutil.DrawRect(src, 8, 8, 16, 16, color.RGBA{0xff, 0, 0, 0xff}) op := &DrawImageOptions{} op.GeoM.Translate(8, 8) op.GeoM.Scale(2, 2) r := image.Rect(8, 8, 24, 24) op.SourceRect = &r op.Filter = FilterLinear dst.DrawImage(src, op) for j := 0; j < 64; j++ { for i := 0; i < 64; i++ { c := dst.At(i, j).(color.RGBA) got := c.G want := uint8(0) if abs(int(c.G)-int(want)) > 1 { t.Errorf("dst At(%d, %d).G: got %#v, want: %#v", i, j, got, want) } } } } func TestImageOutside(t *testing.T)
func TestImageOutsideUpperLeft(t *testing.T) { src, _ := NewImage(4, 4, FilterNearest) dst1, _ := NewImage(16, 16, FilterNearest) dst2, _ := NewImage(16, 16, FilterNearest) src.Fill(color.RGBA{0xff, 0, 0, 0xff}) op := &DrawImageOptions{} op.GeoM.Rotate(math.Pi / 4) r := image.Rect(-4, -4, 8, 8) op.SourceRect = &r dst1.DrawImage(src, op) op = &DrawImageOptions{} op.GeoM.Translate(4, 4) op.GeoM.Rotate(math.Pi / 4) dst2.DrawImage(src, op) for j := 0; j < 16; j++ { for i := 0; i < 16; i++ { got := dst1.At(i, j).(color.RGBA) want := dst2.At(i, j).(color.RGBA) if got != want { t.Errorf("got: dst1.At(%d, %d): %#v, want: dst2.At(%d, %d): %#v", i, j, got, i, j, want) } } } } func TestImageSize(t *testing.T) { const ( w = 17 h = 31 ) img, _ := NewImage(w, h, FilterDefault) gotW, gotH := img.Size() if gotW != w { t.Errorf("got: %d, want: %d", gotW, w) } if gotH != h { t.Errorf("got: %d, want: %d", gotH, h) } } func TestImageSize1(t *testing.T) { src, _ := NewImage(1, 1, FilterNearest) dst, _ := NewImage(1, 1, FilterNearest) src.Fill(color.White) dst.DrawImage(src, nil) got := src.At(0, 0).(color.RGBA) want := color.RGBA{0xff, 0xff, 0xff, 0xff} if !sameColors(got, want, 1) { t.Errorf("got: %#v, want: %#v", got, want) } } func TestImageSize4096(t *testing.T) { src, _ := NewImage(4096, 4096, FilterNearest) dst, _ := NewImage(4096, 4096, FilterNearest) pix := make([]byte, 4096*4096*4) for i := 0; i < 4096; i++ { j := 4095 idx := 4 * (i + j*4096) pix[idx] = uint8(i + j) pix[idx+1] = uint8((i + j) >> 8) pix[idx+2] = uint8((i + j) >> 16) pix[idx+3] = 0xff } for j := 0; j < 4096; j++ { i := 4095 idx := 4 * (i + j*4096) pix[idx] = uint8(i + j) pix[idx+1] = uint8((i + j) >> 8) pix[idx+2] = uint8((i + j) >> 16) pix[idx+3] = 0xff } src.ReplacePixels(pix) dst.DrawImage(src, nil) for i := 4095; i < 4096; i++ { j := 4095 got := dst.At(i, j).(color.RGBA) want := color.RGBA{uint8(i + j), uint8((i + j) >> 8), uint8((i + j) >> 16), 0xff} if got != want { t.Errorf("At(%d, %d): got: %#v, want: %#v", i, j, got, want) } } for j := 4095; j < 4096; j++ { i := 4095 got := dst.At(i, j).(color.RGBA) want := color.RGBA{uint8(i + j), uint8((i + j) >> 8), uint8((i + j) >> 16), 0xff} if got != want { t.Errorf("At(%d, %d): got: %#v, want: %#v", i, j, got, want) } } } func TestImageCopy(t *testing.T) { defer func() { if r := recover(); r == nil { t.Errorf("copying image and using it should panic") } }() img0, _ := NewImage(256, 256, FilterDefault) img1 := *img0 img1.Fill(color.Transparent) } func TestImageStretch(t *testing.T) { img0, _ := NewImage(16, 17, FilterDefault) pix := make([]byte, 4*16*17) for i := 0; i < 16*16; i++ { pix[4*i] = 0xff pix[4*i+3] = 0xff } for i := 0; i < 16; i++ { pix[4*(16*16+i)+1] = 0xff pix[4*(16*16+i)+3] = 0xff } img0.ReplacePixels(pix) // TODO: 4096 doesn't pass on MacBook Pro (#611). 
const h = 2048 img1, _ := NewImage(16, h, FilterDefault) for i := 1; i < h; i++ { img1.Clear() op := &DrawImageOptions{} op.GeoM.Scale(1, float64(i)/16) r := image.Rect(0, 0, 16, 16) op.SourceRect = &r img1.DrawImage(img0, op) for j := -1; j <= 1; j++ { got := img1.At(0, i+j).(color.RGBA) want := color.RGBA{} if j < 0 { want = color.RGBA{0xff, 0, 0, 0xff} } if got != want { t.Errorf("At(%d, %d) (i=%d): got: %#v, want: %#v", 0, i+j, i, got, want) } } } } func TestSprites(t *testing.T) { const ( width = 512 height = 512 ) src, _ := NewImage(4, 4, FilterNearest) src.Fill(color.RGBA{0xff, 0xff, 0xff, 0xff}) dst, _ := NewImage(width, height, FilterNearest) for j := 0; j < height/4; j++ { for i := 0; i < width/4; i++ { op := &DrawImageOptions{} op.GeoM.Translate(float64(i*4), float64(j*4)) dst.DrawImage(src, op) } } for j := 0; j < height/4; j++ { for i := 0; i < width/4; i++ { got := dst.At(i*4, j*4).(color.RGBA) want := color.RGBA{0xff, 0xff, 0xff, 0xff} if !sameColors(got, want, 1) { t.Errorf("dst.At(%d, %d): got %#v, want: %#v", i*4, j*4, got, want) } } } } func TestMipmap(t *testing.T) { src, _, err := openEbitenImage() if err != nil { t.Fatal(err) return } w, h := src.Size() l1, _ := NewImage(w/2, h/2, FilterDefault) op := &DrawImageOptions{} op.GeoM.Scale(1/2.0, 1/2.0) op.Filter = FilterLinear l1.DrawImage(src, op) l1w, l1h := l1.Size() l2, _ := NewImage(l1w/2, l1h/2, FilterDefault) op = &DrawImageOptions{} op.GeoM.Scale(1/2.0, 1/2.0) op.Filter = FilterLinear l2.DrawImage(l1, op) gotDst, _ := NewImage(w, h, FilterDefault) op = &DrawImageOptions{} op.GeoM.Scale(1/5.0, 1/5.0) op.Filter = FilterLinear gotDst.DrawImage(src, op) wantDst, _ := NewImage(w, h, FilterDefault) op = &DrawImageOptions{} op.GeoM.Scale(4.0/5.0, 4.0/5.0) op.Filter = FilterLinear wantDst.DrawImage(l2, op) for j := 0; j < h; j++ { for i := 0; i < h; i++ { got := gotDst.At(i, j).(color.RGBA) want := wantDst.At(i, j).(color.RGBA) if !sameColors(got, want, 1) { t.Errorf("At(%d, %d): got: %#v, want: %#v", i, j, got, want) } } } }
{ src, _ := NewImage(5, 10, FilterNearest) // internal texture size is 8x16. dst, _ := NewImage(4, 4, FilterNearest) src.Fill(color.RGBA{0xff, 0, 0, 0xff}) cases := []struct { X, Y, Width, Height int }{ {-4, -4, 4, 4}, {5, 0, 4, 4}, {0, 10, 4, 4}, {5, 10, 4, 4}, {8, 0, 4, 4}, {0, 16, 4, 4}, {8, 16, 4, 4}, {8, -4, 4, 4}, {-4, 16, 4, 4}, {5, 10, 0, 0}, {5, 10, -2, -2}, // non-well-formed rectangle } for _, c := range cases { dst.Clear() op := &DrawImageOptions{} op.GeoM.Translate(0, 0) op.SourceRect = &image.Rectangle{ Min: image.Pt(c.X, c.Y), Max: image.Pt(c.X+c.Width, c.Y+c.Height), } dst.DrawImage(src, op) for j := 0; j < 4; j++ { for i := 0; i < 4; i++ { got := dst.At(i, j).(color.RGBA) want := color.RGBA{0, 0, 0, 0} if got != want { t.Errorf("src(x: %d, y: %d, w: %d, h: %d), dst At(%d, %d): got %#v, want: %#v", c.X, c.Y, c.Width, c.Height, i, j, got, want) } } } } }
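The rotation and out-of-range tests above all drive the same draw pattern: restrict sampling with DrawImageOptions.SourceRect, then chain GeoM transforms, which apply in the order they are added. A minimal sketch of that pattern, assuming the Ebiten v1 API used in this file (the helper name drawTopHalfRotated is illustrative):

package main

import (
	"image"

	"github.com/hajimehoshi/ebiten"
)

// drawTopHalfRotated draws the top half of src onto dst, rotated by angle
// radians and centered on dst, mirroring the transform order in the tests above.
func drawTopHalfRotated(dst, src *ebiten.Image, angle float64) {
	w, h := src.Size()
	op := &ebiten.DrawImageOptions{}
	r := image.Rect(0, 0, w, h/2)
	op.SourceRect = &r // sample only the top half of src
	op.GeoM.Translate(-float64(w)/2, -float64(h)/2) // move the pivot to the origin
	op.GeoM.Rotate(angle)                           // rotate about the origin
	dw, dh := dst.Size()
	op.GeoM.Translate(float64(dw)/2, float64(dh)/2) // position the result at dst's center
	op.Filter = ebiten.FilterNearest
	dst.DrawImage(src, op)
}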
writer.go
package bulk import ( "errors" "time" ) // A function of the FlushFunc type, once called, will receive // a buffer containing all the data from writes made after // the previous FlushFunc call. The data buffer will be cleaned up // automatically after this function is executed (i.e. you do // not need to clean it up yourself). // Any error returned from this function will be passed to an ErrorHandlerFunc function type FlushFunc func(data []byte) error // ErrorHandlerFunc is a function that receives errors that occurred in FlushFunc // It will also get a copy of the buffer so that you can analyze it. // After this function is called, the buffer is destroyed automatically // by the calling code (i.e. you do not need to clean it up yourself). type ErrorHandlerFunc func(data []byte, err error) // NoErrorHandler is an empty function that is used when no ErrorHandler is required // One may always process all their errors directly in the FlushFunc. var NoErrorHandler = func(data []byte, err error) {} // Writer is an implementation of the io.WriteCloser interface.
type Writer struct { ticker *time.Ticker tickerCh <-chan time.Time buf []byte data chan []byte quit chan bool flusher chan bool closed bool flushFunc FlushFunc errorHandler ErrorHandlerFunc } // NewBulkWriter creates a new bulk.Writer instance // flushInterval - how often to call the flushFunc; a nonpositive value effectively turns // off automatic flushing // flushFunc - defines what to do on flush func NewBulkWriter(flushInterval time.Duration, flushFunc FlushFunc) *Writer { return NewBulkWriterWithErrorHandler(flushInterval, flushFunc, NoErrorHandler) } // NewBulkWriterWithErrorHandler creates a new bulk.Writer instance // flushInterval - how often to call the flushFunc; a nonpositive value effectively turns // off automatic flushing // flushFunc - defines what to do on flush // errorHandler - whenever your flushFunc returns an error, it can be processed in this function func NewBulkWriterWithErrorHandler(flushInterval time.Duration, flushFunc FlushFunc, errorHandler ErrorHandlerFunc) *Writer { bw := &Writer{ buf: make([]byte, 0), data: make(chan []byte), quit: make(chan bool), flushFunc: flushFunc, errorHandler: errorHandler, flusher: make(chan bool), } if flushInterval > 0 { bw.ticker = time.NewTicker(flushInterval) bw.tickerCh = bw.ticker.C } else { bw.tickerCh = make(chan time.Time) } go bw.processor() return bw } func (b *Writer) flush() { if len(b.buf) == 0 { return } if err := b.flushFunc(b.buf); err != nil { b.errorHandler(b.buf, err) } b.buf = []byte{} } func (b *Writer) processor() { loop: for { select { case d := <-b.data: b.buf = append(b.buf, d...) case <-b.flusher: b.flush() case <-b.tickerCh: b.flush() case <-b.quit: b.flush() break loop } } } // Write is an implementation of an io.Writer interface. The data is appended to a temporary // buffer that will be cleaned up on flush. // It will return an error if called after Close() was called. func (b *Writer) Write(data []byte) (n int, err error) { if b.closed { return 0, errors.New("writing on a closed bulk.Writer") } b.data <- data return len(data), nil } // Flush forces a buffer flush. It is mainly suited for buffer flushing // when automatic flushing is turned off, but you may call it even // if automatic flushing is turned on. // It will return an error if called after Close() was called. func (b *Writer) Flush() error { if b.closed { return errors.New("flushing a closed bulk.Writer") } b.flusher <- true return nil } // Close is an implementation of an io.Closer interface. // It closes the writer, stops any activity, and any subsequent operations // will result in an error. // It will return an error if called after Close() was called. func (b *Writer) Close() error { if b.closed { return errors.New("closing a closed bulk.Writer") } b.closed = true close(b.quit) if b.ticker != nil { b.ticker.Stop() } return nil }
// It lets you create a buffered writer that can flush (and thus physically write) // the buffer on a time ticker or via manual calls to Writer.Flush().
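A hypothetical usage sketch of the bulk.Writer defined above; the import path "bulk" is assumed from the package name, and only the constructors and methods shown in this file are used:

package main

import (
	"fmt"
	"time"

	"bulk" // assumed import path for the package above
)

func main() {
	// Flush accumulated writes once per second; flush errors go to NoErrorHandler.
	w := bulk.NewBulkWriter(time.Second, func(data []byte) error {
		fmt.Printf("flushed %d bytes: %q\n", len(data), data)
		return nil
	})
	w.Write([]byte("hello "))
	w.Write([]byte("world"))
	w.Flush()                         // force an immediate flush instead of waiting for the ticker
	time.Sleep(50 * time.Millisecond) // give the processor goroutine time to run
	w.Close()                         // stops the ticker and triggers a final flush
}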
main.go
package main import ( "bytes" "encoding/json" "fmt" "io/ioutil" "net/http" "strings" "flag" "log" "path/filepath" "strconv" "github.com/gregjones/httpcache" "github.com/if1live/staticfilecache" "github.com/jhoonb/archivex" ) var cmd string var uri string var cid string var similarity float64 var workerCount int var filter int var output string func clusterIdList() []int { cids := []int{} for _, c := range strings.Split(cid, ",") { clusterid, err := strconv.Atoi(c) if err != nil { continue } cids = append(cids, clusterid) } return cids } type fetchConfig struct { URL string `json:"url"` ClusterIDList []int `json:"cluster_id_list"` Filter int `json:"filter"` Similarity float64 `json:"similarity"` } func (m *fetchConfig) Marshal() []byte { data, err := json.Marshal(m) if err != nil { panic(err) } var out bytes.Buffer json.Indent(&out, data, "", " ") return out.Bytes() } func makeClusterFromFlag() *clusterList { htmltext := getHTMLText(uri) links := parseHTMLText(htmltext) for i, link := range links { links[i] = ApplyFilter(link, filter) } return newClusterList(links, similarity) } func init() { flag.StringVar(&cmd, "cmd", "help", "command") flag.StringVar(&uri, "uri", "", "target uri") flag.StringVar(&cid, "cid", "", "cluster id") flag.Float64Var(&similarity, "similarity", 0.9, "similarity") flag.IntVar(&workerCount, "worker", 8, "worker count") flag.IntVar(&filter, "filter", 0, "link filter") flag.StringVar(&output, "output", "download.zip", "output zip filename") } func main() { flag.Parse() switch cmd { case "show": mainForShow() case "fetch": mainForFetch() default: mainForHelp() } } func mainForShow() { cluster := makeClusterFromFlag() if cid == "" { cluster.Show() } else { links := cluster.GetClusters(clusterIdList()) for i, link := range links { fmt.Printf("(%d/%d) %s\n", i+1, len(links), link) } } } func mainForHelp() { fmt.Println("help") } type fetchCommand struct { uri string idx int } type fetchResult struct { resp *http.Response uri string idx int } func workerFetch(id int, jobs <-chan *fetchCommand, results chan<- *fetchResult)
func mainForFetch() { cluster := makeClusterFromFlag() cids := clusterIdList() links := cluster.GetClusters(cids) linkcount := len(links) if linkcount == 0 { log.Printf("No link found, cid=%s\n", cid) return } jobs := make(chan *fetchCommand, linkcount) fetchResults := make(chan *fetchResult, linkcount) zipFinishCh := make(chan bool, linkcount) zip := new(archivex.ZipFile) zip.Create(output) // Save the metadata that was used for this download config := &fetchConfig{ URL: uri, ClusterIDList: cids, Filter: filter, Similarity: similarity, } zip.Add("metadata.json", config.Marshal()) go func(z *archivex.ZipFile, count int, results chan *fetchResult, finishCh chan bool) { for i := 1; i <= count; i++ { result := <-results log.Printf("complete (%d/%d)\n", i, linkcount) bytes, _ := ioutil.ReadAll(result.resp.Body) result.resp.Body.Close() basename := filepath.Base(result.uri) filename := MakeFilename(basename, result.idx) z.Add(filename, bytes) } finishCh <- true }(zip, linkcount, fetchResults, zipFinishCh) for w := 1; w <= workerCount; w++ { go workerFetch(w, jobs, fetchResults) } for i, link := range links { jobs <- &fetchCommand{ uri: link, idx: i, } } // Enqueue termination sentinels; each fetch worker that receives one will shut itself down for i := 0; i < linkcount; i++ { jobs <- &fetchCommand{ uri: "", idx: -1, } } // Exit once the zip writing is confirmed to be finished <-zipFinishCh zip.Close() }
{ for req := range jobs { if req.uri == "" { return } imguri := req.uri imguri = makeSafeImageURI(imguri, uri) log.Printf("[worker %d] download %s\n", id, imguri) cachedir := "./_cache_static" cache := staticfilecache.New(cachedir) tp := httpcache.NewTransport(cache) client := &http.Client{Transport: tp} resp, _ := client.Get(imguri) results <- &fetchResult{ resp: resp, uri: imguri, idx: req.idx, } } }
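mainForFetch and workerFetch above shut the worker pool down with sentinel jobs (an empty URI) rather than by closing the jobs channel. A self-contained sketch of just that pattern, with illustrative names:

package main

import "fmt"

type job struct{ uri string }

// worker processes jobs until it receives a sentinel (empty URI).
func worker(id int, jobs <-chan job, results chan<- string) {
	for j := range jobs {
		if j.uri == "" { // sentinel: stop this worker
			return
		}
		results <- fmt.Sprintf("worker %d handled %s", id, j.uri)
	}
}

func main() {
	const workers = 3
	uris := []string{"a", "b", "c", "d"}
	jobs := make(chan job, len(uris)+workers)
	results := make(chan string, len(uris))
	for w := 1; w <= workers; w++ {
		go worker(w, jobs, results)
	}
	for _, u := range uris {
		jobs <- job{uri: u}
	}
	for i := 0; i < workers; i++ { // one sentinel per worker suffices
		jobs <- job{}
	}
	for i := 0; i < len(uris); i++ {
		fmt.Println(<-results)
	}
}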
diff.py
#!/usr/bin/env python """A kernel that compares two ASCII files and writes the number of differences between them to an output file. """ __author__ = "Ioannis Paraskevakos <[email protected]>" __copyright__ = "Copyright 2014, http://radical.rutgers.edu" __license__ = "MIT" from copy import deepcopy from radical.ensemblemd.exceptions import ArgumentError from radical.ensemblemd.exceptions import NoKernelConfigurationError from radical.ensemblemd.kernel_plugins.kernel_base import KernelBase # ------------------------------------------------------------------------------ # _KERNEL_INFO = { "name": "misc.diff", "description": "Counts the differences between two ASCII files.", "arguments": {"--inputfile1=": { "mandatory": True, "description": "The first input ASCII file." }, "--inputfile2=": { "mandatory": True, "description": "The second input ASCII file." }, "--outputfile=": { "mandatory": True, "description": "The output file containing the difference count." }, }, "machine_configs": { "*": { "environment" : None, "pre_exec" : None, "executable" : "diff", "uses_mpi" : False } } } # ------------------------------------------------------------------------------ # class Kernel(KernelBase): # -------------------------------------------------------------------------- # def __init__(self): """Constructor. """ super(Kernel, self).__init__(_KERNEL_INFO) # -------------------------------------------------------------------------- # @staticmethod def
(): return _KERNEL_INFO["name"] # -------------------------------------------------------------------------- # def _bind_to_resource(self, resource_key): """(PRIVATE) Implements parent class method. """ if resource_key not in _KERNEL_INFO["machine_configs"]: if "*" in _KERNEL_INFO["machine_configs"]: # Fall-back to generic resource key resource_key = "*" else: raise NoKernelConfigurationError(kernel_name=_KERNEL_INFO["name"], resource_key=resource_key) cfg = _KERNEL_INFO["machine_configs"][resource_key] executable = "/bin/bash" arguments = ['-l', '-c', 'diff -U 0 {input1} {input2} | grep ^@ | wc -l > {output}'.format( input1 = self.get_arg("--inputfile1="), input2 = self.get_arg("--inputfile2="), output = self.get_arg("--outputfile=")) ] self._executable = executable self._arguments = arguments self._environment = cfg["environment"] self._uses_mpi = cfg["uses_mpi"] self._pre_exec = None
get_name
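For example, with hypothetical inputs a.txt and b.txt, the command line that _bind_to_resource generates, diff -U 0 a.txt b.txt | grep ^@ | wc -l, prints the number of unified-diff hunk headers (lines starting with @@), i.e. the number of changed regions, and the kernel redirects that count into the file named by --outputfile=.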
ibert_quant_modules.py
import decimal import numpy as np import torch from torch import nn from torch.nn import functional as F from torch.autograd import Function from ...utils import logging logger = logging.get_logger(__name__) class QuantEmbedding(nn.Module): def __init__( self, num_embeddings, embedding_dim, padding_idx=None, max_norm=None, norm_type=2.0, scale_grad_by_freq=False, sparse=False, _weight=None, weight_bit=8, momentum=0.95, quant_mode=False, ): super().__init__() self.num_ = num_embeddings self.dim = embedding_dim self.padding_idx = padding_idx self.max_norm = max_norm self.norm_type = norm_type self.scale_grad_by_freq = scale_grad_by_freq self.sparse = sparse self.weight = nn.Parameter(torch.zeros([num_embeddings, embedding_dim])) self.register_buffer("weight_scaling_factor", torch.zeros(1)) self.register_buffer("weight_integer", torch.zeros_like(self.weight)) self.weight_bit = weight_bit self.momentum = momentum self.quant_mode = quant_mode self.percentile_mode = False self.weight_function = SymmetricQuantFunction.apply def forward(self, x, positions=None, incremental_state=None): if not self.quant_mode: return ( F.embedding( x, self.weight, self.padding_idx, self.max_norm, self.norm_type, self.scale_grad_by_freq, self.sparse, ), None, ) w = self.weight w_transform = w.data.detach() w_min = w_transform.min().expand(1) w_max = w_transform.max().expand(1) self.weight_scaling_factor = symmetric_linear_quantization_params( self.weight_bit, w_min, w_max, False ) self.weight_integer = self.weight_function( self.weight, self.weight_bit, self.percentile_mode, self.weight_scaling_factor ) emb_int = F.embedding( x, self.weight_integer, self.padding_idx, self.max_norm, self.norm_type, self.scale_grad_by_freq, self.sparse, ) return emb_int * self.weight_scaling_factor, self.weight_scaling_factor class QuantAct(nn.Module): def __init__( self, activation_bit, act_range_momentum=0.95, per_channel=False, channel_len=None, quant_mode=False, ): super().__init__() self.activation_bit = activation_bit self.act_range_momentum = act_range_momentum self.quant_mode = quant_mode self.per_channel = per_channel self.percentile = False self.act_function = SymmetricQuantFunction.apply if not self.per_channel: self.register_buffer("x_min", torch.zeros(1)) self.register_buffer("x_max", torch.zeros(1)) self.register_buffer("act_scaling_factor", torch.zeros(1)) self.x_min -= 1e-5 self.x_max += 1e-5 else: raise NotImplementedError("per-channel mode is not currently supported for activation.") def __repr__(self): return ( f"{self.__class__.__name__}(activation_bit={self.activation_bit}, " f"quant_mode: {self.quant_mode}, Act_min: {self.x_min.item():.2f}, " f"Act_max: {self.x_max.item():.2f})" ) def forward( self, x, pre_act_scaling_factor=None, identity=None, identity_scaling_factor=None, specified_min=None, specified_max=None, ): x_act = x if identity is None else identity + x # collect running stats if training if self.training: assert not self.percentile, "percentile mode is not currently supported for activation." assert ( not self.per_channel ), "per-channel mode is not currently supported for activation."
x_min = x_act.data.min() x_max = x_act.data.max() assert ( x_max.isnan().sum() == 0 and x_min.isnan().sum() == 0 ), "NaN detected when computing min/max of the activation" # Initialization if self.x_min.min() > -1.1e-5 and self.x_max.max() < 1.1e-5: self.x_min = self.x_min + x_min self.x_max = self.x_max + x_max # exponential moving average (EMA) # use momentum to prevent the quantized values from changing greatly every iteration elif self.act_range_momentum == -1: self.x_min = torch.min(self.x_min, x_min) self.x_max = torch.max(self.x_max, x_max) else: self.x_min = self.x_min * self.act_range_momentum + x_min * ( 1 - self.act_range_momentum ) self.x_max = self.x_max * self.act_range_momentum + x_max * ( 1 - self.act_range_momentum ) if not self.quant_mode: return x_act, None x_min = self.x_min if specified_min is None else specified_min x_max = self.x_max if specified_max is None else specified_max self.act_scaling_factor = symmetric_linear_quantization_params( self.activation_bit, x_min, x_max, per_channel=self.per_channel ) if pre_act_scaling_factor is None: # this is for the input quantization quant_act_int = self.act_function( x, self.activation_bit, self.percentile, self.act_scaling_factor ) else: quant_act_int = FixedPointMul.apply( x, pre_act_scaling_factor, self.activation_bit, self.act_scaling_factor, identity, identity_scaling_factor, ) correct_output_scale = self.act_scaling_factor.view(-1) return quant_act_int * correct_output_scale, self.act_scaling_factor class QuantLinear(nn.Module): def __init__( self, in_features, out_features, bias=True, weight_bit=8, bias_bit=32, per_channel=False, quant_mode=False, ): super().__init__() self.in_features = in_features self.out_features = out_features self.weight = nn.Parameter(torch.zeros([out_features, in_features])) self.register_buffer("weight_integer", torch.zeros_like(self.weight)) self.register_buffer("fc_scaling_factor", torch.zeros(self.out_features)) if bias: self.bias = nn.Parameter(torch.zeros(out_features)) self.register_buffer("bias_integer", torch.zeros_like(self.bias)) self.weight_bit = weight_bit self.quant_mode = quant_mode self.per_channel = per_channel self.bias_bit = bias_bit self.percentile_mode = False self.weight_function = SymmetricQuantFunction.apply def __repr__(self): s = super().__repr__() s = f"({s} weight_bit={self.weight_bit}, quant_mode={self.quant_mode})" return s def forward(self, x, prev_act_scaling_factor=None): if not self.quant_mode: return F.linear(x, weight=self.weight, bias=self.bias), None # assert that prev_act_scaling_factor is a scalar tensor assert prev_act_scaling_factor is not None and prev_act_scaling_factor.shape == (1,), ( "Input activation to the QuantLinear layer should be globally (non-channel-wise) quantized. 
" "Please add a QuantAct layer with `per_channel = True` before this QuantAct layer" ) w = self.weight w_transform = w.data.detach() if self.per_channel: w_min, _ = torch.min(w_transform, dim=1, out=None) w_max, _ = torch.max(w_transform, dim=1, out=None) else: w_min = w_transform.min().expand(1) w_max = w_transform.max().expand(1) self.fc_scaling_factor = symmetric_linear_quantization_params( self.weight_bit, w_min, w_max, self.per_channel ) self.weight_integer = self.weight_function( self.weight, self.weight_bit, self.percentile_mode, self.fc_scaling_factor ) bias_scaling_factor = self.fc_scaling_factor * prev_act_scaling_factor if self.bias is not None: self.bias_integer = self.weight_function( self.bias, self.bias_bit, False, bias_scaling_factor ) prev_act_scaling_factor = prev_act_scaling_factor.view(1, -1) x_int = x / prev_act_scaling_factor return ( F.linear(x_int, weight=self.weight_integer, bias=self.bias_integer) * bias_scaling_factor, bias_scaling_factor, ) class IntGELU(qc.Module): def __init__(self, quant_mode=True, force_dequant="none"): super().__init__() self.quant_mode = quant_mode if force_dequant in ["nonlinear", "gelu"]: logger.info("Force dequantize gelu") self.quant_mode = False if not self.quant_mode: self.activation_fn = nn.GELU() self.k = 1.4142 self.const = 14 # dummy integer constant self.coeff = [-0.2888, -1.769, 1] # a(x+b)**2 + c self.coeff[2] /= self.coeff[0] def int_erf(self, x_int, scaling_factor): b_int = torch.floor(self.coeff[1] / scaling_factor) c_int = torch.floor(self.coeff[2] / scaling_factor**2) sign = torch.sign(x_int) abs_int = torch.min(torch.abs(x_int), -b_int) y_int = sign * ((abs_int + b_int) ** 2 + c_int) scaling_factor = scaling_factor**2 * self.coeff[0] # avoid overflow y_int = floor_ste.apply(y_int / 2**self.const) scaling_factor = scaling_factor * 2**self.const return y_int, scaling_factor def forward(self, x, scaling_factor=None): if not self.quant_mode: return self.activation_fn(x), None x_int = x / scaling_factor sigmoid_int, sigmoid_scaling_factor = self.int_erf(x_int, scaling_factor / self.k) shift_int = 1.0 // sigmoid_scaling_factor x_int = x_int * (sigmoid_int + shift_int) scaling_factor = scaling_factor * sigmoid_scaling_factor / 2 return x_int * scaling_factor, scaling_factor class IntSoftmax(qc.Module): def __init__(self, output_bit, quant_mode=False, force_dequant="none"): super().__init__() self.output_bit = output_bit self.max_bit = 32 self.quant_mode = quant_mode if force_dequant in ["nonlinear", "softmax"]: logger.info("Force dequantize softmax") self.quant_mode = False self.act = QuantAct(16, quant_mode=self.quant_mode) self.x0 = -0.6931 # -ln2 self.const = 30 # dummy integer constant self.coef = [0.35815147, 0.96963238, 1.0] # ax**2 + bx + c self.coef[1] /= self.coef[0] self.coef[2] /= self.coef[0] def int_polynomial(self, x_int, scaling_factor): with torch.no_grad(): b_int = torch.floor(self.coef[1] / scaling_factor) c_int = torch.floor(self.coef[2] / scaling_factor**2) z = (x_int + b_int) * x_int + c_int scaling_factor = self.coef[0] * scaling_factor**2 return z, scaling_factor def int_exp(self, x_int, scaling_factor): with torch.no_grad(): x0_int = torch.floor(self.x0 / scaling_factor) x_int = torch.max(x_int, self.const * x0_int) q = floor_ste.apply(x_int / x0_int) r = x_int - x0_int * q exp_int, exp_scaling_factor = self.int_polynomial(r, scaling_factor) exp_int = torch.clamp(floor_ste.apply(exp_int * 2 ** (self.const - q)), min=0) scaling_factor = exp_scaling_factor / 2**self.const return exp_int, scaling_factor def 
forward(self, x, scaling_factor): if not self.quant_mode: return F.softmax(x, dim=-1), None x_int = x / scaling_factor x_int_max, _ = x_int.max(dim=-1, keepdim=True) x_int = x_int - x_int_max exp_int, exp_scaling_factor = self.int_exp(x_int, scaling_factor) # Avoid overflow exp, exp_scaling_factor = self.act(exp_int, exp_scaling_factor) exp_int = exp / exp_scaling_factor exp_int_sum = exp_int.sum(dim=-1, keepdim=True) factor = floor_ste.apply(2**self.max_bit / exp_int_sum) exp_int = floor_ste.apply(exp_int * factor / 2 ** (self.max_bit - self.output_bit)) scaling_factor = 1 / 2**self.output_bit return exp_int * scaling_factor, scaling_factor class IntLayerNorm(nn.Module): def __init__(self, normalized_shape, eps, output_bit=8, quant_mode=False, force_dequant="none"): super().__init__() self.normalized_shape = normalized_shape self.eps = eps self.weight = nn.Parameter(torch.zeros(normalized_shape)) self.bias = nn.Parameter(torch.zeros(normalized_shape)) self.quant_mode = quant_mode if force_dequant in ["nonlinear", "layernorm"]: logger.info("Force dequantize layernorm") self.quant_mode = False self.register_buffer("shift", torch.zeros(1)) self.output_bit = output_bit self.max_bit = 32 self.dim_sqrt = None self.activation = QuantAct(self.output_bit, quant_mode=self.quant_mode) def set_shift(self, y_int): with torch.no_grad(): y_sq_int = y_int**2 var_int = torch.sum(y_sq_int, axis=2, keepdim=True) shift = (torch.log2(torch.sqrt(var_int / 2**self.max_bit)).ceil()).max() shift_old = self.shift self.shift = torch.max(self.shift, shift) logger.info(f"Dynamic shift adjustment: {int(shift_old)} to {int(self.shift)}") def overflow_fallback(self, y_int): self.set_shift(y_int) # adjusts `self.shift` y_int_shifted = floor_ste.apply(y_int / 2**self.shift) y_sq_int = y_int_shifted**2 var_int = torch.sum(y_sq_int, axis=2, keepdim=True) return var_int def forward(self, x, scaling_factor=None): if not self.quant_mode: mean = x.mean(axis=2, keepdim=True) y = x - mean var = torch.mean(y**2, axis=2, keepdim=True) x = y / torch.sqrt(self.eps + var) x = x * self.weight + self.bias return x, None # compute sqrt of the feature dimension if it is the first run if self.dim_sqrt is None: n = torch.tensor(x.shape[2], dtype=torch.float) self.dim_sqrt = torch.sqrt(n).to(x.device) # Normalization: computes mean and variance(std) x_int = x / scaling_factor mean_int = round_ste.apply(x_int.mean(axis=2, keepdim=True)) y_int = x_int - mean_int y_int_shifted = floor_ste.apply(y_int / 2**self.shift) y_sq_int = y_int_shifted**2 var_int = torch.sum(y_sq_int, axis=2, keepdim=True) # overflow handling in training time if self.training: # if overflow is detected if var_int.max() >= 2**self.max_bit: var_int = self.overflow_fallback(y_int) assert var_int.max() < 2**self.max_bit + 0.1, ( "Error detected in overflow handling: " "`var_int` exceeds `self.max_bit` (the maximum possible bit width)" ) # To be replaced with integer-sqrt kernel that produces the same output std_int = floor_ste.apply(torch.sqrt(var_int)) * 2**self.shift factor = floor_ste.apply(2**31 / std_int) y_int = floor_ste.apply(y_int * factor / 2) scaling_factor = self.dim_sqrt / 2**30 # scaling and shifting bias = self.bias.data.detach() / (self.weight.data.detach()) bias_int = floor_ste.apply(bias / scaling_factor) y_int = y_int + bias_int scaling_factor = scaling_factor * self.weight x = y_int * scaling_factor return x, scaling_factor def get_percentile_min_max(input, lower_percentile, upper_percentile, output_tensor=False): input_length = input.shape[0] lower_index 
= round(input_length * (1 - lower_percentile * 0.01)) upper_index = round(input_length * upper_percentile * 0.01) upper_bound = torch.kthvalue(input, k=upper_index).values if lower_percentile == 0: lower_bound = upper_bound * 0 # lower_index += 1 else: lower_bound = -torch.kthvalue(-input, k=lower_index).values if not output_tensor: lower_bound = lower_bound.item() upper_bound = upper_bound.item() return lower_bound, upper_bound def linear_quantize(input, scale, zero_point, inplace=False): if len(input.shape) == 4: scale = scale.view(-1, 1, 1, 1) zero_point = zero_point.view(-1, 1, 1, 1) # reshape scale and zeropoint for linear weights elif len(input.shape) == 2: scale = scale.view(-1, 1) zero_point = zero_point.view(-1, 1) else: scale = scale.view(-1) zero_point = zero_point.view(-1) # quantized = float / scale + zero_point if inplace: input.mul_(1.0 / scale).add_(zero_point).round_() return input return torch.round(1.0 / scale * input + zero_point) def symmetric_linear_quantization_params( num_bits, saturation_min, saturation_max, per_channel=False ): with torch.no_grad(): n = 2 ** (num_bits - 1) - 1 if per_channel: scale, _ = torch.max( torch.stack([saturation_min.abs(), saturation_max.abs()], dim=1), dim=1 ) scale = torch.clamp(scale, min=1e-8) / n else: scale = max(saturation_min.abs(), saturation_max.abs()) scale = torch.clamp(scale, min=1e-8) / n return scale class SymmetricQuantFunction(Function): @staticmethod def forward(ctx, x, k, percentile_mode, scale): zero_point = torch.tensor(0.0).to(scale.device) n = 2 ** (k - 1) - 1 new_quant_x = linear_quantize(x, scale, zero_point, inplace=False) new_quant_x = torch.clamp(new_quant_x, -n, n - 1) ctx.scale = scale return new_quant_x @staticmethod def backward(ctx, grad_output): scale = ctx.scale if len(grad_output.shape) == 4: scale = scale.view(-1, 1, 1, 1) # reshape scale and zeropoint for linear weights elif len(grad_output.shape) == 2: scale = scale.view(-1, 1) else: scale = scale.view(-1) return grad_output.clone() / scale, None, None, None, None class floor_ste(Function): @staticmethod def forward(ctx, x): return torch.floor(x) @staticmethod def backward(ctx, grad_output): return grad_output.clone() class round_ste(Function): @staticmethod def forward(ctx, x): return torch.round(x) @staticmethod def backward(ctx, grad_output): return grad_output.clone() def batch_frexp(inputs, max_bit=31): shape_of_input = inputs.size() # flatten the input into a 1-d tensor inputs = inputs.view(-1) output_m, output_e = np.frexp(inputs.cpu().numpy()) tmp_m = [] for m in output_m: int_m_shifted = int( decimal.Decimal(m * (2**max_bit)).quantize( decimal.Decimal("1"), rounding=decimal.ROUND_HALF_UP ) ) tmp_m.append(int_m_shifted) output_m = np.array(tmp_m) output_e = float(max_bit) - output_e return ( torch.from_numpy(output_m).to(inputs.device).view(shape_of_input), torch.from_numpy(output_e).to(inputs.device).view(shape_of_input), ) class FixedPointMul(Function): @staticmethod def
( ctx, pre_act, pre_act_scaling_factor, bit_num, z_scaling_factor, identity=None, identity_scaling_factor=None, ): if len(pre_act_scaling_factor.shape) == 3: reshape = lambda x: x # noqa: E731 else: reshape = lambda x: x.view(1, 1, -1) # noqa: E731 ctx.identity = identity n = 2 ** (bit_num - 1) - 1 with torch.no_grad(): pre_act_scaling_factor = reshape(pre_act_scaling_factor) if identity is not None: identity_scaling_factor = reshape(identity_scaling_factor) ctx.z_scaling_factor = z_scaling_factor z_int = torch.round(pre_act / pre_act_scaling_factor) _A = pre_act_scaling_factor.type(torch.double) _B = (z_scaling_factor.type(torch.float)).type(torch.double) new_scale = _A / _B new_scale = reshape(new_scale) m, e = batch_frexp(new_scale) output = z_int.type(torch.double) * m.type(torch.double) output = torch.round(output / (2.0**e)) if identity is not None: # needs addition of identity activation wx_int = torch.round(identity / identity_scaling_factor) _A = identity_scaling_factor.type(torch.double) _B = (z_scaling_factor.type(torch.float)).type(torch.double) new_scale = _A / _B new_scale = reshape(new_scale) m1, e1 = batch_frexp(new_scale) output1 = wx_int.type(torch.double) * m1.type(torch.double) output1 = torch.round(output1 / (2.0**e1)) output = output1 + output return torch.clamp(output.type(torch.float), -n - 1, n) @staticmethod def backward(ctx, grad_output): identity_grad = None if ctx.identity is not None: identity_grad = grad_output.clone() / ctx.z_scaling_factor return ( grad_output.clone() / ctx.z_scaling_factor, None, None, None, None, identity_grad, None, )
forward
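For reference, the symmetric scheme implemented above by symmetric_linear_quantization_params, linear_quantize, and SymmetricQuantFunction amounts to the following, with b = num_bits and the zero point fixed at 0:

n = 2^{b-1} - 1, \qquad S = \frac{\max(\lvert x_{\min} \rvert,\, \lvert x_{\max} \rvert)}{n}, \qquad q = \operatorname{clamp}\!\left(\operatorname{round}\!\left(\frac{x}{S}\right),\, -n,\, n-1\right)

So for b = 8 this gives n = 127, and a tensor whose extrema satisfy max(|x_min|, |x_max|) = 2.54 gets scale S = 2.54 / 127 = 0.02 and integer values in [-127, 126] (the code additionally clamps S below at 1e-8).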
dinero-polyfilled.js
import 'core-js/fn/array/every' import 'core-js/fn/array/find-index' import 'core-js/fn/array/find'
import 'core-js/fn/object/assign' import 'core-js/fn/object/entries' import 'core-js/fn/number/is-integer' import 'core-js/fn/math/sign' import 'core-js/fn/promise' import Dinero from './dinero' export default Dinero
import 'core-js/fn/array/keys'
specs.py
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import annotations import itertools import os from abc import ABC, ABCMeta, abstractmethod from dataclasses import dataclass from typing import TYPE_CHECKING, Iterable, Mapping, Sequence, Tuple from pants.base.exceptions import ResolveError from pants.build_graph.address import Address from pants.engine.fs import GlobExpansionConjunction, GlobMatchErrorBehavior, PathGlobs from pants.engine.internals.target_adaptor import TargetAdaptor from pants.util.dirutil import fast_relpath_optional, recursive_dirname from pants.util.meta import frozen_after_init if TYPE_CHECKING: from pants.engine.internals.mapper import AddressFamily class Spec(ABC): """A specification for what Pants should operate on.""" @abstractmethod def __str__(self) -> str: """The normalized string representation of this spec.""" class AddressSpec(Spec, metaclass=ABCMeta): """Represents address selectors as passed from the command line.""" @dataclass(frozen=True) class AddressLiteralSpec(AddressSpec): """An AddressSpec for a single address. This may be a traditional address, like `a/b/c:c`, or a file address using disambiguation syntax, e.g. `a/b/c.txt:tgt`. """ path_component: str target_component: str def __str__(self) -> str: return f"{self.path_component}:{self.target_component}" class AddressGlobSpec(AddressSpec, metaclass=ABCMeta):
@dataclass(frozen=True) class SiblingAddresses(AddressGlobSpec): """An AddressSpec representing all addresses located directly within the given directory.""" directory: str def __str__(self) -> str: return f"{self.directory}:" def to_globs(self, build_patterns: Iterable[str]) -> Tuple[str, ...]: return tuple(os.path.join(self.directory, pat) for pat in build_patterns) def matching_address_families( self, address_families_dict: Mapping[str, "AddressFamily"] ) -> Tuple["AddressFamily", ...]: maybe_af = address_families_dict.get(self.directory) if maybe_af is None: raise ResolveError( f"Path '{self.directory}' does not contain any BUILD files, but '{self}' expected " "matching targets there." ) return (maybe_af,) @dataclass(frozen=True) class MaybeEmptyDescendantAddresses(AddressGlobSpec): """An AddressSpec representing all addresses located recursively under the given directory. It is not an error if there are no such addresses. """ directory: str def __str__(self) -> str: return f"{self.directory}::" def to_globs(self, build_patterns: Iterable[str]) -> Tuple[str, ...]: return tuple(os.path.join(self.directory, "**", pat) for pat in build_patterns) def matching_address_families( self, address_families_dict: Mapping[str, "AddressFamily"] ) -> Tuple["AddressFamily", ...]: return tuple( af for ns, af in address_families_dict.items() if fast_relpath_optional(ns, self.directory) is not None ) class DescendantAddresses(MaybeEmptyDescendantAddresses): """An AddressSpec representing all addresses located recursively under the given directory. At least one such address must exist. """ def matching_addresses( self, address_families: Sequence["AddressFamily"] ) -> Sequence[Tuple[Address, TargetAdaptor]]: matching = super().matching_addresses(address_families) if len(matching) == 0: raise ResolveError(f"Address spec '{self}' does not match any targets.") return matching @dataclass(frozen=True) class AscendantAddresses(AddressGlobSpec): """An AddressSpec representing all addresses located recursively _above_ the given directory.""" directory: str def __str__(self) -> str: return f"{self.directory}^" def to_globs(self, build_patterns: Iterable[str]) -> Tuple[str, ...]: return tuple( os.path.join(f, pattern) for pattern in build_patterns for f in recursive_dirname(self.directory) ) def matching_address_families( self, address_families_dict: Mapping[str, "AddressFamily"] ) -> Tuple["AddressFamily", ...]: return tuple( af for ns, af in address_families_dict.items() if fast_relpath_optional(self.directory, ns) is not None ) @frozen_after_init @dataclass(unsafe_hash=True) class AddressSpecs: literals: Tuple[AddressLiteralSpec, ...] globs: Tuple[AddressGlobSpec, ...] filter_by_global_options: bool def __init__( self, specs: Iterable[AddressSpec], *, filter_by_global_options: bool = False ) -> None: """Create the specs for what addresses Pants should run on. If `filter_by_global_options` is set to True, then the resulting Addresses will be filtered by the global options `--tag` and `--exclude-target-regexp`. 
""" literals = [] globs = [] for spec in specs: if isinstance(spec, AddressLiteralSpec): literals.append(spec) elif isinstance(spec, AddressGlobSpec): globs.append(spec) else: raise ValueError(f"Unexpected type of AddressSpec: {repr(self)}") self.literals = tuple(literals) self.globs = tuple(globs) self.filter_by_global_options = filter_by_global_options @property def specs(self) -> Tuple[AddressSpec, ...]: return (*self.literals, *self.globs) def to_path_globs( self, *, build_patterns: Iterable[str], build_ignore_patterns: Iterable[str] ) -> PathGlobs: includes = set( itertools.chain.from_iterable(spec.to_globs(build_patterns) for spec in self.globs) ) ignores = (f"!{p}" for p in build_ignore_patterns) return PathGlobs(globs=(*includes, *ignores)) def __bool__(self) -> bool: return bool(self.specs) class FilesystemSpec(Spec, metaclass=ABCMeta): pass @dataclass(frozen=True) class FilesystemLiteralSpec(FilesystemSpec): """A literal file name, e.g. `foo.py`.""" file: str def __str__(self) -> str: return self.file @dataclass(frozen=True) class FilesystemGlobSpec(FilesystemSpec): """A spec with a glob or globs, e.g. `*.py` and `**/*.java`.""" glob: str def __str__(self) -> str: return self.glob @dataclass(frozen=True) class FilesystemIgnoreSpec(FilesystemSpec): """A spec to ignore certain files or globs.""" glob: str def __post_init__(self) -> None: if self.glob.startswith("!"): raise ValueError(f"The `glob` for {self} should not start with `!`.") def __str__(self) -> str: return f"!{self.glob}" @frozen_after_init @dataclass(unsafe_hash=True) class FilesystemSpecs: includes: tuple[FilesystemLiteralSpec | FilesystemGlobSpec, ...] ignores: tuple[FilesystemIgnoreSpec, ...] def __init__(self, specs: Iterable[FilesystemSpec]) -> None: includes = [] ignores = [] for spec in specs: if isinstance(spec, (FilesystemLiteralSpec, FilesystemGlobSpec)): includes.append(spec) elif isinstance(spec, FilesystemIgnoreSpec): ignores.append(spec) else: raise ValueError(f"Unexpected type of FilesystemSpec: {repr(self)}") self.includes = tuple(includes) self.ignores = tuple(ignores) @property def specs(self) -> Tuple[FilesystemSpec, ...]: return (*self.includes, *self.ignores) @staticmethod def _generate_path_globs( specs: Iterable[FilesystemSpec], glob_match_error_behavior: GlobMatchErrorBehavior ) -> PathGlobs: return PathGlobs( globs=(str(s) for s in specs), glob_match_error_behavior=glob_match_error_behavior, # We validate that _every_ glob is valid. 
conjunction=GlobExpansionConjunction.all_match, description_of_origin=( None if glob_match_error_behavior == GlobMatchErrorBehavior.ignore else "file arguments" ), ) def path_globs_for_spec( self, spec: FilesystemLiteralSpec | FilesystemGlobSpec, glob_match_error_behavior: GlobMatchErrorBehavior, ) -> PathGlobs: """Generate PathGlobs for the specific spec, automatically including the instance's FilesystemIgnoreSpecs.""" return self._generate_path_globs((spec, *self.ignores), glob_match_error_behavior) def to_path_globs(self, glob_match_error_behavior: GlobMatchErrorBehavior) -> PathGlobs: """Generate a single PathGlobs for the instance.""" return self._generate_path_globs((*self.includes, *self.ignores), glob_match_error_behavior) def __bool__(self) -> bool: return bool(self.specs) @dataclass(frozen=True) class Specs: address_specs: AddressSpecs filesystem_specs: FilesystemSpecs @property def provided(self) -> bool: """Did the user provide specs?""" return bool(self.address_specs) or bool(self.filesystem_specs) @classmethod def empty(cls) -> Specs: return Specs(AddressSpecs([], filter_by_global_options=True), FilesystemSpecs([]))
@abstractmethod def to_globs(self, build_patterns: Iterable[str]) -> Tuple[str, ...]: """Generate glob patterns matching exactly all the BUILD files this address spec covers.""" @abstractmethod def matching_address_families( self, address_families_dict: Mapping[str, "AddressFamily"] ) -> Tuple["AddressFamily", ...]: """Given a dict of (namespace path) -> AddressFamily, return the values matching this address spec. :raises: :class:`ResolveError` if no address families matched this spec and this spec type expects a match. """ def matching_addresses( self, address_families: Sequence["AddressFamily"] ) -> Sequence[Tuple[Address, TargetAdaptor]]: """Given a list of AddressFamily, return (Address, TargetAdaptor) pairs matching this address spec. :raises: :class:`ResolveError` if no addresses could be matched and this spec type expects a match. """ return tuple( itertools.chain.from_iterable( af.addresses_to_target_adaptors.items() for af in address_families ) )
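Concretely, under the classes above: src/python: (SiblingAddresses) matches only addresses directly in src/python; src/python:: matches src/python and everything beneath it, where the MaybeEmptyDescendantAddresses variant tolerates zero matches and DescendantAddresses raises ResolveError if nothing matches; and src/python^ (AscendantAddresses) matches src/python and each of its ancestor directories.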
lib.rs
#![no_std] #![allow(dead_code)] // #[macro_use(singleton)] // extern crate cortex_m; use cortex_m::asm::delay as delay_cycles; use stm32h7xx_hal::time::{Hertz, MegaHertz}; pub const MILLI: u32 = 1_000; pub const AUDIO_FRAME_RATE_HZ: u32 = 1_000; pub const AUDIO_BLOCK_SIZE: u16 = 48; pub const AUDIO_SAMPLE_RATE: usize = 48_000; pub const AUDIO_SAMPLE_HZ: Hertz = Hertz(48_000); pub const CLOCK_RATE_HZ: Hertz = Hertz(480_000_000_u32); pub const MILICYCLES: u32 = CLOCK_RATE_HZ.0 / MILLI; pub type FrameTimer = stm32h7xx_hal::timer::Timer<stm32h7xx_hal::stm32::TIM2>; pub use stm32h7xx_hal as hal; pub mod audio; pub mod flash; pub mod gpio; pub mod hid; pub mod logger; pub mod mpu; pub mod prelude; pub mod sdmmc; pub mod sdram; pub mod system; // Delay for `ms` milliseconds; note that if interrupts are active the delay time will be extended pub fn delay_ms(ms: u32) { delay_cycles(ms * MILICYCLES);
// }
} // pub fn ms_to_cycles(ms: u32) {
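Since CLOCK_RATE_HZ is 480 MHz, MILICYCLES = CLOCK_RATE_HZ.0 / MILLI = 480_000_000 / 1_000 = 480_000 cycles per millisecond, so a call such as delay_ms(10) busy-waits 10 * 480_000 = 4_800_000 core cycles.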
client.go
// Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package v20190416 import ( "context" "errors" "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common" tchttp "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/http" "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/profile" ) const APIVersion = "2019-04-16" type Client struct { common.Client } // Deprecated func NewClientWithSecretId(secretId, secretKey, region string) (client *Client, err error) { cpf := profile.NewClientProfile() client = &Client{} client.Init(region).WithSecretId(secretId, secretKey).WithProfile(cpf) return } func NewClient(credential common.CredentialIface, region string, clientProfile *profile.ClientProfile) (client *Client, err error) { client = &Client{} client.Init(region). WithCredential(credential). WithProfile(clientProfile) return } func NewCreateJobRequest() (request *CreateJobRequest) { request = &CreateJobRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "CreateJob") return } func NewCreateJobResponse() (response *CreateJobResponse) { response = &CreateJobResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // CreateJob // Create a job // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) CreateJob(request *CreateJobRequest) (response *CreateJobResponse, err error) { return c.CreateJobWithContext(context.Background(), request) } // CreateJob // Create a job // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) CreateJobWithContext(ctx context.Context, request *CreateJobRequest) (response *CreateJobResponse, err error) { if request == nil { request = NewCreateJobRequest() } if c.GetCredential() == nil { return nil, errors.New("CreateJob require credential") } request.SetContext(ctx) response = NewCreateJobResponse() err = c.Send(request, response) return } func NewCreateRsgAsGroupRequest() (request *CreateRsgAsGroupRequest) { request = &CreateRsgAsGroupRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "CreateRsgAsGroup") return } func NewCreateRsgAsGroupResponse() (response *CreateRsgAsGroupResponse) { response = &CreateRsgAsGroupResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // CreateRsgAsGroup // Create a scaling group for a resource group. Currently, each resource group may have only one scaling group. // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = 
"InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) CreateRsgAsGroup(request *CreateRsgAsGroupRequest) (response *CreateRsgAsGroupResponse, err error) { return c.CreateRsgAsGroupWithContext(context.Background(), request) } // CreateRsgAsGroup // 创建资源组的伸缩组。当前一个资源组仅允许创建一个伸缩组。 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) CreateRsgAsGroupWithContext(ctx context.Context, request *CreateRsgAsGroupRequest) (response *CreateRsgAsGroupResponse, err error) { if request == nil { request = NewCreateRsgAsGroupRequest() } if c.GetCredential() == nil { return nil, errors.New("CreateRsgAsGroup require credential") } request.SetContext(ctx) response = NewCreateRsgAsGroupResponse() err = c.Send(request, response) return } func NewCreateRuntimeRequest() (request *CreateRuntimeRequest) { request = &CreateRuntimeRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "CreateRuntime") return } func NewCreateRuntimeResponse() (response *CreateRuntimeResponse) { response = &CreateRuntimeResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // CreateRuntime // 创建运行环境 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) CreateRuntime(request *CreateRuntimeRequest) (response *CreateRuntimeResponse, err error) { return c.CreateRuntimeWithContext(context.Background(), request) } // CreateRuntime // 创建运行环境 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) CreateRuntimeWithContext(ctx context.Context, request *CreateRuntimeRequest) (response *CreateRuntimeResponse, err error) { if request == nil { request = NewCreateRuntimeRequest() } if c.GetCredential() == nil { return nil, errors.New("CreateRuntime require credential") } request.SetContext(ctx) response = NewCreateRuntimeResponse() err = c.Send(request, response) return } func NewCreateServiceRequest() (request *CreateServiceRequest) { request = &CreateServiceRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "CreateService") return } func NewCreateServiceResponse() (response *CreateServiceResponse) { response = &CreateServiceResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // CreateService // 创建服务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) CreateService(request *CreateServiceRequest) (response *CreateServiceResponse, err error) { return 
c.CreateServiceWithContext(context.Background(), request) } // CreateService // Create a service // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) CreateServiceWithContext(ctx context.Context, request *CreateServiceRequest) (response *CreateServiceResponse, err error) { if request == nil { request = NewCreateServiceRequest() } if c.GetCredential() == nil { return nil, errors.New("CreateService require credential") } request.SetContext(ctx) response = NewCreateServiceResponse() err = c.Send(request, response) return } func NewCreateServiceConfigRequest() (request *CreateServiceConfigRequest) { request = &CreateServiceConfigRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "CreateServiceConfig") return } func NewCreateServiceConfigResponse() (response *CreateServiceConfigResponse) { response = &CreateServiceConfigResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // CreateServiceConfig // Create a service configuration // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) CreateServiceConfig(request *CreateServiceConfigRequest) (response *CreateServiceConfigResponse, err error) { return c.CreateServiceConfigWithContext(context.Background(), request) } // CreateServiceConfig // Create a service configuration // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) CreateServiceConfigWithContext(ctx context.Context, request *CreateServiceConfigRequest) (response *CreateServiceConfigResponse, err error) { if request == nil { request = NewCreateServiceConfigRequest() } if c.GetCredential() == nil { return nil, errors.New("CreateServiceConfig require credential") } request.SetContext(ctx) response = NewCreateServiceConfigResponse() err = c.Send(request, response) return } func NewDeleteInstanceRequest() (request *DeleteInstanceRequest) { request = &DeleteInstanceRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DeleteInstance") return } func NewDeleteInstanceResponse() (response *DeleteInstanceResponse) { response = &DeleteInstanceResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DeleteInstance // Delete nodes from a resource group. Currently, only expired prepaid nodes and pay-as-you-go nodes can be deleted. // // Possible error codes: // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DeleteInstance(request *DeleteInstanceRequest) (response *DeleteInstanceResponse, err error) { return c.DeleteInstanceWithContext(context.Background(), request) } // DeleteInstance // Delete nodes from a resource group. Currently, only expired prepaid nodes and pay-as-you-go nodes can be deleted. // // Possible error codes: // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = 
"UnsupportedOperation" func (c *Client) DeleteInstanceWithContext(ctx context.Context, request *DeleteInstanceRequest) (response *DeleteInstanceResponse, err error) { if request == nil { request = NewDeleteInstanceRequest() } if c.GetCredential() == nil { return nil, errors.New("DeleteInstance require credential") } request.SetContext(ctx) response = NewDeleteInstanceResponse() err = c.Send(request, response) return } func NewDeleteJobRequest() (request *DeleteJobRequest) { request = &DeleteJobRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DeleteJob") return } func NewDeleteJobResponse() (response *DeleteJobResponse) { response = &DeleteJobResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DeleteJob // 删除任务 // // 可能返回的错误码: // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "Reso
(c *Client) DeleteJob(request *DeleteJobRequest) (response *DeleteJobResponse, err error) { return c.DeleteJobWithContext(context.Background(), request) } // DeleteJob // Delete a job // // Possible error codes: // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) DeleteJobWithContext(ctx context.Context, request *DeleteJobRequest) (response *DeleteJobResponse, err error) { if request == nil { request = NewDeleteJobRequest() } if c.GetCredential() == nil { return nil, errors.New("DeleteJob require credential") } request.SetContext(ctx) response = NewDeleteJobResponse() err = c.Send(request, response) return } func NewDeleteResourceGroupRequest() (request *DeleteResourceGroupRequest) { request = &DeleteResourceGroupRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DeleteResourceGroup") return } func NewDeleteResourceGroupResponse() (response *DeleteResourceGroupResponse) { response = &DeleteResourceGroupResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DeleteResourceGroup // Delete a resource group // // Possible error codes: // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DeleteResourceGroup(request *DeleteResourceGroupRequest) (response *DeleteResourceGroupResponse, err error) { return c.DeleteResourceGroupWithContext(context.Background(), request) } // DeleteResourceGroup // Delete a resource group // // Possible error codes: // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DeleteResourceGroupWithContext(ctx context.Context, request *DeleteResourceGroupRequest) (response *DeleteResourceGroupResponse, err error) { if request == nil { request = NewDeleteResourceGroupRequest() } if c.GetCredential() == nil { return nil, errors.New("DeleteResourceGroup require credential") } request.SetContext(ctx) response = NewDeleteResourceGroupResponse() err = c.Send(request, response) return } func NewDeleteRsgAsGroupRequest() (request *DeleteRsgAsGroupRequest) { request = &DeleteRsgAsGroupRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DeleteRsgAsGroup") return } func NewDeleteRsgAsGroupResponse() (response *DeleteRsgAsGroupResponse) { response = &DeleteRsgAsGroupResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DeleteRsgAsGroup // Scaling // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) DeleteRsgAsGroup(request *DeleteRsgAsGroupRequest) (response *DeleteRsgAsGroupResponse, err error) { return c.DeleteRsgAsGroupWithContext(context.Background(), request) } // DeleteRsgAsGroup // Scaling // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) DeleteRsgAsGroupWithContext(ctx context.Context, request *DeleteRsgAsGroupRequest) (response *DeleteRsgAsGroupResponse, err error) { if request == nil { request = NewDeleteRsgAsGroupRequest() } if c.GetCredential() == nil { return nil, errors.New("DeleteRsgAsGroup require credential") } request.SetContext(ctx) response = NewDeleteRsgAsGroupResponse() err = c.Send(request, response) return } func NewDeleteRuntimeRequest() (request *DeleteRuntimeRequest) { request = &DeleteRuntimeRequest{ 
BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DeleteRuntime") return } func NewDeleteRuntimeResponse() (response *DeleteRuntimeResponse) { response = &DeleteRuntimeResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DeleteRuntime // Delete a runtime environment // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DeleteRuntime(request *DeleteRuntimeRequest) (response *DeleteRuntimeResponse, err error) { return c.DeleteRuntimeWithContext(context.Background(), request) } // DeleteRuntime // Delete a runtime environment // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DeleteRuntimeWithContext(ctx context.Context, request *DeleteRuntimeRequest) (response *DeleteRuntimeResponse, err error) { if request == nil { request = NewDeleteRuntimeRequest() } if c.GetCredential() == nil { return nil, errors.New("DeleteRuntime require credential") } request.SetContext(ctx) response = NewDeleteRuntimeResponse() err = c.Send(request, response) return } func NewDeleteServiceRequest() (request *DeleteServiceRequest) { request = &DeleteServiceRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DeleteService") return } func NewDeleteServiceResponse() (response *DeleteServiceResponse) { response = &DeleteServiceResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DeleteService // Delete a service // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DeleteService(request *DeleteServiceRequest) (response *DeleteServiceResponse, err error) { return c.DeleteServiceWithContext(context.Background(), request) } // DeleteService // Delete a service // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DeleteServiceWithContext(ctx context.Context, request *DeleteServiceRequest) (response *DeleteServiceResponse, err error) { if request == nil { request = NewDeleteServiceRequest() } if c.GetCredential() == nil { return nil, errors.New("DeleteService require credential") } request.SetContext(ctx) response = NewDeleteServiceResponse() err = c.Send(request, response) return } func NewDeleteServiceConfigRequest() (request *DeleteServiceConfigRequest) { request = &DeleteServiceConfigRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DeleteServiceConfig") return } func NewDeleteServiceConfigResponse() (response *DeleteServiceConfigResponse) { response = &DeleteServiceConfigResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DeleteServiceConfig // Delete a service configuration // // Possible error codes: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) 
DeleteServiceConfig(request *DeleteServiceConfigRequest) (response *DeleteServiceConfigResponse, err error) { return c.DeleteServiceConfigWithContext(context.Background(), request) } // DeleteServiceConfig // 删除服务配置 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DeleteServiceConfigWithContext(ctx context.Context, request *DeleteServiceConfigRequest) (response *DeleteServiceConfigResponse, err error) { if request == nil { request = NewDeleteServiceConfigRequest() } if c.GetCredential() == nil { return nil, errors.New("DeleteServiceConfig require credential") } request.SetContext(ctx) response = NewDeleteServiceConfigResponse() err = c.Send(request, response) return } func NewDescribeInstancesRequest() (request *DescribeInstancesRequest) { request = &DescribeInstancesRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DescribeInstances") return } func NewDescribeInstancesResponse() (response *DescribeInstancesResponse) { response = &DescribeInstancesResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DescribeInstances // 获取节点列表 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) DescribeInstances(request *DescribeInstancesRequest) (response *DescribeInstancesResponse, err error) { return c.DescribeInstancesWithContext(context.Background(), request) } // DescribeInstances // 获取节点列表 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) DescribeInstancesWithContext(ctx context.Context, request *DescribeInstancesRequest) (response *DescribeInstancesResponse, err error) { if request == nil { request = NewDescribeInstancesRequest() } if c.GetCredential() == nil { return nil, errors.New("DescribeInstances require credential") } request.SetContext(ctx) response = NewDescribeInstancesResponse() err = c.Send(request, response) return } func NewDescribeResourceGroupsRequest() (request *DescribeResourceGroupsRequest) { request = &DescribeResourceGroupsRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DescribeResourceGroups") return } func NewDescribeResourceGroupsResponse() (response *DescribeResourceGroupsResponse) { response = &DescribeResourceGroupsResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DescribeResourceGroups // 获取资源组列表 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) DescribeResourceGroups(request *DescribeResourceGroupsRequest) (response *DescribeResourceGroupsResponse, err error) { return c.DescribeResourceGroupsWithContext(context.Background(), request) } // DescribeResourceGroups // 获取资源组列表 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) DescribeResourceGroupsWithContext(ctx context.Context, request *DescribeResourceGroupsRequest) (response *DescribeResourceGroupsResponse, err error) { if request == nil { request = 
NewDescribeResourceGroupsRequest() } if c.GetCredential() == nil { return nil, errors.New("DescribeResourceGroups require credential") } request.SetContext(ctx) response = NewDescribeResourceGroupsResponse() err = c.Send(request, response) return } func NewDescribeRsgAsGroupActivitiesRequest() (request *DescribeRsgAsGroupActivitiesRequest) { request = &DescribeRsgAsGroupActivitiesRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DescribeRsgAsGroupActivities") return } func NewDescribeRsgAsGroupActivitiesResponse() (response *DescribeRsgAsGroupActivitiesResponse) { response = &DescribeRsgAsGroupActivitiesResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DescribeRsgAsGroupActivities // 查询伸缩组活动 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DescribeRsgAsGroupActivities(request *DescribeRsgAsGroupActivitiesRequest) (response *DescribeRsgAsGroupActivitiesResponse, err error) { return c.DescribeRsgAsGroupActivitiesWithContext(context.Background(), request) } // DescribeRsgAsGroupActivities // 查询伸缩组活动 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DescribeRsgAsGroupActivitiesWithContext(ctx context.Context, request *DescribeRsgAsGroupActivitiesRequest) (response *DescribeRsgAsGroupActivitiesResponse, err error) { if request == nil { request = NewDescribeRsgAsGroupActivitiesRequest() } if c.GetCredential() == nil { return nil, errors.New("DescribeRsgAsGroupActivities require credential") } request.SetContext(ctx) response = NewDescribeRsgAsGroupActivitiesResponse() err = c.Send(request, response) return } func NewDescribeRsgAsGroupsRequest() (request *DescribeRsgAsGroupsRequest) { request = &DescribeRsgAsGroupsRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DescribeRsgAsGroups") return } func NewDescribeRsgAsGroupsResponse() (response *DescribeRsgAsGroupsResponse) { response = &DescribeRsgAsGroupsResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DescribeRsgAsGroups // 查询资源组的伸缩组信息 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DescribeRsgAsGroups(request *DescribeRsgAsGroupsRequest) (response *DescribeRsgAsGroupsResponse, err error) { return c.DescribeRsgAsGroupsWithContext(context.Background(), request) } // DescribeRsgAsGroups // 查询资源组的伸缩组信息 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DescribeRsgAsGroupsWithContext(ctx context.Context, request *DescribeRsgAsGroupsRequest) (response *DescribeRsgAsGroupsResponse, err error) { if request == nil { request = NewDescribeRsgAsGroupsRequest() } if c.GetCredential() == nil { return nil, errors.New("DescribeRsgAsGroups require credential") } request.SetContext(ctx) response = NewDescribeRsgAsGroupsResponse() err = c.Send(request, response) return } func NewDescribeRuntimesRequest() 
(request *DescribeRuntimesRequest) { request = &DescribeRuntimesRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DescribeRuntimes") return } func NewDescribeRuntimesResponse() (response *DescribeRuntimesResponse) { response = &DescribeRuntimesResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DescribeRuntimes // 描述服务运行环境 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DescribeRuntimes(request *DescribeRuntimesRequest) (response *DescribeRuntimesResponse, err error) { return c.DescribeRuntimesWithContext(context.Background(), request) } // DescribeRuntimes // 描述服务运行环境 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DescribeRuntimesWithContext(ctx context.Context, request *DescribeRuntimesRequest) (response *DescribeRuntimesResponse, err error) { if request == nil { request = NewDescribeRuntimesRequest() } if c.GetCredential() == nil { return nil, errors.New("DescribeRuntimes require credential") } request.SetContext(ctx) response = NewDescribeRuntimesResponse() err = c.Send(request, response) return } func NewDescribeServiceConfigsRequest() (request *DescribeServiceConfigsRequest) { request = &DescribeServiceConfigsRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DescribeServiceConfigs") return } func NewDescribeServiceConfigsResponse() (response *DescribeServiceConfigsResponse) { response = &DescribeServiceConfigsResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DescribeServiceConfigs // 描述服务配置 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DescribeServiceConfigs(request *DescribeServiceConfigsRequest) (response *DescribeServiceConfigsResponse, err error) { return c.DescribeServiceConfigsWithContext(context.Background(), request) } // DescribeServiceConfigs // 描述服务配置 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DescribeServiceConfigsWithContext(ctx context.Context, request *DescribeServiceConfigsRequest) (response *DescribeServiceConfigsResponse, err error) { if request == nil { request = NewDescribeServiceConfigsRequest() } if c.GetCredential() == nil { return nil, errors.New("DescribeServiceConfigs require credential") } request.SetContext(ctx) response = NewDescribeServiceConfigsResponse() err = c.Send(request, response) return } func NewDescribeServicesRequest() (request *DescribeServicesRequest) { request = &DescribeServicesRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DescribeServices") return } func NewDescribeServicesResponse() (response *DescribeServicesResponse) { response = &DescribeServicesResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DescribeServices // 描述服务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // 
FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DescribeServices(request *DescribeServicesRequest) (response *DescribeServicesResponse, err error) { return c.DescribeServicesWithContext(context.Background(), request) } // DescribeServices // 描述服务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) DescribeServicesWithContext(ctx context.Context, request *DescribeServicesRequest) (response *DescribeServicesResponse, err error) { if request == nil { request = NewDescribeServicesRequest() } if c.GetCredential() == nil { return nil, errors.New("DescribeServices require credential") } request.SetContext(ctx) response = NewDescribeServicesResponse() err = c.Send(request, response) return } func NewDisableRsgAsGroupRequest() (request *DisableRsgAsGroupRequest) { request = &DisableRsgAsGroupRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "DisableRsgAsGroup") return } func NewDisableRsgAsGroupResponse() (response *DisableRsgAsGroupResponse) { response = &DisableRsgAsGroupResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // DisableRsgAsGroup // 停用资源组的伸缩组 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DisableRsgAsGroup(request *DisableRsgAsGroupRequest) (response *DisableRsgAsGroupResponse, err error) { return c.DisableRsgAsGroupWithContext(context.Background(), request) } // DisableRsgAsGroup // 停用资源组的伸缩组 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) DisableRsgAsGroupWithContext(ctx context.Context, request *DisableRsgAsGroupRequest) (response *DisableRsgAsGroupResponse, err error) { if request == nil { request = NewDisableRsgAsGroupRequest() } if c.GetCredential() == nil { return nil, errors.New("DisableRsgAsGroup require credential") } request.SetContext(ctx) response = NewDisableRsgAsGroupResponse() err = c.Send(request, response) return } func NewEnableRsgAsGroupRequest() (request *EnableRsgAsGroupRequest) { request = &EnableRsgAsGroupRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "EnableRsgAsGroup") return } func NewEnableRsgAsGroupResponse() (response *EnableRsgAsGroupResponse) { response = &EnableRsgAsGroupResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // EnableRsgAsGroup // 启用资源组的伸缩组 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) EnableRsgAsGroup(request *EnableRsgAsGroupRequest) (response *EnableRsgAsGroupResponse, err error) { return c.EnableRsgAsGroupWithContext(context.Background(), request) } // EnableRsgAsGroup // 启用资源组的伸缩组 // // 可能返回的错误码: // FAILEDOPERATION = 
"FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) EnableRsgAsGroupWithContext(ctx context.Context, request *EnableRsgAsGroupRequest) (response *EnableRsgAsGroupResponse, err error) { if request == nil { request = NewEnableRsgAsGroupRequest() } if c.GetCredential() == nil { return nil, errors.New("EnableRsgAsGroup require credential") } request.SetContext(ctx) response = NewEnableRsgAsGroupResponse() err = c.Send(request, response) return } func NewExposeServiceRequest() (request *ExposeServiceRequest) { request = &ExposeServiceRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "ExposeService") return } func NewExposeServiceResponse() (response *ExposeServiceResponse) { response = &ExposeServiceResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // ExposeService // 暴露服务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) ExposeService(request *ExposeServiceRequest) (response *ExposeServiceResponse, err error) { return c.ExposeServiceWithContext(context.Background(), request) } // ExposeService // 暴露服务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) ExposeServiceWithContext(ctx context.Context, request *ExposeServiceRequest) (response *ExposeServiceResponse, err error) { if request == nil { request = NewExposeServiceRequest() } if c.GetCredential() == nil { return nil, errors.New("ExposeService require credential") } request.SetContext(ctx) response = NewExposeServiceResponse() err = c.Send(request, response) return } func NewUpdateJobRequest() (request *UpdateJobRequest) { request = &UpdateJobRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "UpdateJob") return } func NewUpdateJobResponse() (response *UpdateJobResponse) { response = &UpdateJobResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // UpdateJob // 更新任务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) UpdateJob(request *UpdateJobRequest) (response *UpdateJobResponse, err error) { return c.UpdateJobWithContext(context.Background(), request) } // UpdateJob // 更新任务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // FAILEDOPERATION_ALREADYEXISTS = "FailedOperation.AlreadyExists" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" func (c *Client) UpdateJobWithContext(ctx context.Context, request *UpdateJobRequest) (response *UpdateJobResponse, err error) { if request == nil { request = NewUpdateJobRequest() } if c.GetCredential() == nil { return nil, errors.New("UpdateJob require credential") } request.SetContext(ctx) response = NewUpdateJobResponse() err = c.Send(request, 
response) return } func NewUpdateRsgAsGroupRequest() (request *UpdateRsgAsGroupRequest) { request = &UpdateRsgAsGroupRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "UpdateRsgAsGroup") return } func NewUpdateRsgAsGroupResponse() (response *UpdateRsgAsGroupResponse) { response = &UpdateRsgAsGroupResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // UpdateRsgAsGroup // 更新资源组的伸缩组 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) UpdateRsgAsGroup(request *UpdateRsgAsGroupRequest) (response *UpdateRsgAsGroupResponse, err error) { return c.UpdateRsgAsGroupWithContext(context.Background(), request) } // UpdateRsgAsGroup // 更新资源组的伸缩组 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNSUPPORTEDOPERATION = "UnsupportedOperation" func (c *Client) UpdateRsgAsGroupWithContext(ctx context.Context, request *UpdateRsgAsGroupRequest) (response *UpdateRsgAsGroupResponse, err error) { if request == nil { request = NewUpdateRsgAsGroupRequest() } if c.GetCredential() == nil { return nil, errors.New("UpdateRsgAsGroup require credential") } request.SetContext(ctx) response = NewUpdateRsgAsGroupResponse() err = c.Send(request, response) return } func NewUpdateServiceRequest() (request *UpdateServiceRequest) { request = &UpdateServiceRequest{ BaseRequest: &tchttp.BaseRequest{}, } request.Init().WithApiInfo("tiems", APIVersion, "UpdateService") return } func NewUpdateServiceResponse() (response *UpdateServiceResponse) { response = &UpdateServiceResponse{ BaseResponse: &tchttp.BaseResponse{}, } return } // UpdateService // 更新服务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) UpdateService(request *UpdateServiceRequest) (response *UpdateServiceResponse, err error) { return c.UpdateServiceWithContext(context.Background(), request) } // UpdateService // 更新服务 // // 可能返回的错误码: // FAILEDOPERATION = "FailedOperation" // INTERNALERROR = "InternalError" // INVALIDPARAMETER = "InvalidParameter" // RESOURCENOTFOUND = "ResourceNotFound" // UNAUTHORIZEDOPERATION = "UnauthorizedOperation" func (c *Client) UpdateServiceWithContext(ctx context.Context, request *UpdateServiceRequest) (response *UpdateServiceResponse, err error) { if request == nil { request = NewUpdateServiceRequest() } if c.GetCredential() == nil { return nil, errors.New("UpdateService require credential") } request.SetContext(ctx) response = NewUpdateServiceResponse() err = c.Send(request, response) return }
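// Editor's note: a minimal usage sketch, not part of the generated SDK above.
// Every wrapper in this client follows the same shape: build the typed request
// with its New*Request constructor, set the request fields, then call the
// *WithContext variant so the supplied ctx (deadline/cancellation) reaches
// c.Send. `c` is assumed to be an already-constructed *Client with a non-nil
// credential, and ServiceId is an assumption about the request shape (request
// fields live in the accompanying models file, which is not shown here).
func exampleDeleteService(ctx context.Context, c *Client, serviceId *string) error {
    request := NewDeleteServiceRequest()
    request.ServiceId = serviceId // hypothetical field; see the models file

    // Errors cover both local failures (missing credential) and API error
    // codes such as ResourceNotFound listed in the doc comments above.
    _, err := c.DeleteServiceWithContext(ctx, request)
    return err
}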
urceNotFound" func
conftest.py
# Third-Party
import pytest
from rest_framework.test import APIClient
# from rest_framework.test import RequestsClient

# Django
from django.test.client import Client

# First-Party
from .factories import AwardFactory
from .factories import ChartFactory
from .factories import ConventionFactory
from .factories import GroupFactory
from .factories import PersonFactory
from .factories import UserFactory


@pytest.fixture
def admin_django_client():
    admin = UserFactory(
        is_staff=True,
    )
    client = Client()
    client.force_login(admin)
    return client
@pytest.fixture
def admin_api_client():
    admin = UserFactory(
        is_staff=True,
    )
    client = APIClient()
    client.force_authenticate(user=admin)
    return client


@pytest.fixture
def user_api_client():
    # person = PersonFactory()
    user = UserFactory(
        is_staff=False,
        # person=person,
    )
    client = APIClient()
    client.force_authenticate(user=user)
    return client


@pytest.fixture
def anon_api_client():
    client = APIClient()
    return client


@pytest.fixture
def award():
    return AwardFactory()


@pytest.fixture
def chart():
    return ChartFactory()


@pytest.fixture
def convention():
    return ConventionFactory()


@pytest.fixture
def group():
    return GroupFactory()


@pytest.fixture
def person():
    return PersonFactory()


@pytest.fixture
def user():
    return UserFactory()
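# Editor's note: a minimal usage sketch, not part of conftest.py. pytest
# injects the fixtures above by parameter name, so a test only declares what
# it needs. The endpoint below is hypothetical (the project's router is not
# shown), and the asserted status codes hedge across the usual "denied"
# variants since the permission classes are not visible here either.
def test_anon_client_cannot_delete_award(anon_api_client, award):
    # Hypothetical DRF route; adjust to the project's actual URL config.
    response = anon_api_client.delete(f'/awards/{award.id}/')
    assert response.status_code in (401, 403, 404)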
line-chart.component.ts
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Created by Dolkkok on 2017. 7. 18..
 */

import { AfterViewInit, Component, ElementRef, Injector, OnInit } from '@angular/core';
import {
  AxisType, CHART_STRING_DELIMITER, ChartColorList, ChartColorType, ChartType, ColorCustomMode,
  LineCornerType, LineStyle, LineType, Position, SeriesType, ShelveFieldType, ShelveType, SymbolType,
  UIChartDataLabelDisplayType
} from '../../option/define/common';
import { OptionGenerator } from '../../option/util/option-generator';
import { UIChartColorBySeries, UILineChart, UIOption } from '../../option/ui-option';
import { Series } from '../../option/define/series';
import * as _ from 'lodash';
import { Pivot } from '../../../../../domain/workbook/configurations/pivot';
import { BaseOption } from '../../option/base-option';
import { LineChartSplit } from './line-chart.split';
import { BaseChart } from '../../base-chart';
import { ColorOptionConverter } from '../../option/converter/color-option-converter';
import { AnalysisPredictionService } from '../../../../../page/component/analysis/service/analysis.prediction.service';
import optGen = OptionGenerator;
import Legend = OptionGenerator.Legend;
import Grid = OptionGenerator.Grid;
import Axis = OptionGenerator.Axis;
import DataZoom = OptionGenerator.DataZoom;
import Tooltip = OptionGenerator.Tooltip;
import Brush = OptionGenerator.Brush;
import Toolbox = OptionGenerator.Toolbox;
import { UIChartAxis, UIChartAxisGrid, UIChartAxisLabelValue } from '../../option/ui-option/ui-axis';
import { AxisOptionConverter } from '../../option/converter/axis-option-converter';
import { Axis as AxisDefine } from '../../option/define/axis';

const transparentSymbolImage: string = 'image://' + window.location.origin + '/assets/images/icon_transparent_symbol.png';

@Component({
  selector: 'line-chart',
  templateUrl: 'line-chart.component.html'
})
export class LineChartComponent extends BaseChart implements OnInit, AfterViewInit {

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Private Variables
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  // Split option
  private split: LineChartSplit = new LineChartSplit();

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Protected Variables
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Public Variables
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Constructor
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  // Constructor
  constructor(protected elementRef: ElementRef, protected injector: Injector) {
    super(elementRef, injector);
  }

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Getter & Setter
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Override Method
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  /**
   * Init
   */
  ngOnInit() {
    super.ngOnInit();
  }

  /**
   * ngAfterViewInit
   */
  ngAfterViewInit(): void {
    super.ngAfterViewInit();
  }

  /**
   * Destroy
   */
  ngOnDestroy() {
    super.ngOnDestroy();
  }

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Public Method
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  /**
   * Check whether the chart can be drawn based on the shelf information
   * @param shelve
   */
  public isValid(shelve: Pivot): boolean {
    return ((this.getFieldTypeCount(shelve, ShelveType.COLUMNS, ShelveFieldType.DIMENSION) + this.getFieldTypeCount(shelve, ShelveType.COLUMNS, ShelveFieldType.TIMESTAMP)) > 0)
      && (this.getFieldTypeCount(shelve, ShelveType.AGGREGATIONS, ShelveFieldType.MEASURE) > 0 || this.getFieldTypeCount(shelve, ShelveType.AGGREGATIONS, ShelveFieldType.CALCULATED) > 0)
      && (this.getFieldTypeCount(shelve, ShelveType.COLUMNS, ShelveFieldType.MEASURE) == 0 && this.getFieldTypeCount(shelve, ShelveType.COLUMNS, ShelveFieldType.CALCULATED) == 0);
  }

  /**
   * Draw the line chart
   * @param isKeepRange
   */
  public draw(isKeepRange?: boolean): void {

    // this.uiOption.split = {
    //   by: this.splitBy[0]['value'],
    //   column: this.columnCount,
    //   row: this.rowCount
    // }
    if (this.split.isSplit(this.uiOption)) {
      this.chartOption = this.split.setSplitData(this.data, this.pivot, this.uiOption, this.chartOption);
      this.apply(false);
    } else {
      // Update fieldInfo according to the changed pivot value
      this.setFieldInfo();

      // If the cross shelf contains dimensions, remove them (needed for the selection filter and the legend)
      this.pivot = this.editPivotByColorType();

      super.draw(isKeepRange);
    }
  }

  // -----------------------------------------------
  // Advanced analysis - prediction line
  // -----------------------------------------------

  /**
   * Used to draw the line chart when the advanced-analysis prediction line is enabled
   */
  public predictionDraw(): void {
    this.draw(true);
  }

  /**
   * When the forecast settings change
   */
  public changeForecast(): void {
    this.chartOption = this.predictionLineLineStyleColorBySeries();
    this.draw(true);
  }

  /**
   * When the confidence settings change
   */
  public changeConfidence(): void {
    this.chartOption = this.predictionLineAreaStyleColorBySeries();
    this.draw(true);
  }

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Protected Method
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  /**
   * Create the chart's default options.
   * - Overridden in each chart
   */
  protected initOption(): BaseOption {
    return {
      type: ChartType.LINE,
      grid: [Grid.verticalMode(10, 0, 0, 10, false, true, false)],
      xAxis: [Axis.categoryAxis(Position.MIDDLE, null, false, true, true, true)],
      yAxis: [Axis.valueAxis(Position.MIDDLE, null, false, false, true, true, true)],
      legend: Legend.custom(false, false, Position.LEFT, SymbolType.CIRCLE, '100%', 20, 5),
      dataZoom: [DataZoom.horizontalDataZoom(), DataZoom.horizontalInsideDataZoom()],
      tooltip: Tooltip.itemTooltip(),
      toolbox: Toolbox.hiddenToolbox(),
      brush: Brush.selectBrush(),
      series: []
    };
  }

  /**
   * Additional series information per chart
   * - Must be overridden in each chart
   * @returns {BaseOption}
   */
  protected convertSeriesData(): BaseOption {

    this.chartOption.series = this.data.columns.map((column) => {
      // Create the series
      return {
        type: SeriesType.LINE,
        name: column.name,
        data: column.value,
        originData: _.cloneDeep(column.value),
        connectNulls: true,
        showAllSymbol: true,
        symbol: SymbolType.CIRCLE,
        sampling: 'max',
        itemStyle: optGen.ItemStyle.auto(),
        label: optGen.LabelStyle.defaultLabelStyle(false, Position.TOP),
        uiData: column
      };
    });

    return this.chartOption;
  }

  /**
   * Settings applied to the line chart's series values
   */
  protected additionalSeries(): BaseOption {

    // Change how the series lines are rendered (line & point / line only / point only)
    this.chartOption = this.convertLineStyle();

    // Change the line corner style
    this.chartOption = this.convertCornerStyle();

    // Change the color settings
    this.chartOption = this.convertColor();

    // When the prediction line is in use
    if (this.isAnalysisPredictionLineEmpty() === false) {
      // Increase the count by the extended forecast interval
      const count = 20 + Number(this.analysis.interval);
      this.convertDataZoomAutoRange(this.chartOption, count, 500, 10, this.existTimeField);
    }

    return this.chartOption;
  }

  /**
   * Legend settings for the line chart
   */
  protected additionalLegend(): BaseOption {

    // For color-by-dimension, configure the legend the same way as color-by-series
    if (this.uiOption.color.type == ChartColorType.DIMENSION) {
      this.chartOption.legend.data = this.chartOption.series.map(item => item.name);
    }

    return this.chartOption;
  }

  /**
   * Register the selection events.
   * - Override in each chart when needed
   */
  protected selection(): void {
    this.addChartSelectEventListener();
    this.addChartMultiSelectEventListener();
    this.addLegendSelectEventListener();
  }

  /**
   * Configure dataLabel / tooltip according to overlap
   * @returns {UIOption}
   */
  protected setDataLabel(): UIOption {

    if (!this.pivot || !this.pivot.aggregations || !this.pivot.rows) return this.uiOption;

    // Remove series-related entries from the list
    const spliceSeriesTypeList = ((seriesTypeList, dataLabel: any): UIOption => {

      // Check whether the preview list is empty
      const previewFl = !!dataLabel.previewList;

      let index: number;
      for (const item of seriesTypeList) {
        index = dataLabel.displayTypes.indexOf(item);

        if (-1 !== index) {
          // Remove it from the label
          dataLabel.displayTypes[index] = null;

          // If previewList exists
          if (previewFl) {
            // Remove it from the preview list
            _.remove(dataLabel.previewList, {value: item});
          }
        }
      }
      return dataLabel;
    });

    // If the cross shelf does not have two or more dimensions or measures
    if (this.pivot.aggregations.length < 2 && this.pivot.rows.length < 1) {

      // Series display types to remove when the condition is met
      const seriesTypeList = [UIChartDataLabelDisplayType.SERIES_NAME, UIChartDataLabelDisplayType.SERIES_VALUE, UIChartDataLabelDisplayType.SERIES_PERCENT];

      // Remove the series-related settings from the data label
      if (this.uiOption.dataLabel && this.uiOption.dataLabel.displayTypes) this.uiOption.dataLabel = spliceSeriesTypeList(seriesTypeList, this.uiOption.dataLabel);

      // Remove the series-related settings from the tooltip
      if (this.uiOption.toolTip && this.uiOption.toolTip.displayTypes) this.uiOption.toolTip = spliceSeriesTypeList(seriesTypeList, this.uiOption.toolTip);
    }

    return this.uiOption;
  }

  /**
   * Additional Y-axis information per chart
   * - Override in each chart when needed
   * @returns {BaseOption}
   */
  protected additionalYAxis(): BaseOption {

    // Recalculate min / max.
    this.convertAxisAutoScale(AxisType.Y);

    // Return the chart options
    return this.chartOption;
  }

  /**
   * Additional X-axis information per chart
   * - Override in each chart when needed
   * @returns {BaseOption}
   */
  protected additionalXAxis(): BaseOption {

    // Recalculate min / max.
    this.convertAxisAutoScale(AxisType.X);

    // Return the chart options
    return this.chartOption;
  }

  /**
   * Min / max calculation
   * @param grid
   * @param result
   * @param isYAxis
   */
  protected calculateMinMax(grid: UIChartAxisGrid, result: any, isYAxis: boolean): void {

    // The line chart overrides this so no data post-processing is performed

    // // When the axis range is set automatically
    // if( grid.autoScaled ) {
    //   if( result.data.categories && result.data.categories.length > 0 ) {
    //     let min = null;
    //     let max = null;
    //     _.each(result.data.columns, (column) => {
    //       _.each(column.value, (value) => {
    //         if( min == null || value < min ) {
    //           min = value;
    //         }
    //         if( max == null || value > max ) {
    //           max = value;
    //         }
    //       });
    //     });
    //     grid.min = min > 0
    //       ? Math.ceil(min - ((max - min) * 0.05))
    //       : min
    //     grid.max = max;
    //   }
    //   else {
    //     grid.min = result.data.info.minValue > 0
    //       ? Math.ceil(result.data.info.minValue - ((result.data.info.maxValue - result.data.info.minValue) * 0.05))
    //       : result.data.info.minValue
    //     grid.max = result.data.info.maxValue;
    //   }
    // }
    //
    // // Cancel if there are no min / max values
    // if( ((_.isUndefined(grid.min) || grid.min == 0)
    //   && (_.isUndefined(grid.max) || grid.max == 0)) ) {
    //   return;
    // }
    //
    // // Count the multi-series.
    // let seriesList = [];
    // result.data.columns.map((column, index) => {
    //   let nameArr = _.split(column.name, CHART_STRING_DELIMITER);
    //   let name = "";
    //   if( nameArr.length > 1 ) {
    //     nameArr.map((temp, index) => {
    //       if( index < nameArr.length - 1 ) {
    //         if( index > 0 ) {
    //           name += CHART_STRING_DELIMITER;
    //         }
    //         name += temp;
    //       }
    //     });
    //   }
    //   else {
    //     name = nameArr[0];
    //   }
    //
    //   let isAlready = false;
    //   seriesList.map((series, index) => {
    //     if( series == name ) {
    //       isAlready = true;
    //       return false;
    //     }
    //   });
    //
    //   if( !isAlready ) {
    //     seriesList.push(name);
    //   }
    // });
    //
    // // Apply min / max
    // result.data.columns.map((column, index) => {
    //   column.value.map((value, index) => {
    //     if( value < grid.min ) {
    //       column.value[index] = grid.min;
    //     }
    //     else if( value > grid.max ) {
    //       column.value[index] = grid.max;
    //     }
    //   });
    // });
  }

  /**
   * Axis baseline (crossing point) calculation
   * @param baseline
   * @param result
   * @param isYAxis
   */
  protected calculateBaseline(baseline: number, result: any, isYAxis: boolean): void {

    // Count the multi-series.
    let seriesList = [];
    result.data.columns.map((column, index) => {
      let nameArr = _.split(column.name, CHART_STRING_DELIMITER);
      let name = "";
      if( nameArr.length > 1 ) {
        nameArr.map((temp, index) => {
          if( index < nameArr.length - 1 ) {
            if( index > 0 ) {
              name += CHART_STRING_DELIMITER;
            }
            name += temp;
          }
        });
      }
      else {
        name = nameArr[0];
      }

      let isAlready = false;
      seriesList.map((series, index) => {
        if( series == name ) {
          isAlready = true;
          return false;
        }
      });

      if( !isAlready ) {
        seriesList.push(name);
      }
    });

    // Offset the values against the baseline (negative side handled separately)
    result.data.columns.map((column, index) => {
      column.value.map((value, index) => {
        if( value > 0 ) {
          column.value[index] = value - baseline;
        }
        else {
          column.value[index] = (Math.abs(value) + Math.abs(baseline)) * -1;
        }
      });
    });
  }

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Private Method
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  /**
   * Min / max auto-scale
   * @param axisType
   */
  private convertAxisAutoScale(axisType: AxisType): BaseOption {

    // Recalculate the min / max values.

    ///////////////////////////
    // Extract values from the UI options
    ///////////////////////////

    // Axis options for the given axis
    let axisOption: UIChartAxis[] = AxisOptionConverter.getAxisOption(this.uiOption, axisType);

    ///////////////////////////
    // Apply to the chart options
    ///////////////////////////

    // Axis
    let axis: AxisDefine[] = this.chartOption[axisType];

    _.each(axis, (option: AxisDefine, index) => {

      // For a value axis
      if ((<UIChartAxisLabelValue>axisOption[index].label) && _.eq((<UIChartAxisLabelValue>axisOption[index].label).type, AxisType.VALUE)
        && axisOption[index].grid) {

        let min = null;
        let max = null;
        let calculateMin = null;
        if( this.originalData.categories && this.originalData.categories.length > 0 ) {
          _.each(this.originalData.columns, (column) => {
            _.each(column.value, (value) => {
              if( min == null || value < min ) {
                min = value;
              }
              if( max == null || value > max ) {
                max = value;
              }
            });
          });
          calculateMin = Math.ceil(this.originalData.info.minValue - ((this.originalData.info.maxValue - this.originalData.info.minValue) * 0.05));
          // min = min > 0
          //   ? calculateMin >= 0 ? calculateMin : min
          //   : min;
          max = max;
        }
        else {
          calculateMin = Math.ceil(this.originalData.info.minValue - ((this.originalData.info.maxValue - this.originalData.info.minValue) * 0.05));
          min = this.originalData.info.minValue;
          // min = this.originalData.info.minValue > 0
          //   ? calculateMin >= 0 ? calculateMin : min
          //   : this.originalData.info.minValue;
          max = this.originalData.info.maxValue;
        }

        // Update min / max
        AxisOptionConverter.axisMinMax[axisType].min = min;
        AxisOptionConverter.axisMinMax[axisType].max = max;

        // When the baseline has changed
        let baseline = 0;
        if( axisOption[index].baseline && axisOption[index].baseline != 0 ) {
          baseline = axisOption[index].baseline
        }

        // When no axis baseline is set and auto-scale is enabled
        if( baseline == 0 && axisOption[index].grid.autoScaled ) {
          // // Apply
          // option.min = min > 0
          //   ? Math.ceil(min - ((max - min) * 0.05))
          //   : min;
          // option.max = max;
          delete option.min;
          delete option.max;
          option.scale = true;
        }
        else {
          delete option.scale;
        }
      }
    });

    // Return the chart options
    return this.chartOption;
  }

  /**
   * Change how the chart series lines are rendered (line & point / line only / point only)
   */
  private convertLineStyle(): BaseOption {

    const lineUIOption = <UILineChart>this.uiOption;

    if (this.isAnalysisPredictionLineEmpty()) {
      const series = this.chartOption.series;

      series.map((obj) => {
        // If there is only one valid data point, always show the point
        const dataLength = obj.originData.filter((value) => {
          return value != null;
        }).length;

        // Use a circle symbol when points are shown
        const transparentSymbolImage: string = 'image://' + window.location.origin + '/assets/images/icon_transparent_symbol.png';
        obj.symbol = obj.data.length === 1 || dataLength === 1 || LineStyle.LINE !== lineUIOption.lineStyle ? 'circle' : transparentSymbolImage;

        // If no line style exists yet, create and apply the default style
        if (_.isUndefined(obj.lineStyle)) obj.lineStyle = optGen.LineStyle.auto();

        // Apply opacity depending on line visibility (opacity 1 when the line is shown)
        if (!_.isUndefined(obj.lineStyle) && !_.isUndefined(obj.lineStyle.normal)) obj.lineStyle.normal.opacity = LineStyle.POINT !== lineUIOption.lineStyle ? 1 : 0;
      });
    } else {
      this.chartOption = this.exceptPredictionLineShowLine(lineUIOption.lineStyle);
    }
    return this.chartOption;
  }

  /**
   * Change the corner style
   */
  private convertCornerStyle(): BaseOption {
    const lineUIOption = <UILineChart>this.uiOption;
    const series = this.chartOption.series;

    series.map((obj) => {
      obj.smooth = _.eq(lineUIOption.curveStyle, LineCornerType.SMOOTH);
    });
    return this.chartOption;
  }

  /**
   * Adjust fieldInfo for color by dimension / series => changes the legend settings
   */
  private editPivotByColorType(): Pivot {

    const shelve = this.pivot;

    // Iterate over the aggregation shelf
    for (let num = shelve.aggregations.length; num--;) {
      let item = shelve.aggregations[num];

      // If the field type is dimension
      if (String(ShelveFieldType.DIMENSION) == item.type) {
        // Remove the item from the shelf
        shelve.aggregations.splice(num, 1);
        // Add it to rows
        shelve.rows.push(item);
      }
    }

    return this.pivot;
  }

  /**
   * Color-by-dimension settings in the line chart
   * @returns {BaseOption}
   */
  private convertLineColorByDimension(): BaseOption {

    // Set the colors for color-by-dimension
    if (this.uiOption.color.type == ChartColorType.DIMENSION) {

      const option = this.chartOption;
      let schema = (<UIChartColorBySeries>this.uiOption.color).schema;
      let codes = _.cloneDeep(ChartColorList[schema]);

      // Delete visualMap if it exists
      if (!_.isUndefined(option.visualMap)) delete option.visualMap;

      const series: any = _.cloneDeep(option.series.map(item => item.name));

      _.each(option.series, (obj) => {
        // For dimension in the line chart, use the name as-is
        const aggName = obj.name;
        // Index of the measure field name
        const fieldIdx = _.indexOf(series, aggName);
        // Color index mapped from the measure field index
        const colorIdx = fieldIdx >= codes['length'] ? fieldIdx % codes['length'] : fieldIdx;

        obj.itemStyle.normal.color = codes[colorIdx];
      });
    }

    return this.chartOption;
  }

  // -----------------------------------------------
  // Advanced analysis - prediction line
  // -----------------------------------------------

  /**
   * Whether the advanced-analysis prediction line data is absent
   * @returns {boolean}
   */
  private isAnalysisPredictionLineEmpty(): boolean {
    return _.isUndefined(this.analysis) || _.isEmpty(this.analysis);
  }

  /**
   * Color the advanced-analysis series based on the series measure values
   * - Only used when the line chart has advanced-analysis data
   * @returns {BaseOption}
   */
  private predictionLineLineStyleColorBySeries(): BaseOption {

    // Convert the line type to the LineType enum
    const convertLineTypeEnum: Function = (lineType: string | LineType): string | LineType => {
      if (lineType === 'SOLID') {
        lineType = LineType.SOLID;
      } else if (lineType === 'DASHED') {
        lineType = LineType.DASHED;
      } else if (lineType === 'DOTTED') {
        lineType = LineType.DOTTED;
      }
      return lineType;
    };

    // Series
    let series = this.chartOption.series;

    this.analysis.forecast.parameters.forEach((parameter) => {
      series
        .filter((obj) => {
          return obj.name === parameter.field;
        })
        .map((obj) => {
          const index = series.findIndex((data) => {
            return data.name === parameter.field;
          });

          // The prediction line's forecast.style.color value
          let color: string = this.analysis.forecast.style.color;

          // When the color is not configured within the prediction line itself
          if (!this.analysis.forecast.style.predictColorUseFl) {
            color = obj.itemStyle.normal.color;
          }

          const lineThickness: number = this.analysis.forecast.style.lineThickness;
          const lineType: LineType = convertLineTypeEnum(this.analysis.forecast.style.lineType);

          const lowerSeries = series[index + 1];
          const observationsSeries = series[index + 2];
          const upperSeries = series[index + 3];
          const addSeries = series[index + 4];

          if (_.isUndefined(lowerSeries.lineStyle)) {
            lowerSeries.lineStyle = optGen.LineStyle.auto();
          }
          if (_.isUndefined(observationsSeries.lineStyle)) {
            observationsSeries.lineStyle = optGen.LineStyle.auto();
          }
          if (_.isUndefined(upperSeries.lineStyle)) {
            upperSeries.lineStyle = optGen.LineStyle.auto();
          }

          lowerSeries.lineStyle.normal.color = color;
          lowerSeries.lineStyle.normal.type = lineType;
          lowerSeries.lineStyle.normal.width = lineThickness;
          lowerSeries.lineStyle.normal.opacity = 0;
          lowerSeries.showSymbol = false;

          observationsSeries.lineStyle.normal.color = color;
          observationsSeries.lineStyle.normal.type = lineType;
          observationsSeries.lineStyle.normal.width = lineThickness;
          observationsSeries.showSymbol = false;

          upperSeries.lineStyle.normal.color = color;
          upperSeries.lineStyle.normal.type = lineType;
          upperSeries.lineStyle.normal.width = lineThickness;
          upperSeries.lineStyle.normal.opacity = 0;
          upperSeries.showSymbol = false;

          if (_.isUndefined(addSeries) === false) {
            if (addSeries.name === `${obj.name} - ${AnalysisPredictionService.predictionLineTypeAdditional}`) {
              addSeries.lineStyle = optGen.LineStyle.auto();
              addSeries.lineStyle.normal.color = color;
              addSeries.lineStyle.normal.type = lineType;
              addSeries.lineStyle.normal.width = lineThickness;
              addSeries.lineStyle.normal.opacity = 0;
              addSeries.showSymbol = false;
            }
          }
        });
    });

    return this.chartOption;
  }

  /**
   * Color the advanced-analysis series areas based on the series measure values
   * - Only used when the line chart has advanced-analysis data
   * @returns {BaseOption}
   */
  private predictionLineAreaStyleColorBySeries(): BaseOption {

    // Set the area color on a series
    const setSeriesAreaStyleColor: Function = (series: Series, color: string): void => {
      if (_.isUndefined(series.areaStyle)) {
        series.areaStyle = optGen.AreaStyle.auto();
      }
      series.areaStyle.normal.color = color;
    };

    // Divisor for the confidence.style.transparency value
    const transparencyDivisionShare: number = 100;

    // Calculate the confidence opacity
    const confidenceOpacityCalculator: Function = (): number => {
      return this.analysis.confidence.style.transparency / transparencyDivisionShare;
    };

    // Set the area opacity on a series
    const setSeriesAreaStyleOpacity: Function = (series: Series, opacity: number): void => {
      series.areaStyle.normal.opacity = opacity;
    };

    // Series
    let series = this.chartOption.series;

    this.analysis.forecast.parameters.forEach((parameter) => {
      series
        .filter((obj) => {
          return obj.name === parameter.field;
        })
        .map((obj) => {
          const index = series.findIndex((data) => {
            return data.name === parameter.field;
          });

          let color: string = this.analysis.confidence.style.color;
          // const isConfidenceStyleInColorEmpty = _.isEmpty(color);
          // if (isConfidenceStyleInColorEmpty) {
          //   const seriesLineStyleNormalColor = series[index].itemStyle.normal.color;

          // When the color is not configured within the prediction line itself
          if (!this.analysis.confidence.style.predictColorUseFl) {
            color = series[index].itemStyle.normal.color;
          }
          // }

          const lowerSeries = series[index + 1];
          const upperSeries = series[index + 3];
          const addSeries = series[index + 4];

          lowerSeries.stack = upperSeries.name;
          upperSeries.stack = upperSeries.name;

          // Opacity
          const opacity: number = confidenceOpacityCalculator();

          // Set the area color
          setSeriesAreaStyleColor(upperSeries, color);
          // Set the area opacity
          setSeriesAreaStyleOpacity(upperSeries, opacity);

          if (_.isUndefined(addSeries) === false) {
            if (addSeries.name === `${obj.name} - ${AnalysisPredictionService.predictionLineTypeAdditional}`) {
              // Set the area color
              setSeriesAreaStyleColor(addSeries, color);
              // Set the area opacity
              setSeriesAreaStyleOpacity(addSeries, opacity);
            }
          }
        });
    });

    return this.chartOption;
  }

  /**
   * Line & point visibility for the original series, excluding the advanced-analysis series
   * @param lineStyle
   * @returns {BaseOption}
   */
  private exceptPredictionLineShowLine(lineStyle: LineStyle): BaseOption {
    const series = this.chartOption.series;

    this.analysis.forecast.parameters.forEach((parameter) => {
      series
        .filter((obj) => {
          return obj.name === parameter.field;
        })
        .map((obj) => {
          if (_.eq(obj.type, SeriesType.LINE)) {
            // If there is only one valid data point, always show the point
            const dataLength = obj.originData.filter((value) => {
              return value != null;
            }).length;

            // Use a circle symbol when points are shown
            obj.symbol = obj.data.length === 1 || dataLength === 1 || LineStyle.LINE !== lineStyle ? 'circle' : transparentSymbolImage;

            // If no line style exists yet, create and apply the default style
            if (_.isUndefined(obj.lineStyle)) obj.lineStyle = optGen.LineStyle.auto();

            // Apply opacity depending on line visibility (opacity 1 when the line is shown)
            if (!_.isUndefined(obj.lineStyle) && !_.isUndefined(obj.lineStyle.normal)) {
              obj.lineStyle.normal.opacity = LineStyle.POINT !== lineStyle ? 1 : 0;
            }
          }
        });
    });
    return this.chartOption;
  }

  /**
   * Color the original data series (excluding the advanced-analysis series) based on the series measure values
   * - Only used when the line chart has advanced-analysis data
   * @returns {BaseOption}
   */
  private exceptPredictionLineColorBySeries(): BaseOption {

    const color = (<UIChartColorBySeries>this.uiOption.color);
    let schema = color.schema;
    let list: any = _.cloneDeep(ChartColorList[schema]);

    // If userCodes exist, use them instead of codes
    if ((<UIChartColorBySeries>color).mapping) {
      Object.keys((<UIChartColorBySeries>color).mapping).forEach((key, index) => {
        list[index] = (<UIChartColorBySeries>color).mapping[key];
      });
    }

    // If userCodes exist, use them instead of codes
    if ((<UIChartColorBySeries>this.uiOption.color).mapping) list = _.cloneDeep((<UIChartColorBySeries>this.uiOption.color).mapping);

    // If the subtype is not value, delete visualMap if it exists
    if (this.uiOption.color.type !== ChartColorType.MEASURE) delete this.chartOption.visualMap;

    // Series
    let series = this.chartOption.series;

    this.analysis.forecast.parameters.forEach((parameter, parameterIdx) => {
      series
        .filter((obj) => {
          return obj.name === parameter.field;
        })
        .map((obj) => {
          // Split the series name by the delimiter and extract the measure field name; for dimension in the line chart, use the name as-is
          const aggName = _.last(_.split(obj.name, CHART_STRING_DELIMITER));
          // Index of the measure field name
          const fieldIdx = _.indexOf(this.fieldInfo.aggs, aggName);
          // Color index mapped from the measure field index
          const colorIdx = fieldIdx >= list.length ? fieldIdx % list.length : fieldIdx;

          // If no style exists yet, create and apply the default style
          if (_.isUndefined(obj.itemStyle)) obj.itemStyle = optGen.ItemStyle.auto();

          // Apply the color to the current series;
          // if a border exists, apply the color to the border
          if (!_.isUndefined(obj.itemStyle.normal.borderWidth) && obj.itemStyle.normal.borderWidth > 0) {
            obj.itemStyle.normal.borderColor = list[colorIdx];
            delete obj.itemStyle.normal.color;
          } else {
            obj.itemStyle.normal.color = list[parameterIdx];
          }

          // For charts rendered as text
          if (!_.isUndefined(obj.textStyle)) obj.textStyle.normal.color = list[colorIdx];
        });
    });

    return this.chartOption;
  }

  /**
   * Adjust the grid position according to the chart orientation
   * @param option
   * @param orient
   */
  // private gridPosition(orient: Orient): BaseOption {
  //   // Whether a legend exists
  //   const withLegend: boolean = (!_.isUndefined(this.chartOption.legend) && this.chartOption.legend.show) || (!_.isUndefined(this.chartOption.visualMap) && this.chartOption.visualMap.show);
  //   // Whether DataZoom (minimap) exists
  //   const withDataZooom: boolean = !_.isUndefined(this.chartOption.dataZoom) && this.chartOption.dataZoom[0].show;
  //   const withSubAxis: boolean = this.chartOption.xAxis.length > 1 || this.chartOption.yAxis.length > 1;
  //   // Adjust the margins depending on DataZoom (minimap) presence
  //   this.chartOption.grid.map((obj, idx) => {
  //     this.chartOption.grid[idx] = _.eq(orient, Orient.BOTH)
  //       ? OptionGenerator.Grid.bothMode(10, 0, 0, 20, withLegend, withDataZooom)
  //       : _.eq(orient, Orient.VERTICAL)
  //         ? OptionGenerator.Grid.verticalMode(20, 0, 0, 10, withLegend, withDataZooom, withSubAxis)
  //         : OptionGenerator.Grid.horizontalMode(10, 0, 0, 20, withLegend, withDataZooom);
  //   });
  //   return this.chartOption;
  // }

  /**
   * Change common chart properties - color (changed via the UI)
   * @returns {BaseOption}
   */
  private convertColor(): BaseOption {

    const color = this.uiOption.color;

    // For the line chart, dimension is routed through the series type
    switch (color.type) {

      case ChartColorType.SERIES: {
        let schema = (<UIChartColorBySeries>color).schema;
        let colorCodes = _.cloneDeep(ChartColorList[schema]);

        // If userCodes exist, use them instead of codes
        if ((<UIChartColorBySeries>color).mapping) {
          Object.keys((<UIChartColorBySeries>color).mapping).forEach((key, index) => {
            colorCodes[index] = (<UIChartColorBySeries>color).mapping[key];
          });
        }

        if (this.isAnalysisPredictionLineEmpty()) {
          this.chartOption = ColorOptionConverter.convertColorBySeries(this.chartOption, this.fieldInfo, colorCodes);
        } else {
          this.chartOption = this.exceptPredictionLineColorBySeries();
          this.chartOption = this.predictionLineLineStyleColorBySeries();
          this.chartOption = this.predictionLineAreaStyleColorBySeries();
        }
        break;
      }
      case ChartColorType.DIMENSION: {
        this.chartOption = this.convertLineColorByDimension();
        break;
      }
      case ChartColorType.MEASURE: {
        // For gradient mode
        if (this.uiOption.color['customMode'] && ColorCustomMode.GRADIENT == this.uiOption.color['customMode']) {
          this.chartOption = ColorOptionConverter.convertColorByValueGradation(this.chartOption, this.uiOption);
        // Otherwise
        } else {
          this.chartOption = ColorOptionConverter.convertColorByValue(this.chartOption, this.uiOption);
        }

        if (!this.isAnalysisPredictionLineEmpty()) {
          this.chartOption = this.exceptPredictionLineColorBySeries();
          this.chartOption = this.predictionLineLineStyleColorBySeries();
          this.chartOption = this.predictionLineAreaStyleColorBySeries();
        }
        break;
      }
    }

    return this.chartOption;
  }
}
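// Editor's note: a standalone sketch, not part of the component above. It
// isolates the palette-index rule used by convertLineColorByDimension() and
// exceptPredictionLineColorBySeries(): a series' position in the field list
// picks its color, wrapping with modulo once there are more series than
// palette entries. All names below are illustrative only.
function pickSeriesColor(fieldNames: string[], palette: string[], seriesName: string): string {
  const fieldIdx = fieldNames.indexOf(seriesName);
  // Unknown series names fall back to the first palette entry (assumption).
  if (fieldIdx < 0) return palette[0];
  const colorIdx = fieldIdx >= palette.length ? fieldIdx % palette.length : fieldIdx;
  return palette[colorIdx];
}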
symbol.rs
//! An "interner" is a data structure that associates values with usize tags and //! allows bidirectional lookup; i.e., given a value, one can easily find the //! type, and vice versa. use rustc_arena::DroplessArena; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey}; use rustc_macros::HashStable_Generic; use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use std::cmp::{Ord, PartialEq, PartialOrd}; use std::fmt; use std::hash::{Hash, Hasher}; use std::str; use crate::{with_session_globals, Edition, Span, DUMMY_SP}; #[cfg(test)] mod tests; // The proc macro code for this is in `compiler/rustc_macros/src/symbols.rs`. symbols! { // After modifying this list adjust `is_special`, `is_used_keyword`/`is_unused_keyword`, // this should be rarely necessary though if the keywords are kept in alphabetic order. Keywords { // Special reserved identifiers used internally for elided lifetimes, // unnamed method parameters, crate root module, error recovery etc. Empty: "", PathRoot: "{{root}}", DollarCrate: "$crate", Underscore: "_", // Keywords that are used in stable Rust. As: "as", Break: "break", Const: "const", Continue: "continue", Crate: "crate", Else: "else", Enum: "enum", Extern: "extern", False: "false", Fn: "fn", For: "for", If: "if", Impl: "impl", In: "in", Let: "let", Loop: "loop", Match: "match", Mod: "mod", Move: "move", Mut: "mut", Pub: "pub", Ref: "ref", Return: "return", SelfLower: "self", SelfUpper: "Self", Static: "static", Struct: "struct", Super: "super", Trait: "trait", True: "true", Type: "type", Unsafe: "unsafe", Use: "use", Where: "where", While: "while", // Keywords that are used in unstable Rust or reserved for future use. Abstract: "abstract", Become: "become", Box: "box", Do: "do", Final: "final", Macro: "macro", Override: "override", Priv: "priv", Typeof: "typeof", Unsized: "unsized", Virtual: "virtual", Yield: "yield", // Edition-specific keywords that are used in stable Rust. Async: "async", // >= 2018 Edition only Await: "await", // >= 2018 Edition only Dyn: "dyn", // >= 2018 Edition only // Edition-specific keywords that are used in unstable Rust or reserved for future use. Try: "try", // >= 2018 Edition only // Special lifetime names UnderscoreLifetime: "'_", StaticLifetime: "'static", // Weak keywords, have special meaning only in specific contexts. Auto: "auto", Catch: "catch", Default: "default", MacroRules: "macro_rules", Raw: "raw", Union: "union", } // Pre-interned symbols that can be referred to with `rustc_span::sym::*`. // // The symbol is the stringified identifier unless otherwise specified, in // which case the name should mention the non-identifier punctuation. // E.g. `sym::proc_dash_macro` represents "proc-macro", and it shouldn't be // called `sym::proc_macro` because then it's easy to mistakenly think it // represents "proc_macro". // // As well as the symbols listed, there are symbols for the strings // "0", "1", ..., "9", which are accessible via `sym::integer`. // // The proc macro will abort if symbols are not in alphabetical order (as // defined by `impl Ord for str`) or if any symbols are duplicated. Vim // users can sort the list by selecting it and executing the command // `:'<,'>!LC_ALL=C sort`. // // There is currently no checking that all symbols are used; that would be // nice to have. 
Symbols { AcqRel, Acquire, Alignment, Any, Arc, Argument, ArgumentV1, Arguments, AsMut, AsRef, AtomicBool, AtomicI128, AtomicI16, AtomicI32, AtomicI64, AtomicI8, AtomicIsize, AtomicPtr, AtomicU128, AtomicU16, AtomicU32, AtomicU64, AtomicU8, AtomicUsize, BTreeEntry, BTreeMap, BTreeSet, BinaryHeap, Borrow, Break, C, CStr, CString, Center, Clone, Continue, Copy, Count, Cow, Debug, DebugStruct, DebugTuple, Decodable, Decoder, Default, Deref, DirBuilder, DoubleEndedIterator, Duration, Encodable, Encoder, Eq, Equal, Err, Error, File, FileType, FormatSpec, Formatter, From, FromIterator, Future, FxHashMap, FxHashSet, GlobalAlloc, Hash, HashMap, HashMapEntry, HashSet, Hasher, Implied, Input, IntoIterator, IoRead, IoWrite, Is, ItemContext, Iterator, Layout, Left, LinkedList, LintPass, None, Ok, Option, Ord, Ordering, OsStr, OsString, Output, Param, PartialEq, PartialOrd, Path, PathBuf, Pending, Pin, Poll, ProcMacro, ProcMacroHack, ProceduralMasqueradeDummyType, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, Rc, Ready, Receiver, Relaxed, Release, Result, Return, Right, RustcDecodable, RustcEncodable, Send, SeqCst, Some, StructuralEq, StructuralPartialEq, Sync, Target, ToOwned, ToString, Try, Ty, TyCtxt, TyKind, Unknown, Vec, Yield, _DECLS, _Self, __D, __H, __S, __next, __try_var, _d, _e, _task_context, a32, aarch64_target_feature, abi, abi_amdgpu_kernel, abi_avr_interrupt, abi_c_cmse_nonsecure_call, abi_efiapi, abi_msp430_interrupt, abi_ptx, abi_sysv64, abi_thiscall, abi_unadjusted, abi_vectorcall, abi_x86_interrupt, abort, aborts, add, add_assign, add_with_overflow, address, advanced_slice_patterns, adx_target_feature, alias, align, align_offset, alignstack, all, alloc, alloc_error_handler, alloc_layout, alloc_zeroed, allocator, allocator_internals, allow, allow_fail, allow_internal_unsafe, allow_internal_unstable, allowed, always, and, and_then, any, arbitrary_enum_discriminant, arbitrary_self_types, arith_offset, arm, arm_target_feature, array, arrays, as_ptr, as_str, asm, assert, assert_inhabited, assert_macro, assert_receiver_is_total_eq, assert_uninit_valid, assert_zero_valid, associated_consts, associated_type_bounds, associated_type_defaults, associated_types, assume, assume_init, async_await, async_closure,
atomic_mod, atomics, att_syntax, attr, attr_literals, attributes, augmented_assignments, auto_traits, automatically_derived, avx512_target_feature, await_macro, bang, begin_panic, begin_panic_fmt, bench, bin, bind_by_move_pattern_guards, bindings_after_at, bitand, bitand_assign, bitor, bitor_assign, bitreverse, bitxor, bitxor_assign, black_box, block, bool, borrowck_graphviz_format, borrowck_graphviz_postflow, borrowck_graphviz_preflow, box_free, box_patterns, box_syntax, bpf_target_feature, braced_empty_structs, branch, breakpoint, bridge, bswap, c_str, c_unwind, c_variadic, call, call_mut, call_once, caller_location, capture_disjoint_fields, cdylib, ceilf32, ceilf64, cfg, cfg_accessible, cfg_attr, cfg_attr_multi, cfg_doctest, cfg_eval, cfg_panic, cfg_sanitize, cfg_target_abi, cfg_target_feature, cfg_target_has_atomic, cfg_target_thread_local, cfg_target_vendor, cfg_version, char, client, clippy, clobber_abi, clone, clone_closures, clone_from, closure, closure_to_fn_coercion, cmp, cmp_max, cmp_min, cmpxchg16b_target_feature, cmse_nonsecure_entry, coerce_unsized, cold, column, compare_and_swap, compare_exchange, compare_exchange_weak, compile_error, compiler_builtins, compiler_fence, concat, concat_idents, conservative_impl_trait, console, const_allocate, const_async_blocks, const_compare_raw_pointers, const_constructor, const_eval_limit, const_evaluatable_checked, const_extern_fn, const_fn, const_fn_floating_point_arithmetic, const_fn_fn_ptr_basics, const_fn_trait_bound, const_fn_transmute, const_fn_union, const_fn_unsize, const_for, const_format_args, const_generic_defaults, const_generics, const_generics_defaults, const_if_match, const_impl_trait, const_in_array_repeat_expressions, const_indexing, const_let, const_loop, const_mut_refs, const_panic, const_panic_fmt, const_precise_live_drops, const_ptr, const_raw_ptr_deref, const_raw_ptr_to_usize_cast, const_refs_to_cell, const_slice_ptr, const_trait_bound_opt_out, const_trait_impl, const_transmute, const_try, constant, constructor, contents, context, convert, copy, copy_closures, copy_nonoverlapping, copysignf32, copysignf64, core, core_intrinsics, core_panic, core_panic_2015_macro, core_panic_macro, cosf32, cosf64, crate_id, crate_in_paths, crate_local, crate_name, crate_type, crate_visibility_modifier, crt_dash_static: "crt-static", cstring_type, ctlz, ctlz_nonzero, ctpop, cttz, cttz_nonzero, custom_attribute, custom_derive, custom_inner_attributes, custom_test_frameworks, d, dead_code, dealloc, debug, debug_assert_macro, debug_assertions, debug_struct, debug_trait, debug_trait_builder, debug_tuple, decl_macro, declare_lint_pass, decode, default_alloc_error_handler, default_lib_allocator, default_method_body_is_const, default_type_parameter_fallback, default_type_params, delay_span_bug_from_inside_query, deny, deprecated, deref, deref_method, deref_mut, deref_target, derive, derive_default_enum, destructuring_assignment, diagnostic, direct, discriminant_kind, discriminant_type, discriminant_value, dispatch_from_dyn, display_trait, div, div_assign, doc, doc_alias, doc_cfg, doc_keyword, doc_masked, doc_notable_trait, doc_primitive, doc_spotlight, doctest, document_private_items, dotdot_in_tuple_patterns, dotdoteq_in_patterns, dreg, dreg_low16, dreg_low8, drop, drop_in_place, drop_types_in_const, dropck_eyepatch, dropck_parametricity, dylib, dyn_metadata, dyn_trait, edition_macro_pats, eh_catch_typeinfo, eh_personality, emit_enum, emit_enum_variant, emit_enum_variant_arg, emit_struct, emit_struct_field, enable, enclosing_scope, encode, 
env, eq, ermsb_target_feature, exact_div, except, exchange_malloc, exclusive_range_pattern, exhaustive_integer_patterns, exhaustive_patterns, existential_type, exp2f32, exp2f64, expect, expected, expf32, expf64, explicit_generic_args_with_impl_trait, export_name, expr, extended_key_value_attributes, extern_absolute_paths, extern_crate_item_prelude, extern_crate_self, extern_in_paths, extern_prelude, extern_types, external_doc, f, f16c_target_feature, f32, f32_runtime, f64, f64_runtime, fabsf32, fabsf64, fadd_fast, fdiv_fast, feature, fence, fetch_update, ffi, ffi_const, ffi_pure, ffi_returns_twice, field, field_init_shorthand, file, fill, finish, flags, float_to_int_unchecked, floorf32, floorf64, fmaf32, fmaf64, fmt, fmt_as_str, fmt_internals, fmul_fast, fn_align, fn_must_use, fn_mut, fn_once, fn_once_output, forbid, forget, format, format_args, format_args_capture, format_args_nl, format_macro, freeze, freg, frem_fast, from, from_desugaring, from_generator, from_iter, from_method, from_output, from_residual, from_size_align_unchecked, from_trait, from_usize, fsub_fast, fundamental, future, future_trait, ge, gen_future, gen_kill, generator, generator_state, generators, generic_arg_infer, generic_associated_types, generic_param_attrs, get_context, global_allocator, global_asm, globs, gt, half_open_range_patterns, hash, hashmap_type, hashset_type, hexagon_target_feature, hidden, homogeneous_aggregate, html_favicon_url, html_logo_url, html_no_source, html_playground_url, html_root_url, hwaddress, i, i128, i128_type, i16, i32, i64, i8, ident, if_let, if_let_guard, if_while_or_patterns, ignore, impl_header_lifetime_elision, impl_lint_pass, impl_macros, impl_trait_in_bindings, import_shadowing, imported_main, in_band_lifetimes, include, include_bytes, include_str, inclusive_range_syntax, index, index_mut, infer_outlives_requirements, infer_static_outlives_requirements, inherent_associated_types, inlateout, inline, inline_const, inout, instruction_set, intel, into_iter, into_trait, intra_doc_pointers, intrinsics, irrefutable_let_patterns, isa_attribute, isize, issue, issue_5723_bootstrap, issue_tracker_base_url, item, item_like_imports, iter, iter_repeat, keyword, kind, kreg, label, label_break_value, lang, lang_items, large_assignments, lateout, lazy_normalization_consts, le, len, let_chains, lhs, lib, libc, lifetime, likely, line, link, link_args, link_cfg, link_llvm_intrinsics, link_name, link_ordinal, link_section, linkage, lint_reasons, literal, llvm_asm, load, local, local_inner_macros, log10f32, log10f64, log2f32, log2f64, log_syntax, logf32, logf64, loop_break_value, lt, macro_at_most_once_rep, macro_attributes_in_derive_output, macro_escape, macro_export, macro_lifetime_matcher, macro_literal_matcher, macro_reexport, macro_use, macro_vis_matcher, macros_in_extern, main, managed_boxes, manually_drop, map, marker, marker_trait_attr, masked, match_beginning_vert, match_default_bindings, maxnumf32, maxnumf64, may_dangle, maybe_uninit, maybe_uninit_uninit, maybe_uninit_zeroed, mem_discriminant, mem_drop, mem_forget, mem_replace, mem_size_of, mem_size_of_val, mem_uninitialized, mem_zeroed, member_constraints, memory, message, meta, metadata_type, min_align_of, min_align_of_val, min_const_fn, min_const_generics, min_const_unsafe_fn, min_specialization, min_type_alias_impl_trait, minnumf32, minnumf64, mips_target_feature, misc, mmx_reg, modifiers, module, module_path, more_qualified_paths, more_struct_aliases, movbe_target_feature, move_ref_pattern, move_size_limit, mul, mul_assign, 
mul_with_overflow, must_use, mut_ptr, mut_slice_ptr, naked, naked_functions, name, native_link_modifiers, native_link_modifiers_as_needed, native_link_modifiers_bundle, native_link_modifiers_verbatim, native_link_modifiers_whole_archive, ne, nearbyintf32, nearbyintf64, needs_allocator, needs_drop, needs_panic_runtime, neg, negate_unsigned, negative_impls, never, never_type, never_type_fallback, new, new_unchecked, next, nll, no, no_builtins, no_core, no_coverage, no_crate_inject, no_debug, no_default_passes, no_implicit_prelude, no_inline, no_link, no_main, no_mangle, no_niche, no_sanitize, no_stack_check, no_start, no_std, nomem, non_ascii_idents, non_exhaustive, non_modrs_mods, none_error, nontemporal_store, noop_method_borrow, noop_method_clone, noop_method_deref, noreturn, nostack, not, notable_trait, note, object_safe_for_dispatch, of, offset, omit_gdb_pretty_printer_section, on, on_unimplemented, oom, opaque, ops, opt_out_copy, optimize, optimize_attribute, optin_builtin_traits, option, option_env, option_type, options, or, or_patterns, other, out, overlapping_marker_traits, owned_box, packed, panic, panic_2015, panic_2021, panic_abort, panic_bounds_check, panic_fmt, panic_handler, panic_impl, panic_implementation, panic_info, panic_location, panic_runtime, panic_str, panic_unwind, panicking, param_attrs, parent_trait, partial_cmp, partial_ord, passes, pat, pat_param, path, pattern_parentheses, phantom_data, pin, pinned, platform_intrinsics, plugin, plugin_registrar, plugins, pointee_trait, pointer, pointer_trait, pointer_trait_fmt, poll, position, post_dash_lto: "post-lto", powerpc_target_feature, powf32, powf64, powif32, powif64, pre_dash_lto: "pre-lto", precise_pointer_size_matching, precision, pref_align_of, prefetch_read_data, prefetch_read_instruction, prefetch_write_data, prefetch_write_instruction, preg, prelude, prelude_import, preserves_flags, primitive, proc_dash_macro: "proc-macro", proc_macro, proc_macro_attribute, proc_macro_def_site, proc_macro_derive, proc_macro_expr, proc_macro_gen, proc_macro_hygiene, proc_macro_internals, proc_macro_mod, proc_macro_non_items, proc_macro_path_invoc, profiler_builtins, profiler_runtime, ptr_guaranteed_eq, ptr_guaranteed_ne, ptr_null, ptr_null_mut, ptr_offset_from, pub_macro_rules, pub_restricted, pure, pushpop_unsafe, qreg, qreg_low4, qreg_low8, quad_precision_float, question_mark, quote, range_inclusive_new, raw_dylib, raw_eq, raw_identifiers, raw_ref_op, re_rebalance_coherence, read_enum, read_enum_variant, read_enum_variant_arg, read_struct, read_struct_field, readonly, realloc, reason, receiver, recursion_limit, reexport_test_harness_main, ref_unwind_safe_trait, reference, reflect, reg, reg16, reg32, reg64, reg_abcd, reg_byte, reg_nonzero, reg_thumb, register_attr, register_tool, relaxed_adts, relaxed_struct_unsize, rem, rem_assign, repr, repr128, repr_align, repr_align_enum, repr_no_niche, repr_packed, repr_simd, repr_transparent, residual, result, result_type, rhs, rintf32, rintf64, riscv_target_feature, rlib, rotate_left, rotate_right, roundf32, roundf64, rt, rtm_target_feature, rust, rust_2015, rust_2015_preview, rust_2018, rust_2018_preview, rust_2021, rust_2021_preview, rust_begin_unwind, rust_eh_catch_typeinfo, rust_eh_personality, rust_eh_register_frames, rust_eh_unregister_frames, rust_oom, rustc, rustc_allocator, rustc_allocator_nounwind, rustc_allow_const_fn_unstable, rustc_attrs, rustc_builtin_macro, rustc_capture_analysis, rustc_clean, rustc_const_stable, rustc_const_unstable, rustc_conversion_suggestion, 
rustc_def_path, rustc_deprecated, rustc_diagnostic_item, rustc_diagnostic_macros, rustc_dirty, rustc_dummy, rustc_dump_env_program_clauses, rustc_dump_program_clauses, rustc_dump_user_substs, rustc_dump_vtable, rustc_error, rustc_evaluate_where_clauses, rustc_expected_cgu_reuse, rustc_if_this_changed, rustc_inherit_overflow_checks, rustc_insignificant_dtor, rustc_layout, rustc_layout_scalar_valid_range_end, rustc_layout_scalar_valid_range_start, rustc_legacy_const_generics, rustc_macro_transparency, rustc_main, rustc_mir, rustc_nonnull_optimization_guaranteed, rustc_object_lifetime_default, rustc_on_unimplemented, rustc_outlives, rustc_paren_sugar, rustc_partition_codegened, rustc_partition_reused, rustc_peek, rustc_peek_definite_init, rustc_peek_indirectly_mutable, rustc_peek_liveness, rustc_peek_maybe_init, rustc_peek_maybe_uninit, rustc_polymorphize_error, rustc_private, rustc_proc_macro_decls, rustc_promotable, rustc_regions, rustc_reservation_impl, rustc_serialize, rustc_skip_array_during_method_dispatch, rustc_specialization_trait, rustc_stable, rustc_std_internal_symbol, rustc_symbol_name, rustc_synthetic, rustc_test_marker, rustc_then_this_would_need, rustc_unsafe_specialization_marker, rustc_variance, rustdoc, rustfmt, rvalue_static_promotion, s, sanitize, sanitizer_runtime, saturating_add, saturating_sub, self_in_typedefs, self_struct_ctor, semitransparent, send_trait, shl, shl_assign, should_panic, shr, shr_assign, simd, simd_add, simd_and, simd_bitmask, simd_cast, simd_ceil, simd_div, simd_eq, simd_extract, simd_fabs, simd_fcos, simd_fexp, simd_fexp2, simd_ffi, simd_flog, simd_flog10, simd_flog2, simd_floor, simd_fma, simd_fmax, simd_fmin, simd_fpow, simd_fpowi, simd_fsin, simd_fsqrt, simd_gather, simd_ge, simd_gt, simd_insert, simd_le, simd_lt, simd_mul, simd_ne, simd_neg, simd_or, simd_reduce_add_ordered, simd_reduce_add_unordered, simd_reduce_all, simd_reduce_and, simd_reduce_any, simd_reduce_max, simd_reduce_max_nanless, simd_reduce_min, simd_reduce_min_nanless, simd_reduce_mul_ordered, simd_reduce_mul_unordered, simd_reduce_or, simd_reduce_xor, simd_rem, simd_round, simd_saturating_add, simd_saturating_sub, simd_scatter, simd_select, simd_select_bitmask, simd_shl, simd_shr, simd_sub, simd_trunc, simd_xor, since, sinf32, sinf64, size, size_of, size_of_val, sized, skip, slice, slice_alloc, slice_len_fn, slice_patterns, slice_u8, slice_u8_alloc, slicing_syntax, soft, specialization, speed, spotlight, sqrtf32, sqrtf64, sreg, sreg_low16, sse4a_target_feature, stable, staged_api, start, state, static_in_const, static_nobundle, static_recursion, staticlib, std, std_inject, std_panic, std_panic_2015_macro, std_panic_macro, stmt, stmt_expr_attributes, stop_after_dataflow, store, str, str_alloc, string_type, stringify, struct_field_attributes, struct_inherit, struct_variant, structural_match, structural_peq, structural_teq, sty, sub, sub_assign, sub_with_overflow, suggestion, sym, sync, sync_trait, t32, target_abi, target_arch, target_endian, target_env, target_family, target_feature, target_feature_11, target_has_atomic, target_has_atomic_equal_alignment, target_has_atomic_load_store, target_os, target_pointer_width, target_target_vendor, target_thread_local, target_vendor, task, tbm_target_feature, termination, termination_trait, termination_trait_test, test, test_2018_feature, test_accepted_feature, test_case, test_removed_feature, test_runner, then_with, thread, thread_local, tool_attributes, tool_lints, trace_macros, track_caller, trait_alias, trait_upcasting, transmute, 
transparent, transparent_enums, transparent_unions, trivial_bounds, truncf32, truncf64, try_blocks, try_from, try_from_trait, try_into, try_into_trait, try_trait_v2, tt, tuple, tuple_from_req, tuple_indexing, two_phase, ty, type_alias_enum_variants, type_alias_impl_trait, type_ascription, type_id, type_length_limit, type_macros, type_name, u128, u16, u32, u64, u8, unaligned_volatile_load, unaligned_volatile_store, unboxed_closures, unchecked_add, unchecked_div, unchecked_mul, unchecked_rem, unchecked_shl, unchecked_shr, unchecked_sub, underscore_const_names, underscore_imports, underscore_lifetimes, uniform_paths, unit, universal_impl_trait, unix, unlikely, unmarked_api, unnamed_fields, unpin, unreachable, unreachable_code, unrestricted_attribute_tokens, unsafe_block_in_unsafe_fn, unsafe_cell, unsafe_no_drop_flag, unsize, unsized_fn_params, unsized_locals, unsized_tuple_coercion, unstable, untagged_unions, unused_qualifications, unwind, unwind_attributes, unwind_safe_trait, unwrap, unwrap_or, use_extern_macros, use_nested_groups, used, usize, v1, va_arg, va_copy, va_end, va_list, va_start, val, var, variant_count, vec, vec_type, vecdeque_type, version, vis, visible_private_types, volatile, volatile_copy_memory, volatile_copy_nonoverlapping_memory, volatile_load, volatile_set_memory, volatile_store, vreg, vreg_low16, warn, wasm_abi, wasm_import_module, wasm_target_feature, while_let, width, windows, windows_subsystem, wrapping_add, wrapping_mul, wrapping_sub, wreg, write_bytes, x87_reg, xmm_reg, ymm_reg, zmm_reg, } } #[derive(Copy, Clone, Eq, HashStable_Generic, Encodable, Decodable)] pub struct Ident { pub name: Symbol, pub span: Span, } impl Ident { #[inline] /// Constructs a new identifier from a symbol and a span. pub const fn new(name: Symbol, span: Span) -> Ident { Ident { name, span } } /// Constructs a new identifier with a dummy span. #[inline] pub const fn with_dummy_span(name: Symbol) -> Ident { Ident::new(name, DUMMY_SP) } #[inline] pub fn invalid() -> Ident { Ident::with_dummy_span(kw::Empty) } /// Maps a string to an identifier with a dummy span. pub fn from_str(string: &str) -> Ident { Ident::with_dummy_span(Symbol::intern(string)) } /// Maps a string and a span to an identifier. pub fn from_str_and_span(string: &str, span: Span) -> Ident { Ident::new(Symbol::intern(string), span) } /// Replaces `lo` and `hi` with those from `span`, but keep hygiene context. pub fn with_span_pos(self, span: Span) -> Ident { Ident::new(self.name, span.with_ctxt(self.span.ctxt())) } pub fn without_first_quote(self) -> Ident { Ident::new(Symbol::intern(self.as_str().trim_start_matches('\'')), self.span) } /// "Normalize" ident for use in comparisons using "item hygiene". /// Identifiers with same string value become same if they came from the same macro 2.0 macro /// (e.g., `macro` item, but not `macro_rules` item) and stay different if they came from /// different macro 2.0 macros. /// Technically, this operation strips all non-opaque marks from ident's syntactic context. pub fn normalize_to_macros_2_0(self) -> Ident { Ident::new(self.name, self.span.normalize_to_macros_2_0()) } /// "Normalize" ident for use in comparisons using "local variable hygiene". /// Identifiers with same string value become same if they came from the same non-transparent /// macro (e.g., `macro` or `macro_rules!` items) and stay different if they came from different /// non-transparent macros. /// Technically, this operation strips all transparent marks from ident's syntactic context. 
pub fn normalize_to_macro_rules(self) -> Ident { Ident::new(self.name, self.span.normalize_to_macro_rules()) } /// Convert the name to a `SymbolStr`. This is a slowish operation because /// it requires locking the symbol interner. pub fn as_str(self) -> SymbolStr { self.name.as_str() } } impl PartialEq for Ident { fn eq(&self, rhs: &Self) -> bool { self.name == rhs.name && self.span.ctxt() == rhs.span.ctxt() } } impl Hash for Ident { fn hash<H: Hasher>(&self, state: &mut H) { self.name.hash(state); self.span.ctxt().hash(state); } } impl fmt::Debug for Ident { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(self, f)?; fmt::Debug::fmt(&self.span.ctxt(), f) } } /// This implementation is supposed to be used in error messages, so it's expected to be identical /// to printing the original identifier token written in source code (`token_to_string`), /// except that AST identifiers don't keep the rawness flag, so we have to guess it. impl fmt::Display for Ident { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&IdentPrinter::new(self.name, self.is_raw_guess(), None), f) } } /// This is the most general way to print identifiers. /// AST pretty-printer is used as a fallback for turning AST structures into token streams for /// proc macros. Additionally, proc macros may stringify their input and expect it to survive the /// stringification (especially true for proc macro derives written between Rust 1.15 and 1.30). /// So we need to somehow pretty-print `$crate` in a way preserving at least some of its /// hygiene data, most importantly the name of the crate it refers to. /// As a result we print `$crate` as `crate` if it refers to the local crate /// and as `::other_crate_name` if it refers to some other crate. /// Note that this is only done if the ident token is printed from inside of AST pretty-printing, /// but not otherwise. Pretty-printing is the only way for proc macros to discover token contents, /// so we should not perform this lossy conversion if the top level call to the pretty-printer was /// done for a token stream or a single token. pub struct IdentPrinter { symbol: Symbol, is_raw: bool, /// Span used for retrieving the crate name to which `$crate` refers, /// if this field is `None` then the `$crate` conversion doesn't happen. convert_dollar_crate: Option<Span>, } impl IdentPrinter { /// The most general `IdentPrinter` constructor. Do not use this. pub fn new(symbol: Symbol, is_raw: bool, convert_dollar_crate: Option<Span>) -> IdentPrinter { IdentPrinter { symbol, is_raw, convert_dollar_crate } } /// This implementation is supposed to be used when printing identifiers /// as a part of pretty-printing for larger AST pieces. /// Do not use this either. pub fn for_ast_ident(ident: Ident, is_raw: bool) -> IdentPrinter { IdentPrinter::new(ident.name, is_raw, Some(ident.span)) } } impl fmt::Display for IdentPrinter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.is_raw { f.write_str("r#")?; } else if self.symbol == kw::DollarCrate { if let Some(span) = self.convert_dollar_crate { let converted = span.ctxt().dollar_crate_name(); if !converted.is_path_segment_keyword() { f.write_str("::")?; } return fmt::Display::fmt(&converted, f); } } fmt::Display::fmt(&self.symbol, f) } } /// A newtype around `Ident` that calls [Ident::normalize_to_macro_rules] on /// construction. // FIXME(matthewj, petrochenkov) Use this more often, add a similar // `ModernIdent` struct and use that as well.
#[derive(Copy, Clone, Eq, PartialEq, Hash)] pub struct MacroRulesNormalizedIdent(Ident); impl MacroRulesNormalizedIdent { pub fn new(ident: Ident) -> Self { Self(ident.normalize_to_macro_rules()) } } impl fmt::Debug for MacroRulesNormalizedIdent { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self.0, f) } } impl fmt::Display for MacroRulesNormalizedIdent { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.0, f) } } /// An interned string. /// /// Internally, a `Symbol` is implemented as an index, and all operations /// (including hashing, equality, and ordering) operate on that index. The use /// of `rustc_index::newtype_index!` means that `Option<Symbol>` only takes up 4 bytes, /// because `rustc_index::newtype_index!` reserves the last 256 values for tagging purposes. /// /// Note that `Symbol` cannot directly be a `rustc_index::newtype_index!` because it /// implements `fmt::Debug`, `Encodable`, and `Decodable` in special ways. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Symbol(SymbolIndex); rustc_index::newtype_index! { pub struct SymbolIndex { .. } } impl Symbol { const fn new(n: u32) -> Self { Symbol(SymbolIndex::from_u32(n)) } /// Maps a string to its interned representation. pub fn intern(string: &str) -> Self { with_interner(|interner| interner.intern(string)) } /// Convert to a `SymbolStr`. This is a slowish operation because it /// requires locking the symbol interner. pub fn as_str(self) -> SymbolStr { with_interner(|interner| unsafe { SymbolStr { string: std::mem::transmute::<&str, &str>(interner.get(self)) } }) } pub fn as_u32(self) -> u32 { self.0.as_u32() } pub fn len(self) -> usize { with_interner(|interner| interner.get(self).len()) } pub fn is_empty(self) -> bool { self == kw::Empty } /// This method is supposed to be used in error messages, so it's expected to be /// identical to printing the original identifier token written in source code /// (`token_to_string`, `Ident::to_string`), except that symbols don't keep the rawness flag /// or edition, so we have to guess the rawness using the global edition. pub fn to_ident_string(self) -> String { Ident::with_dummy_span(self).to_string() } } impl fmt::Debug for Symbol { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self.as_str(), f) } } impl fmt::Display for Symbol { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.as_str(), f) } } impl<S: Encoder> Encodable<S> for Symbol { fn encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_str(&self.as_str()) } } impl<D: Decoder> Decodable<D> for Symbol { #[inline] fn decode(d: &mut D) -> Result<Symbol, D::Error> { Ok(Symbol::intern(&d.read_str()?)) } } impl<CTX> HashStable<CTX> for Symbol { #[inline] fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { self.as_str().hash_stable(hcx, hasher); } } impl<CTX> ToStableHashKey<CTX> for Symbol { type KeyType = SymbolStr; #[inline] fn to_stable_hash_key(&self, _: &CTX) -> SymbolStr { self.as_str() } } // The `&'static str`s in this type actually point into the arena. // // The `FxHashMap`+`Vec` pair could be replaced by `FxIndexSet`, but #75278 // found that to regress performance up to 2% in some cases. This might be // revisited after further improvements to `indexmap`. 
#[derive(Default)] pub struct Interner { arena: DroplessArena, names: FxHashMap<&'static str, Symbol>, strings: Vec<&'static str>, } impl Interner { fn prefill(init: &[&'static str]) -> Self { Interner { strings: init.into(), names: init.iter().copied().zip((0..).map(Symbol::new)).collect(), ..Default::default() } } #[inline] pub fn intern(&mut self, string: &str) -> Symbol { if let Some(&name) = self.names.get(string) { return name; } let name = Symbol::new(self.strings.len() as u32); // `from_utf8_unchecked` is safe since we just allocated a `&str` which is known to be // UTF-8. let string: &str = unsafe { str::from_utf8_unchecked(self.arena.alloc_slice(string.as_bytes())) }; // It is safe to extend the arena allocation to `'static` because we only access // these while the arena is still alive. let string: &'static str = unsafe { &*(string as *const str) }; self.strings.push(string); self.names.insert(string, name); name } // Get the symbol as a string. `Symbol::as_str()` should be used in // preference to this function. pub fn get(&self, symbol: Symbol) -> &str { self.strings[symbol.0.as_usize()] } } // This module has a very short name because it's used a lot. /// This module contains all the defined keyword `Symbol`s. /// /// Given that `kw` is imported, use them like `kw::keyword_name`. /// For example `kw::Loop` or `kw::Break`. pub mod kw { pub use super::kw_generated::*; } // This module has a very short name because it's used a lot. /// This module contains all the defined non-keyword `Symbol`s. /// /// Given that `sym` is imported, use them like `sym::symbol_name`. /// For example `sym::rustfmt` or `sym::u8`. pub mod sym { use super::Symbol; use std::convert::TryInto; #[doc(inline)] pub use super::sym_generated::*; // Used from a macro in `librustc_feature/accepted.rs` pub use super::kw::MacroRules as macro_rules; /// Get the symbol for an integer. /// /// The first few non-negative integers each have a static symbol and therefore /// are fast. pub fn integer<N: TryInto<usize> + Copy + ToString>(n: N) -> Symbol { if let Result::Ok(idx) = n.try_into() { if idx < 10 { return Symbol::new(super::SYMBOL_DIGITS_BASE + idx as u32); } } Symbol::intern(&n.to_string()) } } impl Symbol { fn is_special(self) -> bool { self <= kw::Underscore } fn is_used_keyword_always(self) -> bool { self >= kw::As && self <= kw::While } fn is_used_keyword_conditional(self, edition: impl FnOnce() -> Edition) -> bool { (self >= kw::Async && self <= kw::Dyn) && edition() >= Edition::Edition2018 } fn is_unused_keyword_always(self) -> bool { self >= kw::Abstract && self <= kw::Yield } fn is_unused_keyword_conditional(self, edition: impl FnOnce() -> Edition) -> bool { self == kw::Try && edition() >= Edition::Edition2018 } pub fn is_reserved(self, edition: impl Copy + FnOnce() -> Edition) -> bool { self.is_special() || self.is_used_keyword_always() || self.is_unused_keyword_always() || self.is_used_keyword_conditional(edition) || self.is_unused_keyword_conditional(edition) } /// A keyword or reserved identifier that can be used as a path segment. pub fn is_path_segment_keyword(self) -> bool { self == kw::Super || self == kw::SelfLower || self == kw::SelfUpper || self == kw::Crate || self == kw::PathRoot || self == kw::DollarCrate } /// Returns `true` if the symbol is `true` or `false`. pub fn is_bool_lit(self) -> bool { self == kw::True || self == kw::False } /// Returns `true` if this symbol can be a raw identifier. 
pub fn can_be_raw(self) -> bool { self != kw::Empty && self != kw::Underscore && !self.is_path_segment_keyword() } } impl Ident { // Returns `true` for reserved identifiers used internally for elided lifetimes, // unnamed method parameters, crate root module, error recovery etc. pub fn is_special(self) -> bool { self.name.is_special() } /// Returns `true` if the token is a keyword used in the language. pub fn is_used_keyword(self) -> bool { // Note: `span.edition()` is relatively expensive, don't call it unless necessary. self.name.is_used_keyword_always() || self.name.is_used_keyword_conditional(|| self.span.edition()) } /// Returns `true` if the token is a keyword reserved for possible future use. pub fn is_unused_keyword(self) -> bool { // Note: `span.edition()` is relatively expensive, don't call it unless necessary. self.name.is_unused_keyword_always() || self.name.is_unused_keyword_conditional(|| self.span.edition()) } /// Returns `true` if the token is either a special identifier or a keyword. pub fn is_reserved(self) -> bool { // Note: `span.edition()` is relatively expensive, don't call it unless necessary. self.name.is_reserved(|| self.span.edition()) } /// A keyword or reserved identifier that can be used as a path segment. pub fn is_path_segment_keyword(self) -> bool { self.name.is_path_segment_keyword() } /// We see this identifier in a normal identifier position, like variable name or a type. /// How was it written originally? Did it use the raw form? Let's try to guess. pub fn is_raw_guess(self) -> bool { self.name.can_be_raw() && self.is_reserved() } } #[inline] fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T { with_session_globals(|session_globals| f(&mut *session_globals.symbol_interner.lock())) } /// An alternative to [`Symbol`], useful when the chars within the symbol need to /// be accessed. It deliberately has limited functionality and should only be /// used for temporary values. /// /// Because the interner outlives any thread which uses this type, we can /// safely treat `string` which points to interner data, as an immortal string, /// as long as this type never crosses between threads. // // FIXME: ensure that the interner outlives any thread which uses `SymbolStr`, // by creating a new thread right after constructing the interner. #[derive(Clone, Eq, PartialOrd, Ord)] pub struct SymbolStr { string: &'static str, } // This impl allows a `SymbolStr` to be directly equated with a `String` or // `&str`. impl<T: std::ops::Deref<Target = str>> std::cmp::PartialEq<T> for SymbolStr { fn eq(&self, other: &T) -> bool { self.string == other.deref() } } impl !Send for SymbolStr {} impl !Sync for SymbolStr {} /// This impl means that if `ss` is a `SymbolStr`: /// - `*ss` is a `str`; /// - `&*ss` is a `&str` (and `match &*ss { ... }` is a common pattern). /// - `&ss as &str` is a `&str`, which means that `&ss` can be passed to a /// function expecting a `&str`. 
impl std::ops::Deref for SymbolStr { type Target = str; #[inline] fn deref(&self) -> &str { self.string } } impl fmt::Debug for SymbolStr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(self.string, f) } } impl fmt::Display for SymbolStr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(self.string, f) } } impl<CTX> HashStable<CTX> for SymbolStr { #[inline] fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { self.string.hash_stable(hcx, hasher) } } impl<CTX> ToStableHashKey<CTX> for SymbolStr { type KeyType = SymbolStr; #[inline] fn to_stable_hash_key(&self, _: &CTX) -> SymbolStr { self.clone() } }
atomic,
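The Interner above boils down to intern-once, index-forever: each distinct string is stored a single time, callers hold a small integer, and equality or hashing of symbols never touches string data again. Here is a minimal, dependency-free sketch of that idea (not the rustc implementation, which additionally uses an arena so each string is stored exactly once):

use std::collections::HashMap;

/// Minimal sketch of a string interner: each distinct string is stored once
/// and identified by a small integer, so comparisons and hashing are O(1).
#[derive(Default)]
struct MiniInterner {
    names: HashMap<String, u32>,
    strings: Vec<String>,
}

impl MiniInterner {
    fn intern(&mut self, s: &str) -> u32 {
        if let Some(&idx) = self.names.get(s) {
            return idx; // already interned, reuse the index
        }
        let idx = self.strings.len() as u32;
        self.strings.push(s.to_owned());
        self.names.insert(s.to_owned(), idx);
        idx
    }

    fn get(&self, idx: u32) -> &str {
        &self.strings[idx as usize]
    }
}

fn main() {
    let mut interner = MiniInterner::default();
    let a = interner.intern("usize");
    let b = interner.intern("usize");
    assert_eq!(a, b); // same string, same symbol
    assert_eq!(interner.get(a), "usize");
}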
lattice_boltzmann_method_python_jax.py
r""" Solves the incompressible Navier Stokes equations using the Lattice-Boltzmann Method¹. The scenario is the flow around a cylinder in 2D which yields a van Karman vortex street. periodic +-------------------------------------------------------------+ | | | ---> | | | | ---> **** | | ******** | inflow | ---> ********** | outflow | ******** | | ---> **** | | | | ---> | | | +-------------------------------------------------------------+ periodic -> uniform inflow profile with only horizontal velocities at left boundary -> outflow boundary at the right -> top and bottom boundary connected by periodicity -> the circle in the center (representing a slice from the 3d cylinder) uses a no-slip Boundary Condition -> initially, fluid is NOT at rest and has the horizontal velocity profile all over the domain ¹ To be fully correct, LBM considers the compressible Navier-Stokes Equations. This can also be seen by the fact that we have a changing macroscopic density over the domain and that we actively use it throughout the computations. However, our flow speeds are below the 0.3 Mach limit which results in only minor density fluctuations. Hence, the fluid behaves almost incompressible. ------ Solution strategy: Discretize the domain into a Cartesian mesh. Each grid vertex is associated with 9 discrete velocities (D2Q9) and 2 macroscopic velocities. Then iterate over time. 1. Apply outflow boundary condition on the right boundary 2. Compute Macroscopic Quantities (density and velocities) 3. Apply Inflow Profile by Zou/He Dirichlet Boundary Condition on the left boundary 4. Compute the discrete equilibria velocities 5. Perform a Collision step according to BGK (Bhatnagar–Gross–Krook) 6. Apply Bounce-Back Boundary Conditions on the cylinder obstacle 7. Stream alongside the lattice velocities 8. Advance in time (repeat the loop) The 7th step implicitly yields the periodic Boundary Conditions at the top and bottom boundary. ------ Employed Discretization: D2Q9 grid, i.e. 2-dim space with 9 discrete velocities per node. In Other words the 2d space is discretized into N_x by N_y by 9 points. 6 2 5 \ | / 3 - 0 - 1 / | \ 7 4 8 Therefore we have the shapes: - macroscopic velocity : (N_x, N_y, 2) - discrete velocity : (N_x, N_y, 9) - density : (N_x, N_y) ------ Lattice Boltzmann Computations Density: ρ = ∑ᵢ fᵢ
u = 1/ρ ∑ᵢ fᵢ cᵢ Equilibrium: fᵢᵉ = ρ Wᵢ (1 + 3 cᵢ ⋅ u + 9/2 (cᵢ ⋅ u)² − 3/2 ||u||₂²) BGK Collision: fᵢ ← fᵢ − ω (fᵢ − fᵢᵉ) with the following quantities: fᵢ : Discrete velocities fᵢᵉ : Equilibrium discrete velocities ρ : Density ∑ᵢ : Summation over all discrete velocities cᵢ : Lattice Velocities Wᵢ : Lattice Weights ω : Relaxation factor ------ The flow configuration is defined using the Reynolds Number Re = (U R) / ν with: Re : Reynolds Number U : Inflow Velocity R : Cylinder Radius ν : Kinematic Viscosity Can be re-arranged in terms of the kinematic viscosity ν = (U R) / Re Then the relaxation factor is computed according to ω = 1 / (3 ν + 0.5) ------ Note that this scheme can become unstable for Reynolds numbers >~ 350 ² ² Note that the stability of the D2Q9 scheme is mathematically not linked to the Reynolds number. Just use this as a reference. Stability for this scheme is related to the velocity magnitude. Consequently, the actual limiting factor is the Mach number (the ratio between velocity magnitude and the speed of sound). """ import jax import jax.numpy as jnp import matplotlib.pyplot as plt import cmasher as cmr from tqdm import tqdm N_ITERATIONS = 15_000 REYNOLDS_NUMBER = 80 N_POINTS_X = 300 N_POINTS_Y = 50 CYLINDER_CENTER_INDEX_X = N_POINTS_X // 5 CYLINDER_CENTER_INDEX_Y = N_POINTS_Y // 2 CYLINDER_RADIUS_INDICES = N_POINTS_Y // 9 MAX_HORIZONTAL_INFLOW_VELOCITY = 0.04 VISUALIZE = True PLOT_EVERY_N_STEPS = 100 SKIP_FIRST_N_ITERATIONS = 5000 r""" LBM Grid: D2Q9 6 2 5 \ | / 3 - 0 - 1 / | \ 7 4 8 """ N_DISCRETE_VELOCITIES = 9 LATTICE_VELOCITIES = jnp.array([ [ 0, 1, 0, -1, 0, 1, -1, -1, 1,], [ 0, 0, 1, 0, -1, 1, 1, -1, -1,] ]) LATTICE_INDICES = jnp.array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, ]) OPPOSITE_LATTICE_INDICES = jnp.array([ 0, 3, 4, 1, 2, 7, 8, 5, 6, ]) LATTICE_WEIGHTS = jnp.array([ 4/9, # Center Velocity [0,] 1/9, 1/9, 1/9, 1/9, # Axis-Aligned Velocities [1, 2, 3, 4] 1/36, 1/36, 1/36, 1/36, # 45° Velocities [5, 6, 7, 8] ]) RIGHT_VELOCITIES = jnp.array([1, 5, 8]) UP_VELOCITIES = jnp.array([2, 5, 6]) LEFT_VELOCITIES = jnp.array([3, 6, 7]) DOWN_VELOCITIES = jnp.array([4, 7, 8]) PURE_VERTICAL_VELOCITIES = jnp.array([0, 2, 4]) PURE_HORIZONTAL_VELOCITIES = jnp.array([0, 1, 3]) def get_density(discrete_velocities): density = jnp.sum(discrete_velocities, axis=-1) return density def get_macroscopic_velocities(discrete_velocities, density): macroscopic_velocities = jnp.einsum( "NMQ,dQ->NMd", discrete_velocities, LATTICE_VELOCITIES, ) / density[..., jnp.newaxis] return macroscopic_velocities def get_equilibrium_discrete_velocities(macroscopic_velocities, density): projected_discrete_velocities = jnp.einsum( "dQ,NMd->NMQ", LATTICE_VELOCITIES, macroscopic_velocities, ) macroscopic_velocity_magnitude = jnp.linalg.norm( macroscopic_velocities, axis=-1, ord=2, ) equilibrium_discrete_velocities = ( density[..., jnp.newaxis] * LATTICE_WEIGHTS[jnp.newaxis, jnp.newaxis, :] * ( 1 + 3 * projected_discrete_velocities + 9/2 * projected_discrete_velocities**2 - 3/2 * macroscopic_velocity_magnitude[..., jnp.newaxis]**2 ) ) return equilibrium_discrete_velocities def main(): jax.config.update("jax_enable_x64", True) kinematic_viscosity = ( ( MAX_HORIZONTAL_INFLOW_VELOCITY * CYLINDER_RADIUS_INDICES ) / ( REYNOLDS_NUMBER ) ) relaxation_omega = ( ( 1.0 ) / ( 3.0 * kinematic_viscosity + 0.5 ) ) # Define a mesh x = jnp.arange(N_POINTS_X) y = jnp.arange(N_POINTS_Y) X, Y = jnp.meshgrid(x, y, indexing="ij") # Obstacle Mask: An array with the same shape as X or Y, containing True if the # point belongs to the
obstacle and False if not obstacle_mask = ( jnp.sqrt( ( X - CYLINDER_CENTER_INDEX_X )**2 + ( Y - CYLINDER_CENTER_INDEX_Y )**2 ) < CYLINDER_RADIUS_INDICES ) velocity_profile = jnp.zeros((N_POINTS_X, N_POINTS_Y, 2)) velocity_profile = velocity_profile.at[:, :, 0].set(MAX_HORIZONTAL_INFLOW_VELOCITY) @jax.jit def update(discrete_velocities_prev): # (1) Prescribe the outflow BC on the right boundary discrete_velocities_prev = discrete_velocities_prev.at[-1, :, LEFT_VELOCITIES].set( discrete_velocities_prev[-2, :, LEFT_VELOCITIES] ) # (2) Macroscopic Velocities density_prev = get_density(discrete_velocities_prev) macroscopic_velocities_prev = get_macroscopic_velocities( discrete_velocities_prev, density_prev, ) # (3) Prescribe Inflow Dirichlet BC using Zou/He scheme macroscopic_velocities_prev =\ macroscopic_velocities_prev.at[0, 1:-1, :].set( velocity_profile[0, 1:-1, :] ) density_prev = density_prev.at[0, :].set( ( get_density(discrete_velocities_prev[0, :, PURE_VERTICAL_VELOCITIES].T) + 2 * get_density(discrete_velocities_prev[0, :, LEFT_VELOCITIES].T) ) / ( 1 - macroscopic_velocities_prev[0, :, 0] ) ) # (4) Compute the discrete equilibrium velocities equilibrium_discrete_velocities = get_equilibrium_discrete_velocities( macroscopic_velocities_prev, density_prev, ) # (3) Belongs to the Zou/He scheme discrete_velocities_prev = \ discrete_velocities_prev.at[0, :, RIGHT_VELOCITIES].set( equilibrium_discrete_velocities[0, :, RIGHT_VELOCITIES] ) # (5) Collide according to BGK discrete_velocities_post_collision = ( discrete_velocities_prev - relaxation_omega * ( discrete_velocities_prev - equilibrium_discrete_velocities ) ) # (6) Bounce-Back Boundary Conditions to enforce the no-slip for i in range(N_DISCRETE_VELOCITIES): discrete_velocities_post_collision =\ discrete_velocities_post_collision.at[obstacle_mask, LATTICE_INDICES[i]].set( discrete_velocities_prev[obstacle_mask, OPPOSITE_LATTICE_INDICES[i]] ) # (7) Stream alongside lattice velocities discrete_velocities_streamed = discrete_velocities_post_collision for i in range(N_DISCRETE_VELOCITIES): discrete_velocities_streamed = discrete_velocities_streamed.at[:, :, i].set( jnp.roll( jnp.roll( discrete_velocities_post_collision[:, :, i], LATTICE_VELOCITIES[0, i], axis=0, ), LATTICE_VELOCITIES[1, i], axis=1, ) ) return discrete_velocities_streamed discrete_velocities_prev = get_equilibrium_discrete_velocities( velocity_profile, jnp.ones((N_POINTS_X, N_POINTS_Y)), ) plt.style.use("dark_background") plt.figure(figsize=(15, 6), dpi=100) for iteration_index in tqdm(range(N_ITERATIONS)): discrete_velocities_next = update(discrete_velocities_prev) discrete_velocities_prev = discrete_velocities_next if iteration_index % PLOT_EVERY_N_STEPS == 0 and VISUALIZE and iteration_index > SKIP_FIRST_N_ITERATIONS: density = get_density(discrete_velocities_next) macroscopic_velocities = get_macroscopic_velocities( discrete_velocities_next, density, ) velocity_magnitude = jnp.linalg.norm( macroscopic_velocities, axis=-1, ord=2, ) d_u__d_x, d_u__d_y = jnp.gradient(macroscopic_velocities[..., 0]) d_v__d_x, d_v__d_y = jnp.gradient(macroscopic_velocities[..., 1]) curl = (d_u__d_y - d_v__d_x) # Velocity Magnitude Contour Plot in the top plt.subplot(211) plt.contourf( X, Y, velocity_magnitude, levels=50, cmap=cmr.amber, ) plt.colorbar().set_label("Velocity Magnitude") plt.gca().add_patch(plt.Circle( (CYLINDER_CENTER_INDEX_X, CYLINDER_CENTER_INDEX_Y), CYLINDER_RADIUS_INDICES, color="darkgreen", )) # Vorticity Magnitude Contour Plot in the bottom plt.subplot(212) plt.contourf( X,
Y, curl, levels=50, cmap=cmr.redshift, vmin=-0.02, vmax=0.02, ) plt.colorbar().set_label("Vorticity Magnitude") plt.gca().add_patch(plt.Circle( (CYLINDER_CENTER_INDEX_X, CYLINDER_CENTER_INDEX_Y), CYLINDER_RADIUS_INDICES, color="darkgreen", )) plt.draw() plt.pause(0.0001) plt.clf() if VISUALIZE: plt.show() if __name__ == "__main__": main()
Velocities:
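As a quick numerical cross-check of the formulas in the docstring (using the same constants as the script: U = 0.04, R = 50 // 9 = 5 lattice units, Re = 80), the relaxation factor lands safely below the BGK stability limit of 2, and the D2Q9 equilibrium reproduces the density and momentum moments exactly. A standalone sketch:

import numpy as np

# Relaxation factor for the constants used above (U = 0.04, R = 5, Re = 80).
nu = 0.04 * 5 / 80               # kinematic viscosity = 0.0025
omega = 1.0 / (3.0 * nu + 0.5)   # ~1.9704, below the BGK stability limit of 2

# D2Q9 moment check: sum_i f_i^eq == rho and sum_i f_i^eq c_i == rho * u.
c = np.array([[0, 1, 0, -1, 0, 1, -1, -1, 1],
              [0, 0, 1, 0, -1, 1, 1, -1, -1]])
w = np.array([4/9] + [1/9]*4 + [1/36]*4)
rho, u = 1.0, np.array([0.04, 0.0])
cu = c.T @ u
f_eq = rho * w * (1 + 3*cu + 4.5*cu**2 - 1.5*(u @ u))
assert np.isclose(f_eq.sum(), rho)      # density moment holds exactly
assert np.allclose(c @ f_eq, rho * u)   # momentum moment holds exactly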
distancePlusAmplitude.py
import argparse import numpy as np import cv2 from TauLidarCommon.frame import FrameType from TauLidarCamera.camera import Camera def setup(serialPort=None): port = None camera = None
ports = Camera.scan() ## Scan for available Tau Camera devices if len(ports) > 0: port = ports[0] else: port = serialPort if port is not None: Camera.setRange(0, 4500) ## points in the distance range to be colored camera = Camera.open(port) ## Open the first available Tau Camera camera.setModulationChannel(0) ## autoChannelEnabled: 0, channel: 0 camera.setIntegrationTime3d(0, 1000) ## set integration time 0: 1000 camera.setMinimalAmplitude(0, 10) ## set minimal amplitude 0: 10 cameraInfo = camera.info() print("\nToF camera opened successfully:") print(" model: %s" % cameraInfo.model) print(" firmware: %s" % cameraInfo.firmware) print(" uid: %s" % cameraInfo.uid) print(" resolution: %s" % cameraInfo.resolution) print(" port: %s" % cameraInfo.port) print("\nPress Esc key over GUI or Ctrl-c in terminal to shutdown ...") cv2.namedWindow('Depth Map') cv2.namedWindow('Amplitude') cv2.moveWindow('Depth Map', 20, 20) cv2.moveWindow('Amplitude', 20, 360) return camera def run(camera): while True: frame = camera.readFrame(FrameType.DISTANCE_AMPLITUDE) if frame: mat_depth_rgb = np.frombuffer(frame.data_depth_rgb, dtype=np.uint16, count=-1, offset=0).reshape(frame.height, frame.width, 3) mat_depth_rgb = mat_depth_rgb.astype(np.uint8) mat_amplitude = np.frombuffer(frame.data_amplitude, dtype=np.float32, count=-1, offset=0).reshape(frame.height, frame.width) mat_amplitude = mat_amplitude.astype(np.uint8) # Upscaling the image upscale = 4 depth_img = cv2.resize(mat_depth_rgb, (frame.width*upscale, frame.height*upscale)) amplitude_img = cv2.resize(mat_amplitude, (frame.width*upscale, frame.height*upscale)) cv2.imshow('Depth Map', depth_img) cv2.imshow('Amplitude', amplitude_img) if cv2.waitKey(1) == 27: break def cleanup(camera): print('\nShutting down ...') cv2.destroyAllWindows() camera.close() if __name__ == "__main__": parser = argparse.ArgumentParser(description='Sample program to demonstrate acquiring frames with both distance / depth and amplitude data from the Tau LiDAR Camera') parser.add_argument('--port', metavar='<serial port>', default=None, help='Specify a serial port for the Tau Camera') args = parser.parse_args() camera = setup(args.port) if camera: try: run(camera) except Exception as e: print(e) cleanup(camera)
# if no serial port is specified, scan for available Tau Camera devices if serialPort is None:
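One thing to watch in the amplitude path above: casting the float32 amplitude straight to uint8 clips anything above 255. If the amplitude window looks washed out or mostly black, a min-max normalization before the cast usually helps. A sketch (not part of the original sample):

import numpy as np
import cv2

def amplitude_to_u8(mat_amplitude: np.ndarray) -> np.ndarray:
    # Stretch whatever range the sensor returns to the full 0..255 display
    # range instead of truncating values above 255 with a plain astype(np.uint8).
    normalized = cv2.normalize(mat_amplitude, None, 0, 255, cv2.NORM_MINMAX)
    return normalized.astype(np.uint8)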
model_handler.py
import os from typing import Any, Dict, List, Union import onnx import onnxoptimizer import mlrun from mlrun.artifacts import Artifact from mlrun.frameworks._common import ModelHandler class ONNXModelHandler(ModelHandler): """ Class for handling an ONNX model, enabling loading and saving it during runs. """ def __init__( self, model_name: str, model_path: str = None, model: onnx.ModelProto = None, context: mlrun.MLClientCtx = None, ): """ Initialize the handler. The model can be set here so it won't require loading. Notice that if the model path given is of a previously logged model (store model object path), all of the other configurations will be loaded automatically as they were logged with the model, hence they are optional. :param model_name: The model name for saving and logging the model. :param model_path: Path to the model's directory to load it from. The onnx file must start with the given model name and the directory must contain the onnx file. The model path can also be passed as a model object path in the following format: 'store://models/<PROJECT_NAME>/<MODEL_NAME>:<VERSION>'. :param model: Model to handle or None in case loading parameters were supplied. :param context: MLRun context to work with for logging the model. :raise MLRunInvalidArgumentError: There was no model or model directory supplied. """ # Setup the base handler class: super(ONNXModelHandler, self).__init__( model_name=model_name, model_path=model_path, model=model, context=context, ) # TODO: output_path won't work well with logging artifacts. Need to look into changing the logic of 'log_artifact'. def
( self, output_path: str = None, *args, **kwargs ) -> Union[Dict[str, Artifact], None]: """ Save the handled model at the given output path. If a MLRun context is available, the saved model files will be logged and returned as artifacts. :param output_path: The full path to the directory to save the handled model at. If not given, the context stored will be used to save the model in the defaulted artifacts location. :return The saved model artifacts dictionary if context is available and None otherwise. """ super(ONNXModelHandler, self).save(output_path=output_path) # Setup the returning model artifacts list: artifacts = {} # type: Dict[str, Artifact] model_file = None # type: str # Set the output path: if output_path is None: output_path = os.path.join(self._context.artifact_path, self._model_name) # Save the model: model_file = "{}.onnx".format(self._model_name) onnx.save(self._model, model_file) # Update the paths and log artifacts if context is available: self._model_file = model_file if self._context is not None: artifacts[ self._get_model_file_artifact_name() ] = self._context.log_artifact( model_file, local_path=model_file, artifact_path=output_path, db_key=False, ) return artifacts if self._context is not None else None def load(self, *args, **kwargs): """ Load the specified model in this handler. """ super(ONNXModelHandler, self).load() # Check that the model is well formed: onnx.checker.check_model(self._model_file) # Load the ONNX model: self._model = onnx.load(self._model_file) def log( self, labels: Dict[str, Union[str, int, float]] = None, parameters: Dict[str, Union[str, int, float]] = None, extra_data: Dict[str, Any] = None, artifacts: Dict[str, Artifact] = None, ): """ Log the model held by this handler into the MLRun context provided. :param labels: Labels to log the model with. :param parameters: Parameters to log with the model. :param extra_data: Extra data to log with the model. :param artifacts: Artifacts to log the model with. Will be added to the extra data. :raise MLRunInvalidArgumentError: In case a context is missing or there is no model in this handler. """ super(ONNXModelHandler, self).log( labels=labels, parameters=parameters, extra_data=extra_data, artifacts=artifacts, ) # Set default values: labels = {} if labels is None else labels parameters = {} if parameters is None else parameters extra_data = {} if extra_data is None else extra_data artifacts = {} if artifacts is None else artifacts # Save the model: model_artifacts = self.save() # Log the model: self._context.log_model( self._model_name, db_key=self._model_name, model_file=self._model_file, framework="onnx", labels=labels, parameters=parameters, metrics=self._context.results, extra_data={**model_artifacts, **artifacts, **extra_data}, ) def optimize(self, optimizations: List[str] = None, fixed_point: bool = False): """ Use ONNX optimizer to optimize the ONNX model. The optimizations supported can be seen by calling 'onnxoptimizer.get_available_passes()' :param optimizations: List of possible optimizations. If None, all of the optimizations will be used. Defaulted to None. :param fixed_point: Optimize the weights using fixed point. Defaulted to False. 
""" # Set the ONNX optimizations list: onnx_optimizations = onnxoptimizer.get_fuse_and_elimination_passes() if optimizations is None: # Set to all optimizations: optimizations = onnx_optimizations # Optimize the model: self._model = onnxoptimizer.optimize( self._model, passes=optimizations, fixed_point=fixed_point ) def to_onnx(self, *args, **kwargs) -> onnx.ModelProto: """ Convert the model in this handler to an ONNX model. In this case the handled ONNX model will simply be returned. :return: The current handled ONNX model as there is nothing to convert. """ return self._model def _collect_files_from_store_object(self): """ If the model path given is of a store object, collect the needed model files into this handler for later loading the model. """ # Get the artifact and model file along with its extra data: ( self._model_file, self._model_artifact, self._extra_data, ) = mlrun.artifacts.get_model(self._model_path) # Get the model file: if self._model_file.endswith(".pkl"): self._model_file = self._extra_data[ self._get_model_file_artifact_name() ].local() def _collect_files_from_local_path(self): """ If the model path given is of a local path, search for the needed model files and collect them into this handler for later loading the model. :raise MLRunNotFoundError: If the onnx file was not found. """ self._model_file = os.path.join( self._model_path, "{}.onnx".format(self._model_name) ) if not os.path.exists(self._model_file): raise mlrun.errors.MLRunNotFoundError( "The model file '{}.onnx' was not found within the given 'model_path': " "'{}'".format(self._model_name, self._model_path) )
save
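A sketch of a typical round trip with this handler, assuming an MLRun context object and a hypothetical './models' directory (names are illustrative, based only on the API shown above):

# Hypothetical usage sketch; 'context' would come from an MLRun run.
handler = ONNXModelHandler(model_name="my_model", model_path="./models", context=context)
handler.load()                              # reads and checks ./models/my_model.onnx
handler.optimize()                          # defaults to all fuse/elimination passes
handler.log(labels={"framework": "onnx"})   # saves and logs the model via the context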
debug_despawn2.rs
// NOTE: this demo is great for debugging despawning. // It was extracted from one of the debug branches by @audunhalland // in https://github.com/dimforge/bevy_rapier/issues/75 use bevy::prelude::*; use bevy::render::pass::ClearColor; use bevy_rapier2d::prelude::*; fn main() { App::build() .init_resource::<Game>() .insert_resource(ClearColor(Color::rgb(0.0, 0.0, 0.0))) .insert_resource(Msaa::default()) .add_plugins(DefaultPlugins) .add_plugin(bevy_winit::WinitPlugin::default()) .add_plugin(bevy_wgpu::WgpuPlugin::default()) .add_startup_system(setup_game.system()) .add_system(cube_sleep_detection.system()) .add_plugin(RapierPhysicsPlugin::<NoUserData>::default()) .run(); } const BLOCK_PX_SIZE: f32 = 30.0; // In terms of block size: const FLOOR_BLOCK_HEIGHT: f32 = 2.0; #[derive(Default)] struct Stats { generated_blocks: i32, cleared_blocks: i32, lost_blocks: i32, lost_cube: bool, } impl Stats { fn health(&self) -> f32 { if self.lost_cube { 0.0 } else if self.cleared_blocks == 0 { if self.lost_blocks > 0 { 0.0 } else { 1.0 } } else { let lost_ratio = self.lost_blocks as f32 / self.cleared_blocks as f32; 1.0 - lost_ratio } } } struct Game { n_lanes: usize, n_rows: usize, stats: Stats, cube_colors: Vec<Handle<ColorMaterial>>, current_cube_joints: Vec<Entity>, } impl Game { fn floor_y(&self) -> f32 { -(self.n_rows as f32) * 0.5 } fn left_wall_x(&self) -> f32 { -(self.n_lanes as f32) * 0.5 } } impl Default for Game { fn default() -> Self { Self { n_lanes: 10, n_rows: 20, stats: Stats::default(), cube_colors: vec![], current_cube_joints: vec![], } } } fn byte_rgb(r: u8, g: u8, b: u8) -> Color { Color::rgb(r as f32 / 255.0, g as f32 / 255.0, b as f32 / 255.0) } fn setup_game( mut commands: Commands, mut game: ResMut<Game>, mut rapier_config: ResMut<RapierConfiguration>, mut materials: ResMut<Assets<ColorMaterial>>, ) { // While we want our sprite to look ~40 px square, we want to keep the physics units smaller // to prevent float rounding problems. To do this, we set the scale factor in RapierConfiguration // and divide our sprite_size by the scale.
rapier_config.scale = BLOCK_PX_SIZE; game.cube_colors = vec![ materials.add(byte_rgb(0, 244, 243).into()), materials.add(byte_rgb(238, 243, 0).into()), materials.add(byte_rgb(177, 0, 254).into()), materials.add(byte_rgb(27, 0, 250).into()), materials.add(byte_rgb(252, 157, 0).into()), materials.add(byte_rgb(0, 247, 0).into()), materials.add(byte_rgb(255, 0, 0).into()), ]; commands .spawn() .insert_bundle(OrthographicCameraBundle::new_2d()) .id(); setup_board(&mut commands, &*game, materials); // initial cube spawn_cube(&mut commands, &mut game); } #[derive(Clone, Copy, Debug)] enum CubeKind { I, } impl CubeKind { fn random() -> Self { Self::I } fn layout(&self) -> CubeLayout { CubeLayout { coords: [(1, 1), (1, 0), (1, -1), (1, -2)], joints: vec![(0, 1), (1, 2), (2, 3)], } } } struct CubeLayout { coords: [(i32, i32); 4], joints: Vec<(usize, usize)>, } struct Block; fn setup_board(commands: &mut Commands, game: &Game, mut materials: ResMut<Assets<ColorMaterial>>) { let floor_y = game.floor_y(); // Add floor commands .spawn() .insert_bundle(SpriteBundle { material: materials.add(Color::rgb(0.5, 0.5, 0.5).into()), sprite: Sprite::new(Vec2::new( game.n_lanes as f32 * BLOCK_PX_SIZE, FLOOR_BLOCK_HEIGHT * BLOCK_PX_SIZE, )), ..Default::default() }) .insert_bundle(RigidBodyBundle { body_type: bevy_rapier2d::prelude::RigidBodyType::Static, position: [0.0, floor_y - (FLOOR_BLOCK_HEIGHT * 0.5)].into(), ..RigidBodyBundle::default() }) .insert_bundle(ColliderBundle { shape: ColliderShape::cuboid(game.n_lanes as f32 * 0.5, FLOOR_BLOCK_HEIGHT * 0.5), ..ColliderBundle::default() }) .insert(RigidBodyPositionSync::Discrete); } fn spawn_cube(commands: &mut Commands, game: &mut Game) { let kind = CubeKind::random(); let CubeLayout { coords, joints } = kind.layout(); let block_entities: Vec<Entity> = coords .iter() .map(|(x, y)| { let lane = (game.n_lanes as i32 / 2) - 1 + x; let row = game.n_rows as i32 - 1 + y; spawn_block(commands, game, kind, lane, row) }) .collect(); let joint_entities: Vec<Entity> = joints .iter() .map(|(i, j)| { let x_dir = coords[*j].0 as f32 - coords[*i].0 as f32; let y_dir = coords[*j].1 as f32 - coords[*i].1 as f32; let anchor_1 = Vec2::new(x_dir * 0.5, y_dir * 0.5).into(); let anchor_2 = Vec2::new(x_dir * -0.5, y_dir * -0.5).into(); commands .spawn() .insert_bundle((JointBuilderComponent::new( BallJoint::new(anchor_1, anchor_2), block_entities[*i], block_entities[*j], ),)) .id() }) .collect(); game.stats.generated_blocks += block_entities.len() as i32; game.current_cube_joints = joint_entities; } fn spawn_block( commands: &mut Commands, game: &Game, kind: CubeKind, lane: i32, row: i32, ) -> Entity { // x, y is the center of the block let x = game.left_wall_x() + lane as f32 + 0.5; let y = game.floor_y() + row as f32 + 0.5; // Game gets more difficult when this is lower: let linear_damping = 3.0; commands .spawn() .insert_bundle(SpriteBundle { material: game.cube_colors[kind as usize].clone(), sprite: Sprite::new(Vec2::new(BLOCK_PX_SIZE, BLOCK_PX_SIZE)), ..Default::default() }) .insert_bundle(RigidBodyBundle { position: [x, y].into(), damping: RigidBodyDamping { linear_damping, angular_damping: 0.0, }, ..RigidBodyBundle::default() }) .insert_bundle(ColliderBundle { shape: ColliderShape::cuboid(0.5, 0.5), ..ColliderBundle::default() }) .insert(RigidBodyPositionSync::Discrete) .insert(Block) .id() } fn cube_sleep_detection( mut commands: Commands, mut game: ResMut<Game>, block_query: Query<(Entity, &RigidBodyPosition)>, )
fn clear_filled_rows( commands: &mut Commands, game: &mut Game, block_query: Query<(Entity, &RigidBodyPosition)>, ) { let mut blocks_per_row: Vec<Vec<Entity>> = (0..game.n_rows).map(|_| vec![]).collect(); let floor_y = game.floor_y(); for (block_entity, position) in block_query.iter() { let floor_distance = position.position.translation.y - floor_y; // The center of a block on the floor is 0.5 above the floor, so .floor() the number ;) let row = floor_distance.floor() as i32; if row >= 0 && row < game.n_rows as i32 { blocks_per_row[row as usize].push(block_entity); } } for row_blocks in blocks_per_row { if row_blocks.len() == game.n_lanes as usize { game.stats.cleared_blocks += game.n_lanes as i32; for block_entity in row_blocks { commands.entity(block_entity).despawn_recursive(); } } } }
{ // Debug stub: sleep detection is short-circuited to true, so joints are despawned and a new cube is spawned every frame, which is exactly the churn this despawn demo wants to exercise. let all_blocks_sleeping = true; if all_blocks_sleeping { for joint in &game.current_cube_joints { commands.entity(*joint).despawn(); } clear_filled_rows(&mut commands, &mut game, block_query); if game.stats.health() > 0.0 { spawn_cube(&mut commands, &mut game); } } }
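The row bucketing in clear_filled_rows is worth spelling out, since it is where an off-by-one would creep in: a block resting in row r has its center r + 0.5 units above the floor, so flooring the center-to-floor distance recovers r. A dependency-free sketch of just that mapping:

/// Map a block's center height above the floor back to its integer row.
/// A block sitting in row r has its center at r + 0.5 above the floor.
fn row_of(center_y: f32, floor_y: f32) -> i32 {
    (center_y - floor_y).floor() as i32
}

fn main() {
    let floor_y = -10.0;
    assert_eq!(row_of(-9.5, floor_y), 0); // resting on the floor
    assert_eq!(row_of(-8.5, floor_y), 1); // one row up
}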
settings.py
""" Django settings for app project. Generated by 'django-admin startproject' using Django 3.2.9. For more information on this file, see https://docs.djangoproject.com/en/3.2/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.2/ref/settings/ """ from pathlib import Path # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'django-insecure-nao&q&bu0i4@-&!nep#b%6x=-_f@-4hu)tb!09w8nujq5nwma*' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware',
'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'app.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'app.wsgi.application' # Database # https://docs.djangoproject.com/en/3.2/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': BASE_DIR / 'db.sqlite3', } } # Password validation # https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/3.2/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.2/howto/static-files/ STATIC_URL = '/static/' # Default primary key field type # https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
'django.contrib.sessions.middleware.SessionMiddleware',
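Both SECURITY WARNINGs above are usually addressed the same way before deployment: read the values from the environment instead of hard-coding them. A common sketch (not part of the generated file; the variable names are illustrative conventions):

import os

# Fall back to a throwaway key only for local development.
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', 'dev-only-fallback-key')
DEBUG = os.environ.get('DJANGO_DEBUG', '') == '1'
ALLOWED_HOSTS = [h for h in os.environ.get('DJANGO_ALLOWED_HOSTS', '').split(',') if h]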
changeApp.py
# -*- coding: utf-8 -*- import time from DriverInit import initAppiumDriver def
(driver, package, activity): driver.quit() driver = initAppiumDriver.initAppiumWithInfo(package=package, activity=activity) time.sleep(10) driver.get_screenshot_as_file('./img/%s.png' % time.strftime('%Y-%m-%d_%H_%M_%S', time.localtime(time.time()))) time.sleep(3) # hand the new session back to the caller; the old driver was quit above and # the re-initialized one would otherwise be lost when this function returns return driver
change2app
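A minimal usage sketch for change2app, assuming an existing Appium session; the package and activity names below are illustrative, not taken from the original project.

from DriverInit import initAppiumDriver
from changeApp import change2app

# Start an initial session, then hop to another app and screenshot it.
driver = initAppiumDriver.initAppiumWithInfo(package="com.example.app",
                                             activity=".MainActivity")
driver = change2app(driver, package="com.example.other", activity=".MainActivity")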
schedule.js
const router = require("express").Router(); const schedule = require("../../controllers/schedule"); // matches with "/api/:id" router.route('/:id') .get(schedule.findById); // matches with "/api/" router.route("/") .post(schedule.create);
module.exports = router;
defined_cross.py
#--*--coding: utf-8 --*--
import tensorflow as tf
from numpy.random import RandomState

batch_size = 8

# Two input nodes.
x = tf.placeholder(tf.float32, shape=[None, 2], name='x-input')
# A regression problem usually has a single output node.
y_ = tf.placeholder(tf.float32, shape=[None, 1], name='y-output')

# Forward pass of a single-layer network: here simply a weighted sum.
w1 = tf.Variable(tf.random_normal([2, 1], stddev=1, seed=1))
y = tf.matmul(x, w1)

# Costs for over-prediction and under-prediction.
loss_more = 10
loss_less = 1
# Loss function.
loss = tf.reduce_sum(tf.where(tf.greater(y, y_), (y-y_)*loss_more, (y_-y)*loss_less))
'''
tf.greater takes two tensors and compares them element-wise, returning True
where the first is larger and False otherwise.
tf.where takes three arguments: a condition tensor, a value picked where the
condition is True, and a value picked where it is False.
'''
# Optimizer.
optimizer = tf.train.AdamOptimizer(0.001).minimize(loss)

# Generate a synthetic dataset from random numbers.
rdm = RandomState(1)
dataset_size = 128
X = rdm.rand(dataset_size, 2)
# The ground truth is the sum of the two inputs plus a small random term. The
# noise makes the data not perfectly predictable; otherwise the choice of loss
# function would barely matter, since every loss is minimized by an exact fit.
# Noise is usually a small zero-mean quantity, so here it is uniform in
# -0.05 ~ 0.05.
Y = [[x1 + x2 + rdm.rand()/10.0-0.05] for (x1, x2) in X]

# Train the network.
with tf.Session() as sess:
    init = tf.global_variables_initializer()
    sess.run(init)
    epoch = 10000
    for i in range(epoch):
        start = (i * batch_size) % dataset_size
        end = min(start+batch_size, dataset_size)
        sess.run(optimizer, feed_dict={x:X[start:end], y_:Y[start:end]})
        if i % 500 == 0:
            total_loss = sess.run(loss, feed_dict={x:X, y_:Y})
            print('After %d loss is %g' % (i, total_loss))
    print(sess.run(w1))
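To make the asymmetric loss concrete, here is a tiny NumPy sketch of the same tf.greater/tf.where pattern; the values are illustrative.

import numpy as np

y  = np.array([1.2, 0.8])   # predictions
y_ = np.array([1.0, 1.0])   # targets
loss_more, loss_less = 10, 1
# Over-predicting by 0.2 costs 0.2*10; under-predicting by 0.2 costs 0.2*1.
loss = np.sum(np.where(y > y_, (y - y_) * loss_more, (y_ - y) * loss_less))
print(loss)  # ~2.2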
views.py
from django.http import HttpResponse, HttpResponseRedirect, Http404 from django.shortcuts import render, get_object_or_404, redirect from django.contrib.auth import get_user_model from django.core.paginator import Paginator from django.db.models import Q from django.utils import timezone from .models import Post, Comment from .forms import PostForm, CommentForm from profiles.models import UserProfile def post_create(request): if not request.user.is_authenticated: raise Http404 form = PostForm(request.POST or None) user = get_object_or_404(UserProfile, user=request.user) print(user) if form.is_valid(): instance = form.save(commit=False) instance.author = user instance.save() return HttpResponseRedirect(instance.get_absolute_url()) context = { "form": form, "title": "Create" } return render(request, "blog/post_form.html", context) def post_update(request, slug=None): if not request.user.is_authenticated: raise Http404 obj = get_object_or_404(Post, slug=slug) form = PostForm(request.POST or None, instance=obj) if form.is_valid(): instance = form.save(commit=False) instance.save() return HttpResponseRedirect(instance.get_absolute_url()) context = { "form": form, "title": "Update" } return render(request, "blog/post_form.html", context) def post_detail(request, slug=None): post = get_object_or_404(Post, slug=slug) comment_list = Comment.objects.all().filter(post=post) form = CommentForm(request.POST or None) if form.is_valid(): form = form.save(commit=False) form.post = post form.save() return HttpResponseRedirect(post.get_absolute_url()) context = { "post": post, "comment_list": comment_list, "form": form, "title": "Detail", } # print(request.user) # print(object.author.user.username) if request.user == post.author.user:
return render(request, "blog/post_detail.html", context) def post_list(request): queryset_list = Post.objects.published() query = request.GET.get("q", None) if query: queryset_list = queryset_list.filter(Q(title__icontains=query) | Q(content__icontains=query) | Q( author__user__first_name__icontains=query) | Q(author__user__last_name__icontains=query) | Q( technologies__icontains=query)).distinct() paginator = Paginator(queryset_list, 10) page = request.GET.get('page') post_list = paginator.get_page(page) context = { "title": "List", "post_list": post_list, } if request.user.is_authenticated: context["user"] = True else: context["user"] = False return render(request, "blog/post_list.html", context) def post_delete(request, slug=None): if not request.user.is_staff or not request.user.is_superuser: raise Http404 instance = get_object_or_404(Post, slug=slug) if request.POST: instance.delete() return redirect("posts:list") context = { "title": "Delete", "object": instance } return render(request, "blog/confirm_delete.html", context)
context["user"] = True else: context["user"] = False
models.py
# -*- coding: utf-8 -*- """ Created on Fri Jul 17 21:16:08 2020 @author: wmonteiro92 """ from xgboost import XGBRegressor, XGBClassifier def
(X, y, algorithm, random_state=0):
    """Train one dataset in Python.

    :param X: the input values.
    :type X: np.array
    :param y: the target values.
    :type y: np.array
    :param algorithm: the machine learning model to use. Allowed values are
        `XGBClassifier` and `XGBRegressor`.
    :type algorithm: str
    :param random_state: the seed. Default is 0.
    :type random_state: Integer
    :return: the trained machine learning model.
    :rtype: Object
    """
    if algorithm == 'XGBClassifier':
        model = XGBClassifier(random_state=random_state)
    elif algorithm == 'XGBRegressor':
        model = XGBRegressor(random_state=random_state)
    else:
        # Fail fast on unsupported values instead of hitting an UnboundLocalError below.
        raise ValueError(f'Unsupported algorithm: {algorithm}')

    model.fit(X, y)
    return model
train_ml_model
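A hedged usage sketch for train_ml_model on a tiny synthetic dataset (requires xgboost; shapes, threshold, and seed are illustrative only):

import numpy as np

X = np.random.rand(100, 4)
y = (X.sum(axis=1) > 2.0).astype(int)   # toy binary labels
model = train_ml_model(X, y, algorithm='XGBClassifier', random_state=42)
print(model.predict(X[:5]))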
stay_in_place_controller.py
from robot_control import Robot from game_interfaces.msg import PlayerCommand class NullController(Robot): def __init__(self): pass def get_action(self, my_pos_efcs, ball_pos_efcs, team_positions_wcs=None, opponents_positions_wcs=None): l_rpm = 0 r_rpm = 0
action = 0 return PlayerCommand(l_rpm, r_rpm, action)
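A usage sketch, assuming the position arguments are simple (x, y) tuples; since the returned PlayerCommand carries zero wheel speeds and a zero action, the robot holds its position.

controller = NullController()
cmd = controller.get_action(my_pos_efcs=(0.0, 0.0), ball_pos_efcs=(1.0, 0.5))
# cmd is a PlayerCommand(0, 0, 0), i.e. "stay in place".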
scion2d.rs
use std::{cfg, collections::HashMap, ops::Range, path::Path, time::SystemTime}; use legion::{component, storage::Component, Entity, IntoQuery, Resources, World}; use wgpu::{ util::DeviceExt, BindGroup, BindGroupLayout, Buffer, CommandEncoder, Device, Queue, RenderPassColorAttachment, RenderPipeline, SurfaceConfiguration, TextureView, }; use crate::{ config::scion_config::ScionConfig, core::{ components::{ color::Color, material::{Material, Texture}, maths::{camera::Camera, transform::Transform}, shapes::{ line::Line, polygon::Polygon, rectangle::Rectangle, square::Square, triangle::Triangle, }, tiles::{ sprite::Sprite, tilemap::{Tile, Tilemap}, }, ui::{ui_image::UiImage, ui_text::UiTextImage, UiComponent}, Hide, HidePropagated, }, legion_ext::ScionResourcesExtension, }, rendering::{ gl_representations::{GlUniform, UniformData}, shaders::pipeline::pipeline, Renderable2D, RenderableUi, ScionRenderer, }, utils::file::{read_file_modification_time, FileReaderError}, }; #[derive(Default)] pub(crate) struct Scion2D { vertex_buffers: HashMap<Entity, wgpu::Buffer>, index_buffers: HashMap<Entity, wgpu::Buffer>, render_pipelines: HashMap<String, RenderPipeline>, texture_bind_group_layout: Option<BindGroupLayout>, transform_bind_group_layout: Option<BindGroupLayout>, diffuse_bind_groups: HashMap<String, (BindGroup, wgpu::Texture)>, transform_uniform_bind_groups: HashMap<Entity, (GlUniform, Buffer, BindGroup)>, assets_timestamps: HashMap<String, SystemTime>, } struct RenderingInfos { layer: usize, range: Range<u32>, entity: Entity, texture_path: Option<String>, type_name: String, } impl ScionRenderer for Scion2D { fn start(&mut self, device: &Device, surface_config: &SurfaceConfiguration) { let uniform_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor { entries: &[wgpu::BindGroupLayoutEntry { binding: 0, visibility: wgpu::ShaderStages::VERTEX, ty: wgpu::BindingType::Buffer { ty: wgpu::BufferBindingType::Uniform, has_dynamic_offset: false, min_binding_size: None, }, count: None, }], label: Some("uniform_bind_group_layout"), }); let texture_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor { entries: &[ wgpu::BindGroupLayoutEntry { binding: 0, visibility: wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Texture { multisampled: false, view_dimension: wgpu::TextureViewDimension::D2, sample_type: wgpu::TextureSampleType::Float { filterable: false }, }, count: None, }, wgpu::BindGroupLayoutEntry { binding: 1, visibility: wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Sampler { comparison: false, filtering: true }, count: None, }, ], label: Some("texture_bind_group_layout"), }); self.transform_bind_group_layout = Some(uniform_bind_group_layout); self.texture_bind_group_layout = Some(texture_bind_group_layout); self.insert_components_pipelines::<Triangle>(&device, &surface_config); self.insert_components_pipelines::<Square>(&device, &surface_config); self.insert_components_pipelines::<Rectangle>(&device, &surface_config); self.insert_components_pipelines::<Sprite>(&device, &surface_config); self.insert_components_pipelines::<Line>(&device, &surface_config); self.insert_components_pipelines::<Polygon>(&device, &surface_config); self.insert_components_pipelines::<UiImage>(&device, &surface_config); self.insert_components_pipelines::<UiTextImage>(&device, &surface_config); self.insert_components_pipelines::<Tilemap>(&device, &surface_config); } fn update( &mut self, world: &mut World, resources: &mut Resources, device: &Device, 
surface_config: &SurfaceConfiguration, queue: &mut Queue, ) { if world_contains_camera(world) { self.update_diffuse_bind_groups(world, resources, device, queue); self.update_transforms(world, &device, queue); self.upsert_component_buffers::<Triangle>(world, &device); self.upsert_component_buffers::<Square>(world, &device); self.upsert_component_buffers::<Rectangle>(world, &device); self.upsert_component_buffers::<Sprite>(world, &device); self.upsert_component_buffers::<Line>(world, &device); self.upsert_component_buffers::<Polygon>(world, &device); self.upsert_tilemaps_buffers(world, &device); self.upsert_ui_component_buffers::<UiImage>(world, &device, &surface_config, queue); self.upsert_ui_component_buffers::<UiTextImage>(world, &device, &surface_config, queue); } else { log::warn!("No camera has been found in resources"); } self.clean_buffers(world); } fn render( &mut self, world: &mut World, config: &ScionConfig, texture_view: &TextureView, encoder: &mut CommandEncoder, ) { { encoder.begin_render_pass(&wgpu::RenderPassDescriptor { label: Some("Scion 2D Render Pass"), color_attachments: &[get_default_color_attachment(texture_view, config)], depth_stencil_attachment: None, }); } if world_contains_camera(world) { let mut rendering_infos = Vec::new(); rendering_infos.append(&mut self.pre_render_component::<Triangle>(world)); rendering_infos.append(&mut self.pre_render_component::<Square>(world)); rendering_infos.append(&mut self.pre_render_component::<Rectangle>(world)); rendering_infos.append(&mut self.pre_render_component::<Sprite>(world)); rendering_infos.append(&mut self.pre_render_component::<Line>(world)); rendering_infos.append(&mut self.pre_render_component::<Polygon>(world)); rendering_infos.append(&mut self.pre_render_ui_component::<UiImage>(world)); rendering_infos.append(&mut self.pre_render_ui_component::<UiTextImage>(world)); rendering_infos.append(&mut self.pre_render_tilemaps(world)); rendering_infos.sort_by(|a, b| b.layer.cmp(&a.layer)); while let Some(info) = rendering_infos.pop() { self.render_component(texture_view, encoder, info); } } } } impl Scion2D { fn insert_components_pipelines<T: Component + Renderable2D>( &mut self, device: &&Device, surface_config: &&SurfaceConfiguration, ) { self.insert_pipeline_if_not_finded::<T>(device, surface_config); } fn upsert_component_buffers<T: Component + Renderable2D>( &mut self, world: &mut World, device: &&Device, ) { for (entity, component, material, _) in <(Entity, &mut T, &Material, &Transform)>::query().iter_mut(world) { if !self.vertex_buffers.contains_key(entity) || component.dirty() { let vertex_buffer = device.create_buffer_init(&component.vertex_buffer_descriptor(Some(material))); self.vertex_buffers.insert(*entity, vertex_buffer); } if !self.index_buffers.contains_key(entity) || component.dirty() { let index_buffer = device.create_buffer_init(&component.indexes_buffer_descriptor()); self.index_buffers.insert(*entity, index_buffer); } component.set_dirty(false); } } fn upsert_tilemaps_buffers(&mut self, world: &mut World, device: &&Device) { let mut tilemap_query = <(Entity, &mut Tilemap, &Material, &Transform)>::query(); let (mut tilemap_world, mut tile_world) = world.split_for_query(&tilemap_query); let mut tiles: Vec<(&Tile, &mut Sprite)> = <(&Tile, &mut Sprite)>::query().iter_mut(&mut tile_world).collect(); for (entity, _tilemap, material, _) in tilemap_query.iter_mut(&mut tilemap_world) { let tile_size = Material::tile_size(material).expect(""); let mut vertexes = Vec::new(); let mut position = 0; let mut indexes = 
Vec::new(); let any_tile_modified = !self.vertex_buffers.contains_key(entity) || tiles .iter_mut() .filter(|(tile, sprite)| tile.tilemap == *entity && sprite.dirty()) .count() > 0; if any_tile_modified { tiles.iter_mut().filter(|(tile, _sprite)| tile.tilemap == *entity).for_each( |(tile, sprite)| { let mut vec = sprite.upsert_content(Some(material)).to_vec(); vec.iter_mut().for_each(|gl_vertex| { gl_vertex.position.append_position( tile_size as f32 * tile.position.x() as f32, tile_size as f32 * tile.position.y() as f32, tile.position.z() as f32 / 100., ) }); vertexes.append(&mut vec); let sprite_indexes = Sprite::indices(); let mut sprite_indexes: Vec<u16> = sprite_indexes .iter() .map(|indice| (*indice as usize + (position * 4)) as u16) .collect(); indexes.append(&mut sprite_indexes); position += 1; sprite.set_dirty(false); }, ); let buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some("TileMap Vertex Buffer"), contents: bytemuck::cast_slice(vertexes.as_slice()), usage: wgpu::BufferUsages::VERTEX, }); self.vertex_buffers.insert(*entity, buffer); let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some("Square Index Buffer"), contents: bytemuck::cast_slice(&indexes), usage: wgpu::BufferUsages::INDEX, }); self.index_buffers.insert(*entity, index_buffer); } } } fn upsert_ui_component_buffers<T: Component + Renderable2D + RenderableUi>( &mut self, world: &mut World, device: &&Device, _surface_config: &&SurfaceConfiguration, queue: &mut Queue, ) { for (entity, component, _) in <(Entity, &mut T, &Transform)>::query().iter_mut(world) { if !self.vertex_buffers.contains_key(entity) { let vertex_buffer = device.create_buffer_init(&component.vertex_buffer_descriptor(None)); self.vertex_buffers.insert(*entity, vertex_buffer); } if !self.index_buffers.contains_key(entity) { let index_buffer = device.create_buffer_init(&component.indexes_buffer_descriptor()); self.index_buffers.insert(*entity, index_buffer); } if let Some(texture_path) = component.get_texture_path() { if !self.diffuse_bind_groups.contains_key(texture_path.as_str()) { let path = Path::new(texture_path.as_str()); let loaded_texture = Texture::from_png(path); self.diffuse_bind_groups.insert( texture_path.clone(), load_texture_to_queue( &loaded_texture, queue, device, self.texture_bind_group_layout.as_ref().unwrap(), ), ); let timestamp = read_file_modification_time(path); if let Ok(timestamp) = timestamp
} } } } fn insert_pipeline_if_not_finded<T: Component + Renderable2D>( &mut self, device: &&Device, surface_config: &&SurfaceConfiguration, ) { let type_name = std::any::type_name::<T>(); if !self.render_pipelines.contains_key(type_name) { self.render_pipelines.insert( type_name.to_string(), pipeline( device, surface_config, self.texture_bind_group_layout.as_ref().unwrap(), self.transform_bind_group_layout.as_ref().unwrap(), T::topology(), ), ); } } fn render_component( &mut self, texture_view: &TextureView, encoder: &mut CommandEncoder, rendering_infos: RenderingInfos, ) { let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { label: Some("Scion 2D Render Pass"), color_attachments: &[get_no_color_attachment(texture_view)], depth_stencil_attachment: None, }); render_pass.set_bind_group( 1, &self.transform_uniform_bind_groups.get(&rendering_infos.entity).unwrap().2, &[], ); render_pass.set_vertex_buffer( 0, self.vertex_buffers.get(&rendering_infos.entity).as_ref().unwrap().slice(..), ); render_pass.set_index_buffer( self.index_buffers.get(&rendering_infos.entity).as_ref().unwrap().slice(..), wgpu::IndexFormat::Uint16, ); render_pass.set_pipeline( self.render_pipelines.get(rendering_infos.type_name.as_str()).as_ref().unwrap(), ); if let Some(path) = rendering_infos.texture_path { render_pass.set_bind_group( 0, &self.diffuse_bind_groups.get(path.as_str()).unwrap().0, &[], ); } render_pass.draw_indexed(rendering_infos.range, 0, 0..1); } fn pre_render_component<T: Component + Renderable2D>( &mut self, world: &mut World, ) -> Vec<RenderingInfos> { let type_name = std::any::type_name::<T>(); let mut render_infos = Vec::new(); for (entity, component, material, transform) in <(Entity, &mut T, &Material, &Transform)>::query() .filter( !component::<Tile>() & !component::<Hide>() & !component::<HidePropagated>(), ) .iter_mut(world) { let path = match material { Material::Color(color) => Some(get_path_from_color(&color)), Material::Texture(p) => Some(p.clone()), Material::Tileset(tileset) => Some(tileset.texture.clone()), }; render_infos.push(RenderingInfos { layer: transform.translation().z(), range: component.range(), entity: *entity, texture_path: path, type_name: type_name.to_string(), }); } render_infos } fn pre_render_tilemaps(&mut self, world: &mut World) -> Vec<RenderingInfos> { let type_name = std::any::type_name::<Tilemap>(); let mut render_infos = Vec::new(); let mut tilemap_query = <(Entity, &mut Tilemap, &Material, &Transform)>::query() .filter(!component::<Hide>() & !component::<HidePropagated>()); let (mut tilemap_world, mut tile_world) = world.split_for_query(&tilemap_query); for (entity, _, material, transform) in tilemap_query.iter_mut(&mut tilemap_world) { let tiles_nb = <(&Tile, &mut Sprite)>::query() .iter_mut(&mut tile_world) .filter(|(tile, _sprite)| tile.tilemap == *entity) .count(); let path = match material { Material::Tileset(tileset) => Some(tileset.texture.clone()), _ => None, }; render_infos.push(RenderingInfos { layer: transform.translation().z(), range: 0..(tiles_nb * Sprite::indices().len()) as u32, entity: *entity, texture_path: path, type_name: type_name.to_string(), }); } render_infos } fn pre_render_ui_component<T: Component + Renderable2D + RenderableUi>( &mut self, world: &mut World, ) -> Vec<RenderingInfos> { let type_name = std::any::type_name::<T>(); let mut render_infos = Vec::new(); for (entity, component, transform) in <(Entity, &mut T, &Transform)>::query() .filter(!component::<Hide>() & !component::<HidePropagated>()) 
.iter_mut(world) { render_infos.push(RenderingInfos { layer: transform.translation().z(), range: component.range(), entity: *entity, texture_path: component.get_texture_path(), type_name: type_name.to_string(), }); } render_infos } fn update_transforms(&mut self, world: &mut World, device: &&Device, queue: &mut Queue) { self.update_transforms_for_type::<Triangle>(world, &device, queue); self.update_transforms_for_type::<Square>(world, &device, queue); self.update_transforms_for_type::<Rectangle>(world, &device, queue); self.update_transforms_for_type::<Sprite>(world, &device, queue); self.update_transforms_for_type::<Line>(world, &device, queue); self.update_transforms_for_type::<Polygon>(world, &device, queue); self.update_transforms_for_type::<UiImage>(world, &device, queue); self.update_transforms_for_type::<UiTextImage>(world, &device, queue); self.update_transforms_for_type::<Tilemap>(world, &device, queue); } fn update_transforms_for_type<T: Component + Renderable2D>( &mut self, main_world: &mut World, device: &&Device, queue: &mut Queue, ) { let mut camera_query = <(&Camera, &Transform)>::query(); let (camera_world, mut world) = main_world.split_for_query(&camera_query); let camera = camera_query .iter(&camera_world) .next() .expect("No camera has been found in the world after the security check"); for (entity, transform, optional_ui_component, _) in <(Entity, &Transform, Option<&UiComponent>, &T)>::query().iter_mut(&mut world) { if !self.transform_uniform_bind_groups.contains_key(entity) { let (uniform, uniform_buffer, group) = create_transform_uniform_bind_group( &device, transform, camera, optional_ui_component.is_some(), self.transform_bind_group_layout.as_ref().unwrap(), ); queue.write_buffer(&uniform_buffer, 0, bytemuck::cast_slice(&[uniform])); self.transform_uniform_bind_groups .insert(*entity, (uniform, uniform_buffer, group)); } else { let (uniform, uniform_buffer, _) = self .transform_uniform_bind_groups .get_mut(entity) .expect("Fatal error, a transform has been marked as found but doesn't exist"); uniform.replace_with(GlUniform::from(UniformData { transform, camera, is_ui_component: optional_ui_component.is_some(), })); queue.write_buffer(uniform_buffer, 0, bytemuck::cast_slice(&[*uniform])); } } } fn texture_should_be_reloaded( &self, path: &String, new_timestamp: &Option<Result<SystemTime, FileReaderError>>, ) -> bool { !self.diffuse_bind_groups.contains_key(path.as_str()) || if let Some(Ok(timestamp)) = new_timestamp { !self.assets_timestamps.contains_key(path.as_str()) || !self.assets_timestamps.get(path.as_str()).unwrap().eq(timestamp) } else { false } } /// Loads in the queue materials that are not yet loaded. 
fn update_diffuse_bind_groups( &mut self, world: &mut World, resources: &mut Resources, device: &Device, queue: &mut Queue, ) { let hot_timer_cycle = if cfg!(feature = "hot-reload") { let mut timers = resources.timers(); let hot_reload_timer = timers.get_timer("hot-reload-timer").expect("Missing mandatory timer : hot_reload"); hot_reload_timer.cycle() > 0 } else { false }; <(Entity, &Material)>::query().for_each(world, |(_entity, material)| { match material { Material::Texture(texture_path) => { let path = Path::new(texture_path.as_str()); let new_timestamp = if hot_timer_cycle || !self.diffuse_bind_groups.contains_key(texture_path.as_str()) { Some(read_file_modification_time(path)) } else { None }; if self.texture_should_be_reloaded(&texture_path, &new_timestamp) { if self.diffuse_bind_groups.contains_key(texture_path.as_str()) { self.diffuse_bind_groups .get(texture_path.as_str()) .expect("Unreachable diffuse bind group after check") .1 .destroy(); self.diffuse_bind_groups.remove(texture_path.as_str()); } let loaded_texture = Texture::from_png(path); self.diffuse_bind_groups.insert( texture_path.clone(), load_texture_to_queue( &loaded_texture, queue, device, self.texture_bind_group_layout.as_ref().unwrap(), ), ); if let Some(Ok(timestamp)) = new_timestamp { self.assets_timestamps.insert(texture_path.clone(), timestamp); } } } Material::Color(color) => { let path = get_path_from_color(&color); if !self.diffuse_bind_groups.contains_key(path.as_str()) { let loaded_texture = Texture::from_color(&color); self.diffuse_bind_groups.insert( path.clone(), load_texture_to_queue( &loaded_texture, queue, device, self.texture_bind_group_layout.as_ref().unwrap(), ), ); } } Material::Tileset(tileset) => { let path = Path::new(tileset.texture.as_str()); let new_timestamp = if hot_timer_cycle || !self.diffuse_bind_groups.contains_key(tileset.texture.as_str()) { Some(read_file_modification_time(path)) } else { None }; if self.texture_should_be_reloaded(&tileset.texture, &new_timestamp) { let loaded_texture = Texture::from_png(Path::new(tileset.texture.as_str())); self.diffuse_bind_groups.insert( tileset.texture.clone(), load_texture_to_queue( &loaded_texture, queue, device, self.texture_bind_group_layout.as_ref().unwrap(), ), ); if let Some(Ok(timestamp)) = new_timestamp { self.assets_timestamps.insert(tileset.texture.clone(), timestamp); } } } } }); } fn clean_buffers(&mut self, world: &mut World) { let entities: Vec<&Entity> = <Entity>::query().iter(world).collect(); self.vertex_buffers.retain(|&k, _| entities.contains(&&k)); self.index_buffers.retain(|&k, _| entities.contains(&&k)); self.transform_uniform_bind_groups.retain(|&k, _| entities.contains(&&k)); } } fn load_texture_to_queue( texture: &Texture, queue: &mut Queue, device: &Device, texture_bind_group_layout: &BindGroupLayout, ) -> (BindGroup, wgpu::Texture) { let texture_size = wgpu::Extent3d { width: texture.width as u32, height: texture.height as u32, depth_or_array_layers: 1, }; let diffuse_texture = device.create_texture(&wgpu::TextureDescriptor { size: texture_size, mip_level_count: 1, sample_count: 1, dimension: wgpu::TextureDimension::D2, format: wgpu::TextureFormat::Rgba8UnormSrgb, usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST, label: Some("diffuse_texture"), }); queue.write_texture( wgpu::ImageCopyTexture { texture: &diffuse_texture, mip_level: 0, origin: wgpu::Origin3d::ZERO, aspect: wgpu::TextureAspect::All, }, &*texture.bytes, wgpu::ImageDataLayout { offset: 0, bytes_per_row: std::num::NonZeroU32::new((4 
* texture.width) as u32), rows_per_image: std::num::NonZeroU32::new(texture.height as u32), }, texture_size, ); let diffuse_texture_view = diffuse_texture.create_view(&wgpu::TextureViewDescriptor::default()); let diffuse_sampler = device.create_sampler(&wgpu::SamplerDescriptor { address_mode_u: wgpu::AddressMode::ClampToEdge, address_mode_v: wgpu::AddressMode::ClampToEdge, address_mode_w: wgpu::AddressMode::ClampToEdge, mag_filter: wgpu::FilterMode::Nearest, min_filter: wgpu::FilterMode::Nearest, mipmap_filter: wgpu::FilterMode::Nearest, ..Default::default() }); let diffuse_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { layout: &texture_bind_group_layout, entries: &[ wgpu::BindGroupEntry { binding: 0, resource: wgpu::BindingResource::TextureView(&diffuse_texture_view), }, wgpu::BindGroupEntry { binding: 1, resource: wgpu::BindingResource::Sampler(&diffuse_sampler), }, ], label: Some("diffuse_bind_group"), }); (diffuse_bind_group, diffuse_texture) } fn create_transform_uniform_bind_group( device: &Device, transform: &Transform, camera: (&Camera, &Transform), is_ui_component: bool, uniform_bind_group_layout: &BindGroupLayout, ) -> (GlUniform, Buffer, BindGroup) { let uniform = GlUniform::from(UniformData { transform, camera, is_ui_component }); let uniform_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some("Uniform Buffer"), contents: bytemuck::cast_slice(&[uniform]), usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, }); let uniform_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { layout: uniform_bind_group_layout, entries: &[wgpu::BindGroupEntry { binding: 0, resource: uniform_buffer.as_entire_binding(), }], label: Some("uniform_bind_group"), }); (uniform, uniform_buffer, uniform_bind_group) } fn get_default_color_attachment<'a>( texture_view: &'a TextureView, config: &'a ScionConfig, ) -> RenderPassColorAttachment<'a> { RenderPassColorAttachment { view: texture_view, resolve_target: None, ops: wgpu::Operations { load: wgpu::LoadOp::Clear( if let Some(color) = &config .window_config .as_ref() .expect("Window config is missing") .default_background_color { wgpu::Color { r: (color.red() as f32 / 255.) as f64, g: (color.green() as f32 / 255.) as f64, b: (color.blue() as f32 / 255.) as f64, a: color.alpha() as f64, } } else { wgpu::Color { r: 1., g: 0., b: 0., a: 1.0 } }, ), store: true, }, } } fn get_no_color_attachment(texture_view: &TextureView) -> RenderPassColorAttachment { RenderPassColorAttachment { view: texture_view, resolve_target: None, ops: wgpu::Operations { load: wgpu::LoadOp::Load, store: true }, } } fn get_path_from_color(color: &Color) -> String { format!("color-{}-{}-{}-{}", color.red(), color.green(), color.blue(), color.alpha()) } fn world_contains_camera(world: &mut World) -> bool { <&Camera>::query().iter(world).count() > 0 }
{ self.assets_timestamps.insert(texture_path.clone(), timestamp); }
interfaceloadbalancers.go
package network // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "context" "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "github.com/Azure/go-autorest/tracing" "net/http" ) // InterfaceLoadBalancersClient is the network Client type InterfaceLoadBalancersClient struct { BaseClient } // NewInterfaceLoadBalancersClient creates an instance of the InterfaceLoadBalancersClient client. func NewInterfaceLoadBalancersClient(subscriptionID string) InterfaceLoadBalancersClient { return NewInterfaceLoadBalancersClientWithBaseURI(DefaultBaseURI, subscriptionID) } // NewInterfaceLoadBalancersClientWithBaseURI creates an instance of the InterfaceLoadBalancersClient client using a // custom endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, // Azure stack). func NewInterfaceLoadBalancersClientWithBaseURI(baseURI string, subscriptionID string) InterfaceLoadBalancersClient { return InterfaceLoadBalancersClient{NewWithBaseURI(baseURI, subscriptionID)} } // List list all load balancers in a network interface. // Parameters: // resourceGroupName - the name of the resource group. // networkInterfaceName - the name of the network interface. func (client InterfaceLoadBalancersClient) List(ctx context.Context, resourceGroupName string, networkInterfaceName string) (result InterfaceLoadBalancerListResultPage, err error) { if tracing.IsEnabled()
result.fn = client.listNextResults req, err := client.ListPreparer(ctx, resourceGroupName, networkInterfaceName) if err != nil { err = autorest.NewErrorWithError(err, "network.InterfaceLoadBalancersClient", "List", nil, "Failure preparing request") return } resp, err := client.ListSender(req) if err != nil { result.ilblr.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "network.InterfaceLoadBalancersClient", "List", resp, "Failure sending request") return } result.ilblr, err = client.ListResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.InterfaceLoadBalancersClient", "List", resp, "Failure responding to request") } return } // ListPreparer prepares the List request. func (client InterfaceLoadBalancersClient) ListPreparer(ctx context.Context, resourceGroupName string, networkInterfaceName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "networkInterfaceName": autorest.Encode("path", networkInterfaceName), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2017-08-01" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/loadBalancers", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListSender sends the List request. The method will close the // http.Response Body if it receives an error. func (client InterfaceLoadBalancersClient) ListSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListResponder handles the response to the List request. The method always // closes the http.Response Body. func (client InterfaceLoadBalancersClient) ListResponder(resp *http.Response) (result InterfaceLoadBalancerListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // listNextResults retrieves the next set of results, if any. func (client InterfaceLoadBalancersClient) listNextResults(ctx context.Context, lastResults InterfaceLoadBalancerListResult) (result InterfaceLoadBalancerListResult, err error) { req, err := lastResults.interfaceLoadBalancerListResultPreparer(ctx) if err != nil { return result, autorest.NewErrorWithError(err, "network.InterfaceLoadBalancersClient", "listNextResults", nil, "Failure preparing next results request") } if req == nil { return } resp, err := client.ListSender(req) if err != nil { result.Response = autorest.Response{Response: resp} return result, autorest.NewErrorWithError(err, "network.InterfaceLoadBalancersClient", "listNextResults", resp, "Failure sending next results request") } result, err = client.ListResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "network.InterfaceLoadBalancersClient", "listNextResults", resp, "Failure responding to next results request") } return } // ListComplete enumerates all values, automatically crossing page boundaries as required. 
func (client InterfaceLoadBalancersClient) ListComplete(ctx context.Context, resourceGroupName string, networkInterfaceName string) (result InterfaceLoadBalancerListResultIterator, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/InterfaceLoadBalancersClient.List") defer func() { sc := -1 if result.Response().Response.Response != nil { sc = result.page.Response().Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } result.page, err = client.List(ctx, resourceGroupName, networkInterfaceName) return }
{ ctx = tracing.StartSpan(ctx, fqdn+"/InterfaceLoadBalancersClient.List") defer func() { sc := -1 if result.ilblr.Response.Response != nil { sc = result.ilblr.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() }
user.service.ts
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { from, Observable } from 'rxjs';
import { ProfileEntity } from '../../profile/model/profile.schema';
import { getRepository, Repository } from 'typeorm';
import { User } from '../model/user.interface';
import { UserEntity } from '../model/user.schema';

@Injectable()
export class UserService {
    constructor(@InjectRepository(UserEntity) private readonly userRes: Repository<UserEntity>) {}

    findAll(): Observable<User[]> {
        return from(this.userRes.find({relations: ["profile"]}));
    }

    async create(body: User, proId: string): Promise<Observable<User>> {
        let profileRes = getRepository(ProfileEntity);
        let user = this.userRes.create(body);
        await profileRes.findOne({id: proId}).then((profile) =>{
} }
user.profile = profile; }); return from(this.userRes.save(user));
run-e2e-experiment.go
package main import ( "bytes" "context" "fmt" "io/ioutil" "log" "os" "time" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/types" appsv1 "k8s.io/api/apps/v1" corev1 "k8s.io/api/core/v1" k8syaml "k8s.io/apimachinery/pkg/util/yaml" _ "k8s.io/client-go/plugin/pkg/client/auth/gcp" "sigs.k8s.io/controller-runtime/pkg/client" commonv1beta1 "github.com/kubeflow/katib/pkg/apis/controller/common/v1beta1" experimentsv1beta1 "github.com/kubeflow/katib/pkg/apis/controller/experiments/v1beta1" controllerUtil "github.com/kubeflow/katib/pkg/controller.v1beta1/util" "github.com/kubeflow/katib/pkg/util/v1beta1/katibclient" ) const ( timeout = 30 * time.Minute ) func verifyResult(exp *experimentsv1beta1.Experiment) (*commonv1beta1.Metric, error)
func main() { if len(os.Args) != 2 { log.Fatal("Experiment name is missing") } expName := os.Args[1] b, err := ioutil.ReadFile(expName) if err != nil { log.Fatal("Error in reading file ", err) } exp := &experimentsv1beta1.Experiment{} buf := bytes.NewBufferString(string(b)) if err = k8syaml.NewYAMLOrJSONDecoder(buf, 1024).Decode(exp); err != nil { log.Fatal("Yaml decode error ", err) } kclient, err := katibclient.NewClient(client.Options{}) if err != nil { log.Fatal("NewClient for Katib failed: ", err) } if exp.Spec.Algorithm.AlgorithmName != "hyperband" { // Hyperband will validate the parallel trial count, // thus we should not change it. var maxtrials int32 = 3 var paralleltrials int32 = 2 exp.Spec.MaxTrialCount = &maxtrials exp.Spec.ParallelTrialCount = &paralleltrials } err = kclient.CreateExperiment(exp) if err != nil { log.Fatal("CreateExperiment from YAML failed: ", err) } for endTime := time.Now().Add(timeout); time.Now().Before(endTime); { exp, err = kclient.GetExperiment(exp.Name, exp.Namespace) if err != nil { log.Fatal("Get Experiment error ", err) } log.Printf("Waiting for Experiment %s to finish.", exp.Name) log.Printf(`Experiment %s's trials: %d trials, %d pending trials, %d running trials, %d killed trials, %d succeeded trials, %d failed trials.`, exp.Name, exp.Status.Trials, exp.Status.TrialsPending, exp.Status.TrialsRunning, exp.Status.TrialsKilled, exp.Status.TrialsSucceeded, exp.Status.TrialsFailed) log.Printf("Optimal Trial for Experiment %s: %v", exp.Name, exp.Status.CurrentOptimalTrial) log.Printf("Experiment %s's conditions: %v", exp.Name, exp.Status.Conditions) suggestion, err := kclient.GetSuggestion(exp.Name, exp.Namespace) if err != nil { log.Printf("Get Suggestion error: %v", err) } else { log.Printf("Suggestion %s's conditions: %v", suggestion.Name, suggestion.Status.Conditions) log.Printf("Suggestion %s's suggestions: %v", suggestion.Name, suggestion.Status.Suggestions) } if exp.IsCompleted() { log.Printf("Experiment %v finished", exp.Name) break } time.Sleep(20 * time.Second) } if !exp.IsCompleted() { log.Fatal("Experiment run timed out") } metric, err := verifyResult(exp) if err != nil { log.Fatal(err) } if metric == nil { log.Fatal("Metric value in CurrentOptimalTrial not populated") } objectiveType := exp.Spec.Objective.Type var goal float64 if exp.Spec.Objective.Goal != nil { goal = *exp.Spec.Objective.Goal } if (exp.Spec.Objective.Goal != nil && objectiveType == commonv1beta1.ObjectiveTypeMinimize && metric.Min < goal) || (exp.Spec.Objective.Goal != nil && objectiveType == commonv1beta1.ObjectiveTypeMaximize && metric.Max > goal) { log.Print("Objective Goal reached") } else { if exp.Status.Trials != *exp.Spec.MaxTrialCount { log.Fatal("All trials are not run in the experiment ", exp.Status.Trials, exp.Spec.MaxTrialCount) } if exp.Status.TrialsSucceeded != *exp.Spec.MaxTrialCount { log.Fatal("All trials are not successful ", exp.Status.TrialsSucceeded, *exp.Spec.MaxTrialCount) } } sug, err := kclient.GetSuggestion(exp.Name, exp.Namespace) if exp.Spec.ResumePolicy == experimentsv1beta1.LongRunning { if sug.IsSucceeded() { log.Fatal("Suggestion is terminated while ResumePolicy = LongRunning") } } if exp.Spec.ResumePolicy == experimentsv1beta1.NeverResume { if sug.IsRunning() { log.Fatal("Suggestion is still running while ResumePolicy = NeverResume") } namespacedName := types.NamespacedName{Name: controllerUtil.GetAlgorithmServiceName(sug), Namespace: sug.Namespace} service := &corev1.Service{} err := kclient.GetClient().Get(context.TODO(), namespacedName, 
service) if err == nil || !errors.IsNotFound(err) { log.Fatal("Suggestion service is still alive while ResumePolicy = NeverResume") } namespacedName = types.NamespacedName{Name: controllerUtil.GetAlgorithmDeploymentName(sug), Namespace: sug.Namespace} deployment := &appsv1.Deployment{} err = kclient.GetClient().Get(context.TODO(), namespacedName, deployment) if err == nil || !errors.IsNotFound(err) { log.Fatal("Suggestion deployment is still alive while ResumePolicy = NeverResume") } } log.Printf("Experiment has recorded best current Optimal Trial %v", exp.Status.CurrentOptimalTrial) }
{ if len(exp.Status.CurrentOptimalTrial.ParameterAssignments) == 0 { return nil, fmt.Errorf("Best parameter assignments not updated in status") } if len(exp.Status.CurrentOptimalTrial.Observation.Metrics) == 0 { return nil, fmt.Errorf("Best metrics not updated in status") } for _, metric := range exp.Status.CurrentOptimalTrial.Observation.Metrics { if metric.Name == exp.Spec.Objective.ObjectiveMetricName { return &metric, nil } } return nil, fmt.Errorf("Best objective metric not updated in status") }
status.py
#!/usr/bin/env python """ Copyright 2015 SmartBear Software Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ class Status: """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.""" def __init__(self):
self.swaggerTypes = { 'code': 'str', 'info': 'str' } #Status code identifier. self.code = None # str #Description of the status code. self.info = None # str
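Illustrative use of the generated Status model; the field values are made up.

status = Status()
status.code = "200"
status.info = "OK"
print(status.swaggerTypes)   # {'code': 'str', 'info': 'str'}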
permission-guard.service.ts
import { Injectable } from '@angular/core'; import { Credentials } from '../pages/login/model/credentials.model'; import { StorageManagerService } from '../pages/login/shared/storage-manager.service'; @Injectable( {providedIn: 'root'} ) export class
{

  constructor(private storageManager: StorageManagerService) { }

  hasPermission(permission: string): boolean {
    let login = this.storageManager.getLoginData();
    if (login) {
      for (let i = 0; i < login.usuario.rols.length; i++) {
        let r = login.usuario.rols[i];
        for (let j = 0; j < r.permisos.length; j++) {
          let p = r.permisos[j];
          if (!p) return false;
          if (permission == p.nombre) return true;
        }
      }
    }
    return false;
  }

  hasRol(rol: string): boolean {
    let login = this.storageManager.getLoginData();
    if (login) {
      for (let i = 0; i < login.usuario.rols.length; i++) {
        let r = login.usuario.rols[i];
        if (!r) return false;
        if (r.nombre == rol) return true;
      }
    }
    return false;
  }

  credentialHasPermission(permission: string, login: Credentials): boolean {
    if (login) {
      for (let i = 0; i < login.usuario.rols.length; i++) {
        let r = login.usuario.rols[i];
        for (let j = 0; j < r.permisos.length; j++) {
          let p = r.permisos[j];
          if (!p) {
            return false;
          }
          if (permission == p.nombre) {
            return true;
          }
        }
      }
    }
    return false;
  }
}
PermissionGuardService
controller.go
/* Copyright 2020 The Knative Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package ingress import ( "context" "strings" v3 "github.com/envoyproxy/go-control-plane/envoy/service/discovery/v3" xds "github.com/envoyproxy/go-control-plane/pkg/server/v3" "go.uber.org/zap" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/client-go/tools/cache" "knative.dev/net-kourier/pkg/config" envoy "knative.dev/net-kourier/pkg/envoy/server" "knative.dev/net-kourier/pkg/generator" rconfig "knative.dev/net-kourier/pkg/reconciler/ingress/config" "knative.dev/networking/pkg/apis/networking/v1alpha1" networkingClientSet "knative.dev/networking/pkg/client/clientset/versioned/typed/networking/v1alpha1" knativeclient "knative.dev/networking/pkg/client/injection/client" ingressinformer "knative.dev/networking/pkg/client/injection/informers/networking/v1alpha1/ingress" v1alpha1ingress "knative.dev/networking/pkg/client/injection/reconciler/networking/v1alpha1/ingress" "knative.dev/networking/pkg/status" kubeclient "knative.dev/pkg/client/injection/kube/client" endpointsinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/endpoints" podinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/pod" secretinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/secret" serviceinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/service" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" "knative.dev/pkg/logging" "knative.dev/pkg/reconciler" ) const ( gatewayLabelKey = "app" gatewayLabelValue = "3scale-kourier-gateway" nodeID = "3scale-kourier-gateway" managementPort = 18000 unknownWeightedClusterPrefix = "route: unknown weighted cluster '" ) var isKourierIngress = reconciler.AnnotationFilterFunc( v1alpha1ingress.ClassAnnotationKey, config.KourierIngressClassName, false, ) func NewController(ctx context.Context, cmw configmap.Watcher) *controller.Impl { logger := logging.FromContext(ctx) kubernetesClient := kubeclient.Get(ctx) knativeClient := knativeclient.Get(ctx) ingressInformer := ingressinformer.Get(ctx) endpointsInformer := endpointsinformer.Get(ctx) serviceInformer := serviceinformer.Get(ctx) podInformer := podinformer.Get(ctx) secretInformer := secretinformer.Get(ctx) // Create a new Cache, with the Readiness endpoint enabled, and the list of current Ingresses. 
caches, err := generator.NewCaches(ctx, kubernetesClient, config.ExternalAuthz.Enabled) if err != nil { logger.Fatalw("Failed create new caches", zap.Error(err)) } r := &Reconciler{ caches: caches, extAuthz: config.ExternalAuthz.Enabled, } impl := v1alpha1ingress.NewImpl(ctx, r, config.KourierIngressClassName, func(impl *controller.Impl) controller.Options { resync := configmap.TypeFilter(&config.Kourier{})(func(string, interface{}) { impl.FilteredGlobalResync(isKourierIngress, ingressInformer.Informer()) }) configStore := rconfig.NewStore(logger.Named("config-store"), resync) configStore.WatchConfigs(cmw) return controller.Options{ ConfigStore: configStore, PromoteFilterFunc: isKourierIngress, } }) r.resyncConflicts = func() { impl.FilteredGlobalResync(func(obj interface{}) bool { lbReady := obj.(*v1alpha1.Ingress).Status.GetCondition(v1alpha1.IngressConditionLoadBalancerReady).GetReason() // Force reconcile all Kourier ingresses that are either not reconciled yet // (and thus might end up in a conflict) or already in conflict. return isKourierIngress(obj) && (lbReady == "" || lbReady == conflictReason) }, ingressInformer.Informer()) } envoyXdsServer := envoy.NewXdsServer( managementPort, &xds.CallbackFuncs{ StreamRequestFunc: func(_ int64, req *v3.DiscoveryRequest) error { if req.ErrorDetail == nil { return nil } logger.Warnf("Error pushing snapshot to gateway: code: %v message %s", req.ErrorDetail.Code, req.ErrorDetail.Message) // We know we can handle this error without a global resync. if strings.HasPrefix(req.ErrorDetail.Message, unknownWeightedClusterPrefix) { // The error message contains the service name as referenced by the ingress. svc := strings.TrimPrefix(strings.TrimSuffix(req.ErrorDetail.Message, "'"), unknownWeightedClusterPrefix) ns, name, err := cache.SplitMetaNamespaceKey(svc) if err != nil { logger.Errorw("Failed to parse service name from error", zap.Error(err)) return nil } logger.Infof("Triggering reconcile for all ingresses referencing %q", svc) impl.Tracker.OnChanged(&corev1.Service{ TypeMeta: metav1.TypeMeta{ Kind: "Service", APIVersion: "v1", }, ObjectMeta: metav1.ObjectMeta{ Namespace: ns, Name: name, }, }) return nil } // Fallback to a global resync of non-ready ingresses for every other error. impl.FilteredGlobalResync(func(obj interface{}) bool { return isKourierIngress(obj) && !obj.(*v1alpha1.Ingress).IsReady() }, ingressInformer.Informer()) return nil }, }, ) r.xdsServer = envoyXdsServer statusProber := status.NewProber( logger.Named("status-manager"), NewProbeTargetLister(logger, endpointsInformer.Lister()), func(ing *v1alpha1.Ingress) { logger.Debugf("Ready callback triggered for ingress: %s/%s", ing.Namespace, ing.Name) impl.EnqueueKey(types.NamespacedName{Namespace: ing.Namespace, Name: ing.Name}) }) r.statusManager = statusProber statusProber.Start(ctx.Done()) r.caches.SetOnEvicted(func(key types.NamespacedName, value interface{}) { logger.Debug("Evicted", key.String()) // We enqueue the ingress name and namespace as if it was a new event, to force // a config refresh. impl.EnqueueKey(key) }) ingressTranslator := generator.NewIngressTranslator( func(ns, name string) (*corev1.Secret, error) { return secretInformer.Lister().Secrets(ns).Get(name) }, func(ns, name string) (*corev1.Endpoints, error) { return endpointsInformer.Lister().Endpoints(ns).Get(name) }, func(ns, name string) (*corev1.Service, error) { return serviceInformer.Lister().Services(ns).Get(name) }, impl.Tracker) r.ingressTranslator = &ingressTranslator // Initialize the Envoy snapshot. 
snapshot, err := r.caches.ToEnvoySnapshot(ctx) if err != nil { logger.Fatalw("Failed to create snapshot", zap.Error(err)) } err = r.xdsServer.SetSnapshot(nodeID, snapshot) if err != nil { logger.Fatalw("Failed to set snapshot", zap.Error(err)) } // Get the current list of ingresses that are ready and seed the Envoy config with them. ingressesToSync, err := getReadyIngresses(ctx, knativeClient.NetworkingV1alpha1()) if err != nil { logger.Fatalw("Failed to fetch ready ingresses", zap.Error(err)) } logger.Infof("Priming the config with %d ingresses", len(ingressesToSync)) // The startup translator uses clients instead of listeners to correctly list all // resources at startup. startupTranslator := generator.NewIngressTranslator( func(ns, name string) (*corev1.Secret, error) { return kubernetesClient.CoreV1().Secrets(ns).Get(ctx, name, metav1.GetOptions{}) }, func(ns, name string) (*corev1.Endpoints, error) { return kubernetesClient.CoreV1().Endpoints(ns).Get(ctx, name, metav1.GetOptions{}) }, func(ns, name string) (*corev1.Service, error) { return kubernetesClient.CoreV1().Services(ns).Get(ctx, name, metav1.GetOptions{}) }, impl.Tracker) for _, ingress := range ingressesToSync { if err := generator.UpdateInfoForIngress( ctx, caches, ingress, &startupTranslator, config.ExternalAuthz.Enabled); err != nil { logger.Fatalw("Failed prewarm ingress", zap.Error(err)) } } // Update the entire batch of ready ingresses at once. if err := r.updateEnvoyConfig(ctx); err != nil { logger.Fatalw("Failed to set initial envoy config", zap.Error(err)) } // Let's start the management server **after** the configuration has been seeded. go func() { logger.Info("Starting Management Server on Port ", managementPort) if err := envoyXdsServer.RunManagementServer(); err != nil { logger.Fatalw("Failed to serve XDS Server", zap.Error(err)) } }() // Ingresses need to be filtered by ingress class, so Kourier does not // react to nor modify ingresses created by other gateways. ingressInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ FilterFunc: isKourierIngress, Handler: controller.HandleAll(impl.Enqueue), }) // Make sure trackers are deleted once the observers are removed. // Also reconcile all ingresses in conflict once another ingress is removed to // unwedge them. ingressInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ FilterFunc: isKourierIngress, Handler: cache.ResourceEventHandlerFuncs{ DeleteFunc: impl.Tracker.OnDeletedObserver, }, }) serviceInformer.Informer().AddEventHandler(controller.HandleAll( controller.EnsureTypeMeta( impl.Tracker.OnChanged, corev1.SchemeGroupVersion.WithKind("Services"), ), )) viaTracker := controller.EnsureTypeMeta( impl.Tracker.OnChanged, corev1.SchemeGroupVersion.WithKind("Endpoints")) endpointsInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{ AddFunc: viaTracker, DeleteFunc: viaTracker, UpdateFunc: func(old interface{}, new interface{}) { before := readyAddresses(old.(*corev1.Endpoints)) after := readyAddresses(new.(*corev1.Endpoints)) // If the ready addresses have not changed, there is no reason for us to // reconcile this endpoint, so why bother? 
if before.Equal(after) { return } viaTracker(new) }, }) secretInformer.Informer().AddEventHandler(controller.HandleAll( controller.EnsureTypeMeta( impl.Tracker.OnChanged, corev1.SchemeGroupVersion.WithKind("Secrets"), ), )) podInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ FilterFunc: reconciler.LabelFilterFunc(gatewayLabelKey, gatewayLabelValue, false), Handler: cache.ResourceEventHandlerFuncs{ // Cancel probing when a Pod is deleted DeleteFunc: func(obj interface{}) { pod, ok := obj.(*corev1.Pod) if ok { statusProber.CancelPodProbing(pod) } }, }, }) return impl } func
(ctx context.Context, knativeClient networkingClientSet.NetworkingV1alpha1Interface) ([]*v1alpha1.Ingress, error) { ingresses, err := knativeClient.Ingresses("").List(ctx, metav1.ListOptions{}) if err != nil { return nil, err } ingressesToWarm := make([]*v1alpha1.Ingress, 0, len(ingresses.Items)) for i := range ingresses.Items { ingress := &ingresses.Items[i] if isKourierIngress(ingress) && ingress.GetDeletionTimestamp() == nil && // Ignore ingresses that are already marked for deletion. ingress.GetStatus().GetCondition(v1alpha1.IngressConditionNetworkConfigured).IsTrue() { ingressesToWarm = append(ingressesToWarm, ingress) } } return ingressesToWarm, nil } func readyAddresses(eps *corev1.Endpoints) sets.String { var count int for _, subset := range eps.Subsets { count += len(subset.Addresses) } if count == 0 { return nil } ready := make(sets.String, count) for _, subset := range eps.Subsets { for _, address := range subset.Addresses { ready.Insert(address.IP) } } return ready }
getReadyIngresses
lib.rs
//! This module contains a client implementation of the
//! [Firmata Protocol](https://github.com/firmata/protocol)

use std::collections::HashMap;
use std::io;
use std::iter::Iterator;
use std::io::{Error, ErrorKind, Result, Write};
use std::str;
use std::thread;
use std::time::{Duration, Instant};

pub const ENCODER_DATA: u8 = 0x61;
pub const ANALOG_MAPPING_QUERY: u8 = 0x69;
pub const ANALOG_MAPPING_RESPONSE: u8 = 0x6A;
pub const CAPABILITY_QUERY: u8 = 0x6B;
pub const CAPABILITY_RESPONSE: u8 = 0x6C;
pub const PIN_STATE_QUERY: u8 = 0x6D;
pub const PIN_STATE_RESPONSE: u8 = 0x6E;
pub const EXTENDED_ANALOG: u8 = 0x6F;
pub const SERVO_CONFIG: u8 = 0x70;
pub const STRING_DATA: u8 = 0x71;
pub const STEPPER_DATA: u8 = 0x72;
pub const ONEWIRE_DATA: u8 = 0x73;
pub const SHIFT_DATA: u8 = 0x75;
pub const I2C_REQUEST: u8 = 0x76;
pub const I2C_REPLY: u8 = 0x77;
pub const I2C_CONFIG: u8 = 0x78;
pub const I2C_MODE_WRITE: u8 = 0x00;
pub const I2C_MODE_READ: u8 = 0x01;
pub const REPORT_FIRMWARE: u8 = 0x79;
pub const PROTOCOL_VERSION: u8 = 0xF9;
pub const SAMPLING_INTERVAL: u8 = 0x7A;
pub const SCHEDULER_DATA: u8 = 0x7B;
pub const SYSEX_NON_REALTIME: u8 = 0x7E;
pub const SYSEX_REALTIME: u8 = 0x7F;
pub const START_SYSEX: u8 = 0xF0;
pub const END_SYSEX: u8 = 0xF7;
pub const PIN_MODE: u8 = 0xF4;
pub const REPORT_DIGITAL: u8 = 0xD0;
pub const REPORT_ANALOG: u8 = 0xC0;
pub const DIGITAL_MESSAGE: u8 = 0x90;
pub const ANALOG_MESSAGE: u8 = 0xE0;
pub const INPUT: u8 = 0;
pub const OUTPUT: u8 = 1;
pub const ANALOG: u8 = 2;
pub const PWM: u8 = 3;
pub const SERVO: u8 = 4;
pub const I2C: u8 = 6;
pub const ONEWIRE: u8 = 7;
pub const STEPPER: u8 = 8;
pub const ENCODER: u8 = 9;
pub const CC_EVENT: u8 = 0x03;
pub const CC_JOYSTICK_EVENT: u8 = 0x04;
pub const CC_BUTTON_EVENT: u8 = 0x05;
pub const CC_GET: u8 = 0x00;
pub const CC_SET: u8 = 0x01;
pub const CC_RESPONSE: u8 = 0x02;
pub const CC_BUTTON_UP: u8 = 6;
pub const CC_BUTTON_DOWN: u8 = 7;
pub const CC_BUTTON_LEFT: u8 = 8;
pub const CC_BUTTON_RIGHT: u8 = 9;
pub const CC_BUTTON_JOYSTICK: u8 = 13;
pub const HID_ENABLED: u8 = 100;
pub const HID_SETTING_JS_SENSITIVITY: u8 = 101;
pub const HID_SETTING_JS_INVERTED: u8 = 102;
pub const CC_DATA_STREAMING_ENABLED: u8 = 103;

/// Blocking read of exactly `len` bytes, sleeping through read timeouts.
fn read<T: io::Read>(port: &mut T, len: i32) -> Result<Vec<u8>> {
    let mut vec: Vec<u8> = vec![];
    let mut len = len;
    loop {
        let buf: &mut [u8; 1] = &mut [0u8];
        match port.read(buf) {
            Ok(_) => {
                vec.push(buf[0]);
                len -= 1;
                if len == 0 {
                    break;
                }
            }
            Err(e) => {
                if e.kind() == ErrorKind::TimedOut {
                    thread::sleep(Duration::from_millis(1));
                    continue;
                }
                // Propagate any error other than a read timeout instead of
                // spinning forever.
                return Err(e);
            }
        }
    }
    Ok(vec)
}

/// Non-retrying read of exactly `len` bytes; fails on the first error.
fn read_once<T: io::Read>(port: &mut T, len: i32) -> Result<Vec<u8>> {
    let mut vec: Vec<u8> = vec![];
    let mut len = len;
    loop {
        let buf: &mut [u8; 1] = &mut [0u8];
        match port.read(buf) {
            Ok(_) => {
                vec.push(buf[0]);
                len -= 1;
                if len == 0 {
                    break;
                }
            }
            Err(_) => return Err(Error::new(ErrorKind::Other, "read failed before the requested length was reached")),
        }
    }
    Ok(vec)
}

/// A structure representing an I2C reply.
#[derive(Debug)]
pub struct I2CReply {
    pub address: i32,
    pub register: i32,
    pub data: Vec<u8>,
}

/// A structure representing an available pin mode.
#[derive(Debug, Clone)]
pub struct Mode {
    pub mode: u8,
    pub resolution: u8,
}

/// A structure representing the current state and configuration of a pin.
#[derive(Debug, Clone)]
pub struct Pin {
    pub modes: Vec<Mode>,
    pub analog: bool,
    pub value: i32,
    pub mode: u8,
}

/// A structure representing the current state and configuration of a CC device.
#[derive(Debug)]
pub struct CCSettings {
    pub config_map: HashMap<u8, u8>,
}

impl CCSettings {
    fn new() -> Self {
        CCSettings {
            config_map: HashMap::new(),
        }
    }

    fn set(&mut self, config: u8, value: u8) {
        self.config_map.insert(config, value);
    }

    fn get(&self, config: &u8) -> Option<u8> {
        self.config_map.get(config).cloned()
    }

    pub fn get_char(&self, config: &u8) -> Option<char>
    pub fn get_bool(&self, config: &u8) -> Option<bool> {
        self.get(config).map(|val| val != 0)
    }

    pub fn enabled(&self, config: &u8) -> Option<bool> {
        self.get_bool(config)
    }
}

/// A trait for implementing firmata boards.
pub trait Firmata {
    /// This function returns the raw I2C replies that have been read from
    /// the board.
    fn i2c_data(&mut self) -> &mut Vec<I2CReply>;
    /// This function returns the pins that the board has access to.
    fn pins(&mut self) -> &Vec<Pin>;
    /// This function returns the current firmata protocol version.
    fn protocol_version(&mut self) -> &String;
    /// This function returns the firmware name.
    fn firmware_name(&mut self) -> &String;
    /// This function returns the firmware version.
    fn firmware_version(&mut self) -> &String;
    /// This function queries the board for available analog pins.
    fn query_analog_mapping(&mut self) -> Result<()>;
    /// This function queries the board for all available capabilities.
    fn query_capabilities(&mut self) -> Result<()>;
    /// This function queries the board for current firmware and protocol
    /// information.
    fn query_firmware(&mut self) -> Result<()>;
    /// This function configures the `delay` in microseconds for I2C devices
    /// that require a delay between when the register is written to and the
    /// data in that register can be read.
    fn i2c_config(&mut self, delay: i32) -> Result<()>;
    /// This function reads `size` bytes from the I2C device at the specified
    /// `address`.
    fn i2c_read(&mut self, address: i32, size: i32) -> Result<()>;
    /// This function writes `data` to the I2C device at
    /// the specified `address`.
    fn i2c_write(&mut self, address: i32, data: &[u8]) -> Result<()>;
    /// This function sets the digital reporting `state`
    /// of the specified `pin`.
    fn report_digital(&mut self, pin: i32, state: i32) -> Result<()>;
    /// This function sets the analog reporting `state` of the specified `pin`.
    fn report_analog(&mut self, pin: i32, state: i32) -> Result<()>;
    /// This function writes `level` to the analog `pin`.
    fn analog_write(&mut self, pin: i32, level: i32) -> Result<()>;
    /// This function writes `level` to the digital `pin`.
    fn digital_write(&mut self, pin: i32, level: i32) -> Result<()>;
    /// This function sets the `mode` of the specified `pin`.
    fn set_pin_mode(&mut self, pin: i32, mode: u8) -> Result<()>;
    /// This function reads from the firmata device and parses one firmata
    /// message.
    fn read_and_decode(&mut self) -> Result<()>;
    /// This function reads from the firmata device and waits for a specific
    /// message.
    fn read_and_decode_message(&mut self, message_id: u8, timeout: isize) -> Result<Vec<u8>>;
    /// Reading settings can fail intermittently (observed in testing), so this
    /// function retries until the setting has been read successfully.
    fn read_setting_until_some(&mut self, setting: u8) -> Result<()>;
    /// This function decodes a message head.
    fn decode(&mut self, buf: Vec<u8>) -> Result<Vec<u8>>;
    fn settings_get(&mut self, config: u8) -> Result<()>;
    fn settings_set(&mut self, config: u8, value: u8) -> Result<()>;
}

/// A structure representing a firmata board.
pub struct Board<T: io::Read + io::Write> {
    pub connection: Box<T>,
    pub pins: Vec<Pin>,
    pub i2c_data: Vec<I2CReply>,
    pub protocol_version: String,
    pub firmware_name: String,
    pub firmware_version: String,
    pub cc_settings: CCSettings,
}

impl<T: io::Read + io::Write> Board<T> {
    /// Creates a new `Board` given an `io::Read + io::Write` connection.
    pub fn new(connection: Box<T>) -> Result<Board<T>> {
        let mut b = Board {
            connection,
            firmware_name: String::new(),
            firmware_version: String::new(),
            protocol_version: String::new(),
            pins: vec![],
            i2c_data: vec![],
            cc_settings: CCSettings::new(),
        };
        b.query_firmware()?;
        b.read_and_decode()?;
        b.query_analog_mapping()?;
        b.read_and_decode()?;
        b.settings_get(HID_ENABLED)?;
        b.read_and_decode()?;
        b.settings_get(CC_DATA_STREAMING_ENABLED)?;
        b.read_and_decode()?;
        b.read_setting_until_some(CC_BUTTON_UP)?;
        b.read_setting_until_some(CC_BUTTON_DOWN)?;
        b.read_setting_until_some(CC_BUTTON_LEFT)?;
        b.read_setting_until_some(CC_BUTTON_RIGHT)?;
        b.read_setting_until_some(CC_BUTTON_JOYSTICK)?;
        Ok(b)
    }
}

impl<T: io::Read + io::Write> Firmata for Board<T> {
    fn settings_get(&mut self, config: u8) -> Result<()> {
        self.connection
            .write(&[START_SYSEX, CC_GET, config, END_SYSEX])
            .map(|_| ())
    }

    fn settings_set(&mut self, config: u8, value: u8) -> Result<()> {
        self.cc_settings.set(config, value);
        self.connection
            .write(&[START_SYSEX, CC_SET, config, value, END_SYSEX])
            .map(|_| ())
    }

    fn pins(&mut self) -> &Vec<Pin> {
        &self.pins
    }

    fn protocol_version(&mut self) -> &String {
        &self.protocol_version
    }

    fn firmware_name(&mut self) -> &String {
        &self.firmware_name
    }

    fn firmware_version(&mut self) -> &String {
        &self.firmware_version
    }

    fn i2c_data(&mut self) -> &mut Vec<I2CReply> {
        &mut self.i2c_data
    }

    fn query_analog_mapping(&mut self) -> Result<()> {
        self.connection
            .write(&[START_SYSEX, ANALOG_MAPPING_QUERY, END_SYSEX])
            .map(|_| ())
    }

    fn query_capabilities(&mut self) -> Result<()> {
        self.connection
            .write(&[START_SYSEX, CAPABILITY_QUERY, END_SYSEX])
            .map(|_| ())
    }

    fn query_firmware(&mut self) -> Result<()> {
        self.connection
            .write(&[START_SYSEX, REPORT_FIRMWARE, END_SYSEX])
            .map(|_| ())
    }

    fn i2c_config(&mut self, delay: i32) -> Result<()> {
        self.connection
            .write(&[
                START_SYSEX,
                I2C_CONFIG,
                (delay & 0xFF) as u8,
                (delay >> 8 & 0xFF) as u8,
                END_SYSEX,
            ]).map(|_| ())
    }

    fn i2c_read(&mut self, address: i32, size: i32) -> Result<()> {
        self.connection
            .write(&[
                START_SYSEX,
                I2C_REQUEST,
                address as u8,
                I2C_MODE_READ << 3,
                (size as u8) & 0x7F,
                ((size >> 7) & 0x7F) as u8,
                END_SYSEX,
            ]).map(|_| ())
    }

    fn i2c_write(&mut self, address: i32, data: &[u8]) -> Result<()> {
        let mut buf = vec![];
        buf.push(START_SYSEX);
        buf.push(I2C_REQUEST);
        buf.push(address as u8);
        buf.push(I2C_MODE_WRITE << 3);
        for i in data.iter() {
            buf.push(i & 0x7F);
            buf.push((((*i as i32) >> 7) & 0x7F) as u8);
        }
        buf.push(END_SYSEX);
        self.connection.write(&buf[..]).map(|_| ())
    }

    fn report_digital(&mut self, pin: i32, state: i32) -> Result<()> {
        self.connection
            .write(&[REPORT_DIGITAL | pin as u8, state as u8])
            .map(|_| ())
    }

    fn report_analog(&mut self, pin: i32, state: i32) -> Result<()> {
        self.connection
            .write(&[REPORT_ANALOG | pin as u8, state as u8])
            .map(|_| ())
    }

    fn analog_write(&mut self, pin: i32, level: i32) -> Result<()> {
        self.pins[pin as usize].value = level;
        self.connection
            .write(&[
                ANALOG_MESSAGE | pin as u8,
                (level & 0x7f) as u8,
                ((level >> 7) & 0x7f) as u8,
            ]).map(|_| ())
    }

    fn digital_write(&mut self, pin: i32, level: i32) -> Result<()> {
        let port = (pin as f64 / 8f64).floor() as usize;
        let mut value = 0i32;
        let mut i = 0;
        self.pins[pin as usize].value = level;
        // Rebuild the full port value from the cached pin states.
        while i < 8 {
            if self.pins[8 * port + i].value != 0 {
                value |= 1 << i;
            }
            i += 1;
        }
        self.connection
            .write(&[
                DIGITAL_MESSAGE | port as u8,
                (value & 0x7f) as u8,
                ((value >> 7) & 0x7f) as u8,
            ]).map(|_| ())
    }

    fn set_pin_mode(&mut self, pin: i32, mode: u8) -> Result<()> {
        self.pins[pin as usize].mode = mode;
        self.connection
            .write(&[PIN_MODE, pin as u8, mode])
            .map(|_| ())
    }

    fn read_setting_until_some(&mut self, setting: u8) -> Result<()> {
        loop {
            self.settings_get(setting)?;
            self.read_and_decode()?;
            if self.cc_settings.get_char(&setting).is_some() {
                break;
            }
        }
        Ok(())
    }

    fn read_and_decode(&mut self) -> Result<()> {
        // In the original implementation read_and_decode has no timeout;
        // keep it that way by passing -1.
        self.read_and_decode_message(0, -1).map(|_| ())
    }

    fn read_and_decode_message(&mut self, message_id: u8, timeout: isize) -> Result<Vec<u8>> {
        /* Logical extension of the read_and_decode method: it accepts an
           expected identifier and reads the serial port until that identifier
           is reached or the given time has passed. It also returns the read
           buffer. A message is expected to be of the form:

               |__|  |__|  |__|   |__| .... |__|     |__|
                ID                               TERMINATOR (SYSEX ONLY)
               |<--- HEAD --->|  |<------- BODY ------->|

           If message_id == 0 it will accept any command it gets. */
        fn is_id<T: Iterator<Item = u8>>(i: u8, mut s: T) -> bool {
            s.any(|v: u8| v == i)
        }
        let start_time = Instant::now();
        loop {
            // Compare against the total elapsed time (subsec_millis() alone
            // would wrap around every second).
            if timeout >= 0 && start_time.elapsed().as_millis() > timeout as u128 {
                return Err(Error::new(ErrorKind::Other, "Timed Out"));
            }
            // Peek 1 byte to look for identifiers.
            match read_once(&mut self.connection, 1) {
                Ok(mut buf) => {
                    let is_identifier = is_id(buf[0], PROTOCOL_VERSION..=PROTOCOL_VERSION)
                        || is_id(buf[0], START_SYSEX..=START_SYSEX)
                        || is_id(buf[0], CC_EVENT..=CC_EVENT)
                        || is_id(buf[0], ANALOG_MESSAGE..=0xEF)
                        || is_id(buf[0], DIGITAL_MESSAGE..=0x9F);
                    if is_identifier && (buf[0] == message_id || message_id == 0) {
                        // Get the rest of the header.
                        buf.extend(&read(&mut self.connection, 2)?);
                        return self.decode(buf);
                    }
                }
                Err(_) => continue,
            }
        }
    }

    fn decode(&mut self, mut buf: Vec<u8>) -> Result<Vec<u8>> {
        match buf[0] {
            PROTOCOL_VERSION => {
                self.protocol_version = format!("{:o}.{:o}", buf[1], buf[2]);
                Ok(buf)
            }
            ANALOG_MESSAGE..=0xEF => {
                let value = (buf[1] as i32) | ((buf[2] as i32) << 7);
                let pin = ((buf[0] as i32) & 0x0F) + 14;
                if self.pins.len() as i32 > pin {
                    self.pins[pin as usize].value = value;
                }
                Ok(buf)
            }
            DIGITAL_MESSAGE..=0x9F => {
                let port = (buf[0] as i32) & 0x0F;
                let value = (buf[1] as i32) | ((buf[2] as i32) << 7);
                for i in 0..8 {
                    let pin = (8 * port) + i;
                    if self.pins.len() as i32 > pin
                        && self.pins[pin as usize].mode == INPUT
                    {
                        self.pins[pin as usize].value = (value >> (i & 0x07)) & 0x01;
                    }
                }
                Ok(buf)
            }
            CC_EVENT => {
                // Read the rest of the information.
                buf.extend(&read(&mut self.connection, 2)?);
                Ok(buf)
            }
            START_SYSEX => {
                loop {
                    let message = read(&mut self.connection, 1)?;
                    buf.push(message[0]);
                    if message[0] == END_SYSEX {
                        break;
                    }
                }
                match buf[1] {
                    CC_RESPONSE => {
                        self.cc_settings.set(buf[2], buf[3]);
                        Ok(buf)
                    }
                    ANALOG_MAPPING_RESPONSE => {
                        if self.pins.len() > 0 {
                            let mut i = 2;
                            while i < buf.len() - 1 {
                                if buf[i] != 127u8 {
                                    self.pins[i - 2].analog = true;
                                }
                                i += 1;
                            }
                        }
                        Ok(buf)
                    }
                    CAPABILITY_RESPONSE => {
                        let mut pin = 0;
                        let mut i = 2;
                        self.pins = vec![];
                        self.pins.push(Pin {
                            modes: vec![],
                            analog: false,
                            value: 0,
                            mode: 0,
                        });
                        while i < buf.len() - 1 {
                            if buf[i] == 127u8 {
                                pin += 1;
                                i += 1;
                                self.pins.push(Pin {
                                    modes: vec![],
                                    analog: false,
                                    value: 0,
                                    mode: 0,
                                });
                                continue;
                            }
                            self.pins[pin].modes.push(Mode {
                                mode: buf[i],
                                resolution: buf[i + 1],
                            });
                            i += 2;
                        }
                        Ok(buf)
                    }
                    REPORT_FIRMWARE => {
                        self.firmware_version = format!("{:o}.{:o}", buf[2], buf[3]);
                        self.firmware_name =
                            str::from_utf8(&buf[4..buf.len() - 1]).unwrap().to_string();
                        Ok(buf)
                    }
                    I2C_REPLY => {
                        let len = buf.len();
                        let mut reply = I2CReply {
                            address: (buf[2] as i32) | ((buf[3] as i32) << 7),
                            register: (buf[4] as i32) | ((buf[5] as i32) << 7),
                            data: vec![buf[6] | buf[7] << 7],
                        };
                        let mut i = 8;
                        while i < len - 1 {
                            if buf[i] == 0xF7 {
                                break;
                            }
                            if i + 2 > len {
                                break;
                            }
                            reply.data.push(buf[i] | buf[i + 1] << 7);
                            i += 2;
                        }
                        self.i2c_data.push(reply);
                        Ok(buf)
                    }
                    _ => Err(Error::new(ErrorKind::Other, "unknown sysex code")),
                }
            }
            _ => Err(Error::new(ErrorKind::Other, "bad byte")),
        }
    }
}
{ match self.get(config) { None => None, Some(val) => Some(val as char), } }
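// A rough usage sketch of the `Board` API above; illustrative only, not part
// of the original file. Any transport implementing `io::Read + io::Write`
// (e.g. a serial-port handle) can be passed in, and pin 13 is an arbitrary
// choice that assumes the board reported at least 14 pins in its capability
// response.
#[allow(dead_code)]
fn example_blink<T: io::Read + io::Write>(port: Box<T>) -> Result<()> {
    let mut board = Board::new(port)?; // queries firmware, mapping and CC settings
    board.query_capabilities()?; // populate `board.pins` before indexing into it
    board.read_and_decode()?;
    board.set_pin_mode(13, OUTPUT)?;
    board.digital_write(13, 1)?; // drive the pin high...
    board.digital_write(13, 0)?; // ...and low again
    Ok(())
}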
actorSetPos.ts
import { BufferReader } from "../../buffer-reader"; import { ActorSetPos } from "../../definitions/ActorSetPos"; export function
(reader: BufferReader): ActorSetPos { return { r16: reader.nextUInt16(), waitForLoad: reader.nextUInt8(), unknown: reader.nextUInt8(), unknown2: reader.nextUInt32(), pos: reader.nextPosition3(), unknown3: reader.nextUInt32(), }; }
actorSetPos
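// A brief, hypothetical usage sketch (not part of the original file). It
// assumes BufferReader wraps a Node Buffer and that nextPosition3 consumes
// 12 bytes (three floats), giving a 24-byte payload overall; adjust both to
// the real BufferReader implementation.
import { BufferReader } from "../../buffer-reader";
import { actorSetPos } from "./actorSetPos";

const packet = actorSetPos(new BufferReader(Buffer.alloc(24)));
console.log(packet.r16, packet.waitForLoad, packet.pos);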
fake_serviceplan.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package fake import ( servicecatalog "github.com/kubernetes-incubator/service-catalog/pkg/apis/servicecatalog" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" labels "k8s.io/apimachinery/pkg/labels" schema "k8s.io/apimachinery/pkg/runtime/schema" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" testing "k8s.io/client-go/testing" ) // FakeServicePlans implements ServicePlanInterface type FakeServicePlans struct { Fake *FakeServicecatalog ns string } var serviceplansResource = schema.GroupVersionResource{Group: "servicecatalog.k8s.io", Version: "", Resource: "serviceplans"} var serviceplansKind = schema.GroupVersionKind{Group: "servicecatalog.k8s.io", Version: "", Kind: "ServicePlan"} // Get takes name of the servicePlan, and returns the corresponding servicePlan object, and an error if there is any. func (c *FakeServicePlans) Get(name string, options v1.GetOptions) (result *servicecatalog.ServicePlan, err error) { obj, err := c.Fake. Invokes(testing.NewGetAction(serviceplansResource, c.ns, name), &servicecatalog.ServicePlan{}) if obj == nil { return nil, err } return obj.(*servicecatalog.ServicePlan), err } // List takes label and field selectors, and returns the list of ServicePlans that match those selectors. func (c *FakeServicePlans) List(opts v1.ListOptions) (result *servicecatalog.ServicePlanList, err error) { obj, err := c.Fake. Invokes(testing.NewListAction(serviceplansResource, serviceplansKind, c.ns, opts), &servicecatalog.ServicePlanList{}) if obj == nil { return nil, err } label, _, _ := testing.ExtractFromListOptions(opts) if label == nil { label = labels.Everything() }
if label.Matches(labels.Set(item.Labels)) { list.Items = append(list.Items, item) } } return list, err } // Watch returns a watch.Interface that watches the requested servicePlans. func (c *FakeServicePlans) Watch(opts v1.ListOptions) (watch.Interface, error) { return c.Fake. InvokesWatch(testing.NewWatchAction(serviceplansResource, c.ns, opts)) } // Create takes the representation of a servicePlan and creates it. Returns the server's representation of the servicePlan, and an error, if there is any. func (c *FakeServicePlans) Create(servicePlan *servicecatalog.ServicePlan) (result *servicecatalog.ServicePlan, err error) { obj, err := c.Fake. Invokes(testing.NewCreateAction(serviceplansResource, c.ns, servicePlan), &servicecatalog.ServicePlan{}) if obj == nil { return nil, err } return obj.(*servicecatalog.ServicePlan), err } // Update takes the representation of a servicePlan and updates it. Returns the server's representation of the servicePlan, and an error, if there is any. func (c *FakeServicePlans) Update(servicePlan *servicecatalog.ServicePlan) (result *servicecatalog.ServicePlan, err error) { obj, err := c.Fake. Invokes(testing.NewUpdateAction(serviceplansResource, c.ns, servicePlan), &servicecatalog.ServicePlan{}) if obj == nil { return nil, err } return obj.(*servicecatalog.ServicePlan), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). func (c *FakeServicePlans) UpdateStatus(servicePlan *servicecatalog.ServicePlan) (*servicecatalog.ServicePlan, error) { obj, err := c.Fake. Invokes(testing.NewUpdateSubresourceAction(serviceplansResource, "status", c.ns, servicePlan), &servicecatalog.ServicePlan{}) if obj == nil { return nil, err } return obj.(*servicecatalog.ServicePlan), err } // Delete takes name of the servicePlan and deletes it. Returns an error if one occurs. func (c *FakeServicePlans) Delete(name string, options *v1.DeleteOptions) error { _, err := c.Fake. Invokes(testing.NewDeleteAction(serviceplansResource, c.ns, name), &servicecatalog.ServicePlan{}) return err } // DeleteCollection deletes a collection of objects. func (c *FakeServicePlans) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { action := testing.NewDeleteCollectionAction(serviceplansResource, c.ns, listOptions) _, err := c.Fake.Invokes(action, &servicecatalog.ServicePlanList{}) return err } // Patch applies the patch and returns the patched servicePlan. func (c *FakeServicePlans) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *servicecatalog.ServicePlan, err error) { obj, err := c.Fake. Invokes(testing.NewPatchSubresourceAction(serviceplansResource, c.ns, name, data, subresources...), &servicecatalog.ServicePlan{}) if obj == nil { return nil, err } return obj.(*servicecatalog.ServicePlan), err }
list := &servicecatalog.ServicePlanList{} for _, item := range obj.(*servicecatalog.ServicePlanList).Items {
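// A hypothetical test sketch showing how the fake clientset exercises the
// implementation above. NewSimpleClientset and the Servicecatalog() group
// accessor are assumptions about the surrounding generated package, and the
// testing import is implied.
func TestFakeServicePlanList(t *testing.T) {
	client := fake.NewSimpleClientset(&servicecatalog.ServicePlan{
		ObjectMeta: v1.ObjectMeta{Name: "plan-1", Namespace: "test-ns"},
	})
	plans, err := client.Servicecatalog().ServicePlans("test-ns").List(v1.ListOptions{})
	if err != nil {
		t.Fatal(err)
	}
	if len(plans.Items) != 1 {
		t.Fatalf("expected 1 ServicePlan, got %d", len(plans.Items))
	}
}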
pulsar.py
# Originally by Wolfgang Pfaff # Modified by Adriaan Rol 9/2015 # Modified by Ants Remm 5/2017 # Modified by Michael Kerschbaum 5/2019 import os import shutil import ctypes import numpy as np import logging from qcodes.instrument.base import Instrument from qcodes.instrument.parameter import ( ManualParameter, InstrumentRefParameter) import qcodes.utils.validators as vals import time from pycqed.instrument_drivers.virtual_instruments.virtual_awg5014 import \ VirtualAWG5014 from pycqed.instrument_drivers.virtual_instruments.virtual_AWG8 import \ VirtualAWG8 # exception catching removed because it does not work in python versions before # 3.6 try: from qcodes.instrument_drivers.tektronix.AWG5014 import Tektronix_AWG5014 except Exception: Tektronix_AWG5014 = type(None) try: from pycqed.instrument_drivers.physical_instruments.ZurichInstruments.\ UHFQuantumController import UHFQC except Exception: UHFQC = type(None) try: from pycqed.instrument_drivers.physical_instruments.ZurichInstruments. \ ZI_HDAWG8 import ZI_HDAWG8 except Exception: ZI_HDAWG8 = type(None) log = logging.getLogger(__name__) from pycqed.instrument_drivers.physical_instruments.ZurichInstruments. \ dummy_UHFQC import dummy_UHFQC class UHFQCPulsar: """ Defines the Zurich Instruments UHFQC specific functionality for the Pulsar class """ _supportedAWGtypes = (UHFQC, dummy_UHFQC) _uhf_sequence_string_template = ( "const WINT_EN = 0x03ff0000;\n" "const WINT_TRIG = 0x00000010;\n" "const IAVG_TRIG = 0x00000020;\n" "var RO_TRIG;\n" "if (getUserReg(1)) {{\n" " RO_TRIG = WINT_EN + IAVG_TRIG;\n" "}} else {{\n" " RO_TRIG = WINT_EN + WINT_TRIG;\n" "}}\n" "setTrigger(WINT_EN);\n" "\n" "{wave_definitions}\n" "\n" "var loop_cnt = getUserReg(0);\n" "\n" "repeat (loop_cnt) {{\n" " {playback_string}\n" "}}\n" ) def _create_awg_parameters(self, awg, channel_name_map): if not isinstance(awg, UHFQCPulsar._supportedAWGtypes): return super()._create_awg_parameters(awg, channel_name_map) name = awg.name self.add_parameter('{}_reuse_waveforms'.format(awg.name), initial_value=True, vals=vals.Bool(), parameter_class=ManualParameter) self.add_parameter('{}_minimize_sequencer_memory'.format(awg.name), initial_value=True, vals=vals.Bool(), parameter_class=ManualParameter, docstring="Minimizes the sequencer " "memory by repeating specific sequence " "patterns (eg. readout) passed in " "'repeat dictionary'") self.add_parameter('{}_enforce_single_element'.format(awg.name), initial_value=False, vals=vals.Bool(), parameter_class=ManualParameter, docstring="Group all the pulses on this AWG into " "a single element. Useful for making sure " "that the master AWG has only one waveform" " per segment.") self.add_parameter('{}_granularity'.format(awg.name), get_cmd=lambda: 16) self.add_parameter('{}_element_start_granularity'.format(awg.name), initial_value=8/(1.8e9), parameter_class=ManualParameter) self.add_parameter('{}_min_length'.format(awg.name), get_cmd=lambda: 16 /(1.8e9)) self.add_parameter('{}_inter_element_deadtime'.format(awg.name), # get_cmd=lambda: 80 / 2.4e9) get_cmd=lambda: 8 / (1.8e9)) # get_cmd=lambda: 0 / 2.4e9) self.add_parameter('{}_precompile'.format(awg.name), initial_value=False, vals=vals.Bool(), label='{} precompile segments'.format(awg.name), parameter_class=ManualParameter) self.add_parameter('{}_delay'.format(awg.name), initial_value=0, label='{} delay'.format(name), unit='s', parameter_class=ManualParameter, docstring='Global delay applied to this ' 'channel. 
Positive values move pulses' ' on this channel forward in time') self.add_parameter('{}_trigger_channels'.format(awg.name), initial_value=[], label='{} trigger channel'.format(awg.name), parameter_class=ManualParameter) self.add_parameter('{}_active'.format(awg.name), initial_value=True, label='{} active'.format(awg.name), vals=vals.Bool(), parameter_class=ManualParameter) self.add_parameter('{}_compensation_pulse_min_length'.format(name), initial_value=0, unit='s', parameter_class=ManualParameter) self.add_parameter('{}_trigger_source'.format(awg.name), initial_value='Dig1', vals=vals.Enum('Dig1', 'Dig2', 'DIO'), parameter_class=ManualParameter, docstring='Defines for which trigger source \ the AWG should wait, before playing \ the next waveform. Allowed values \ are: "Dig1", "Dig2", "DIO"') for ch_nr in range(2): id = 'ch{}'.format(ch_nr + 1) name = channel_name_map.get(id, awg.name + '_' + id) self._uhfqc_create_channel_parameters(id, name, awg) self.channels.add(name) def _uhfqc_create_channel_parameters(self, id, name, awg): self.add_parameter('{}_id'.format(name), get_cmd=lambda _=id: _) self.add_parameter('{}_awg'.format(name), get_cmd=lambda _=awg.name: _) self.add_parameter('{}_type'.format(name), get_cmd=lambda: 'analog') self.add_parameter('{}_amp'.format(name), label='{} amplitude'.format(name), unit='V', set_cmd=self._uhfqc_setter(awg, id, 'amp'), get_cmd=self._uhfqc_getter(awg, id, 'amp'), vals=vals.Numbers(0.075, 1.5), initial_value=0.75) self.add_parameter('{}_offset'.format(name), label='{} offset'.format(name), unit='V', set_cmd=self._uhfqc_setter(awg, id, 'offset'), get_cmd=self._uhfqc_getter(awg, id, 'offset'), vals=vals.Numbers(-1.5, 1.5), initial_value=0) self.add_parameter('{}_distortion'.format(name), label='{} distortion mode'.format(name), initial_value='off', vals=vals.Enum('off', 'precalculate'), parameter_class=ManualParameter) self.add_parameter('{}_distortion_dict'.format(name), label='{} distortion dictionary'.format(name), vals=vals.Dict(), parameter_class=ManualParameter) self.add_parameter('{}_charge_buildup_compensation'.format(name), parameter_class=ManualParameter, vals=vals.Bool(), initial_value=False) self.add_parameter('{}_compensation_pulse_scale'.format(name), parameter_class=ManualParameter, vals=vals.Numbers(0., 1.), initial_value=0.5) self.add_parameter('{}_compensation_pulse_delay'.format(name), initial_value=0, unit='s', parameter_class=ManualParameter) self.add_parameter('{}_compensation_pulse_gaussian_filter_sigma'.format(name), initial_value=0, unit='s', parameter_class=ManualParameter) @staticmethod def _uhfqc_setter(obj, id, par): if par == 'offset': def s(val): obj.set('sigouts_{}_offset'.format(int(id[2])-1), val) elif par == 'amp': def s(val): obj.set('sigouts_{}_range'.format(int(id[2])-1), val) else: raise NotImplementedError('Unknown parameter {}'.format(par)) return s def _uhfqc_getter(self, obj, id, par): if par == 'offset': def g(): return obj.get('sigouts_{}_offset'.format(int(id[2])-1)) elif par == 'amp': def g(): if self._awgs_prequeried_state: return obj.parameters['sigouts_{}_range' \ .format(int(id[2])-1)].get_latest()/2 else: return obj.get('sigouts_{}_range' \ .format(int(id[2])-1))/2 else: raise NotImplementedError('Unknown parameter {}'.format(par)) return g def _program_awg(self, obj, awg_sequence, waveforms, repeat_pattern=None): if not isinstance(obj, UHFQCPulsar._supportedAWGtypes): return super()._program_awg(obj, awg_sequence, waveforms, repeat_pattern) if not self._zi_waves_cleared: _zi_clear_waves() 
self._zi_waves_cleared = True waves_to_upload = {h: waveforms[h] for codewords in awg_sequence.values() if codewords is not None for cw, chids in codewords.items() if cw != 'metadata' for h in chids.values()} self._zi_write_waves(waves_to_upload) defined_waves = set() wave_definitions = [] playback_strings = [] ch_has_waveforms = {'ch1': False, 'ch2': False} current_segment = 'no_segment' def play_element(element, playback_strings, wave_definitions): if awg_sequence[element] is None: current_segment = element playback_strings.append(f'// Segment {current_segment}') return playback_strings, wave_definitions playback_strings.append(f'// Element {element}') metadata = awg_sequence[element].pop('metadata', {}) if list(awg_sequence[element].keys()) != ['no_codeword']: raise NotImplementedError('UHFQC sequencer does currently\ not support codewords!') chid_to_hash = awg_sequence[element]['no_codeword'] wave = (chid_to_hash.get('ch1', None), None, chid_to_hash.get('ch2', None), None) wave_definitions += self._zi_wave_definition(wave, defined_waves) acq = metadata.get('acq', False) playback_strings += self._zi_playback_string(name=obj.name, device='uhf', wave=wave, acq=acq) ch_has_waveforms['ch1'] |= wave[0] is not None ch_has_waveforms['ch2'] |= wave[2] is not None return playback_strings, wave_definitions if repeat_pattern is None: for element in awg_sequence: playback_strings, wave_definitions = play_element(element, playback_strings, wave_definitions) else: real_indicies = [] for index, element in enumerate(awg_sequence): if awg_sequence[element] is not None: real_indicies.append(index) el_total = len(real_indicies) def repeat_func(n, el_played, index, playback_strings, wave_definitions): if isinstance(n, tuple): el_played_list = [] if n[0] > 1: playback_strings.append('repeat ('+str(n[0])+') {') for t in n[1:]: el_cnt, playback_strings, wave_definitions = repeat_func(t, el_played, index + np.sum( el_played_list), playback_strings, wave_definitions) el_played_list.append(el_cnt) if n[0] > 1: playback_strings.append('}') return int(n[0] * np.sum(el_played_list)), playback_strings, wave_definitions else: for k in range(n): el_index = real_indicies[int(index)+k] element = list(awg_sequence.keys())[el_index] playback_strings, wave_definitions = play_element(element, playback_strings, wave_definitions) el_played = el_played + 1 return el_played, playback_strings, wave_definitions el_played, playback_strings, wave_definitions = repeat_func(repeat_pattern, 0, 0, playback_strings, wave_definitions) if int(el_played) != int(el_total): log.error(el_played, ' is not ', el_total) raise ValueError('Check number of sequences in repeat pattern') if not (ch_has_waveforms['ch1'] or ch_has_waveforms['ch2']): return self.awgs_with_waveforms(obj.name) awg_str = self._uhf_sequence_string_template.format( wave_definitions='\n'.join(wave_definitions), playback_string='\n '.join(playback_strings), ) # Necessary hack to pass the UHFQC drivers sanity check # in acquisition_initialize() obj._awg_program_features['loop_cnt'] = True obj._awg_program_features['avg_cnt'] = False # Hack needed to have obj._awg_needs_configuration[0] = False obj._awg_program[0] = True obj.configure_awg_from_string(awg_nr=0, program_string=awg_str, timeout=600) def _is_awg_running(self, obj): if not isinstance(obj, UHFQCPulsar._supportedAWGtypes): return super()._is_awg_running(obj) return obj.awgs_0_enable() != 0 def _clock(self, obj, cid=None): if not isinstance(obj, UHFQCPulsar._supportedAWGtypes): return super()._clock(obj) return 
obj.clock_freq() class HDAWG8Pulsar: """ Defines the Zurich Instruments HDAWG8 specific functionality for the Pulsar class """ _supportedAWGtypes = (ZI_HDAWG8, VirtualAWG8, ) _hdawg_sequence_string_template = ( "{wave_definitions}\n" "\n" "{codeword_table_defs}\n" "\n" "while (1) {{\n" " {playback_string}\n" "}}\n" ) def _create_awg_parameters(self, awg, channel_name_map): if not isinstance(awg, HDAWG8Pulsar._supportedAWGtypes): return super()._create_awg_parameters(awg, channel_name_map) name = awg.name self.add_parameter('{}_reuse_waveforms'.format(awg.name), initial_value=True, vals=vals.Bool(), parameter_class=ManualParameter) self.add_parameter('{}_minimize_sequencer_memory'.format(awg.name), initial_value=False, vals=vals.Bool(), parameter_class=ManualParameter, docstring="Minimizes the sequencer " "memory by repeating specific sequence " "patterns (eg. readout) passed in " "'repeat dictionary'") self.add_parameter('{}_enforce_single_element'.format(awg.name), initial_value=False, vals=vals.Bool(), parameter_class=ManualParameter, docstring="Group all the pulses on this AWG into " "a single element. Useful for making sure " "that the master AWG has only one waveform" " per segment.") self.add_parameter('{}_granularity'.format(awg.name), get_cmd=lambda: 16) self.add_parameter('{}_element_start_granularity'.format(awg.name), initial_value=8/(2.4e9), parameter_class=ManualParameter) self.add_parameter('{}_min_length'.format(awg.name), initial_value=16 /(2.4e9), parameter_class=ManualParameter) self.add_parameter('{}_inter_element_deadtime'.format(awg.name), # get_cmd=lambda: 80 / 2.4e9) get_cmd=lambda: 8 / (2.4e9)) # get_cmd=lambda: 0 / 2.4e9) self.add_parameter('{}_precompile'.format(awg.name), initial_value=False, vals=vals.Bool(), label='{} precompile segments'.format(awg.name), parameter_class=ManualParameter) self.add_parameter('{}_delay'.format(awg.name), initial_value=0, label='{} delay'.format(name), unit='s', parameter_class=ManualParameter, docstring='Global delay applied to this ' 'channel. Positive values move pulses' ' on this channel forward in time') self.add_parameter('{}_trigger_channels'.format(awg.name), initial_value=[], label='{} trigger channel'.format(awg.name), parameter_class=ManualParameter) self.add_parameter('{}_active'.format(awg.name), initial_value=True, label='{} active'.format(awg.name), vals=vals.Bool(), parameter_class=ManualParameter) self.add_parameter('{}_compensation_pulse_min_length'.format(name), initial_value=0, unit='s', parameter_class=ManualParameter) self.add_parameter('{}_trigger_source'.format(awg.name), initial_value='Dig1', vals=vals.Enum('Dig1', 'DIO', 'ZSync'), parameter_class=ManualParameter, docstring='Defines for which trigger source \ the AWG should wait, before playing \ the next waveform. 
Allowed values \ are: "Dig1", "DIO", "ZSync"') for ch_nr in range(8): id = 'ch{}'.format(ch_nr + 1) name = channel_name_map.get(id, awg.name + '_' + id) self._hdawg_create_analog_channel_parameters(id, name, awg) self.channels.add(name) id = 'ch{}m'.format(ch_nr + 1) name = channel_name_map.get(id, awg.name + '_' + id) self._hdawg_create_marker_channel_parameters(id, name, awg) self.channels.add(name) def _hdawg_create_analog_channel_parameters(self, id, name, awg): self.add_parameter('{}_id'.format(name), get_cmd=lambda _=id: _) self.add_parameter('{}_awg'.format(name), get_cmd=lambda _=awg.name: _) self.add_parameter('{}_type'.format(name), get_cmd=lambda: 'analog') self.add_parameter('{}_offset'.format(name), label='{} offset'.format(name), unit='V', set_cmd=self._hdawg_setter(awg, id, 'offset'), get_cmd=self._hdawg_getter(awg, id, 'offset'), vals=vals.Numbers()) self.add_parameter('{}_amp'.format(name), label='{} amplitude'.format(name), unit='V', set_cmd=self._hdawg_setter(awg, id, 'amp'), get_cmd=self._hdawg_getter(awg, id, 'amp'), vals=vals.Numbers(0.01, 5.0)) self.add_parameter('{}_distortion'.format(name), label='{} distortion mode'.format(name), initial_value='off', vals=vals.Enum('off', 'precalculate'), parameter_class=ManualParameter) self.add_parameter('{}_distortion_dict'.format(name), label='{} distortion dictionary'.format(name), vals=vals.Dict(), parameter_class=ManualParameter) self.add_parameter('{}_charge_buildup_compensation'.format(name), parameter_class=ManualParameter, vals=vals.Bool(), initial_value=False) self.add_parameter('{}_compensation_pulse_scale'.format(name), parameter_class=ManualParameter, vals=vals.Numbers(0., 1.), initial_value=0.5) self.add_parameter('{}_compensation_pulse_delay'.format(name), initial_value=0, unit='s', parameter_class=ManualParameter) self.add_parameter('{}_compensation_pulse_gaussian_filter_sigma'.format(name), initial_value=0, unit='s', parameter_class=ManualParameter) self.add_parameter('{}_internal_modulation'.format(name), initial_value=False, vals=vals.Bool(), parameter_class=ManualParameter) def _hdawg_create_marker_channel_parameters(self, id, name, awg): self.add_parameter('{}_id'.format(name), get_cmd=lambda _=id: _) self.add_parameter('{}_awg'.format(name), get_cmd=lambda _=awg.name: _) self.add_parameter('{}_type'.format(name), get_cmd=lambda: 'marker') self.add_parameter('{}_offset'.format(name), label='{} offset'.format(name), unit='V', set_cmd=self._hdawg_setter(awg, id, 'offset'), get_cmd=self._hdawg_getter(awg, id, 'offset'), vals=vals.Numbers()) self.add_parameter('{}_amp'.format(name), label='{} amplitude'.format(name), unit='V', set_cmd=self._hdawg_setter(awg, id, 'amp'), get_cmd=self._hdawg_getter(awg, id, 'amp'), vals=vals.Numbers(0.01, 5.0)) @staticmethod def _hdawg_setter(obj, id, par): if par == 'offset': if id[-1] != 'm': def s(val): obj.set('sigouts_{}_offset'.format(int(id[2])-1), val) else: s = None elif par == 'amp': if id[-1] != 'm': def s(val): obj.set('sigouts_{}_range'.format(int(id[2])-1), 2*val) else: s = None else: raise NotImplementedError('Unknown parameter {}'.format(par)) return s def _hdawg_getter(self, obj, id, par): if par == 'offset': if id[-1] != 'm': def g(): return obj.get('sigouts_{}_offset'.format(int(id[2])-1)) else: return lambda: 0 elif par == 'amp': if id[-1] != 'm': def g(): if self._awgs_prequeried_state: return obj.parameters['sigouts_{}_range' \ .format(int(id[2])-1)].get_latest()/2 else: return obj.get('sigouts_{}_range' \ .format(int(id[2])-1))/2 else: return lambda: 1 else: 
raise NotImplementedError('Unknown parameter {}'.format(par)) return g def get_divisor(self, chid, awg): ''' Divisor is 1 for non modulated channels and 2 for modulated non marker channels. ''' if chid[-1]=='m': return 1 name = self._id_channel(chid, awg) if self.get(f"{name}_internal_modulation"): return 2 else: return 1 def _program_awg(self, obj, awg_sequence, waveforms, repeat_pattern=None): if not isinstance(obj, HDAWG8Pulsar._supportedAWGtypes): return super()._program_awg(obj, awg_sequence, waveforms, repeat_pattern) if not self._zi_waves_cleared: _zi_clear_waves() self._zi_waves_cleared = True chids = [f'ch{i+1}{m}' for i in range(8) for m in ['','m']] divisor = {chid: self.get_divisor(chid, obj.name) for chid in chids} waves_to_upload = {h: divisor[chid]*waveforms[h][::divisor[chid]] for codewords in awg_sequence.values() if codewords is not None for cw, chids in codewords.items() if cw != 'metadata' for chid, h in chids.items()} self._zi_write_waves(waves_to_upload) ch_has_waveforms = {'ch{}{}'.format(i + 1, m): False for i in range(8) for m in ['','m']} for awg_nr in self._hdawg_active_awgs(obj): defined_waves = set() codeword_table = {} wave_definitions = [] codeword_table_defs = [] playback_strings = [] interleaves = [] prev_dio_valid_polarity = obj.get( 'awgs_{}_dio_valid_polarity'.format(awg_nr)) added_cw = set() ch1id = 'ch{}'.format(awg_nr * 2 + 1) ch1mid = 'ch{}m'.format(awg_nr * 2 + 1) ch2id = 'ch{}'.format(awg_nr * 2 + 2) ch2mid = 'ch{}m'.format(awg_nr * 2 + 2) chids = [ch1id, ch2id] channels = [self._id_channel(chid, obj.name) for chid in chids] codeword_el = set() if all([self.get( f'{chan}_internal_modulation') for chan in channels]): internal_mod = True elif not any([self.get( f'{chan}_internal_modulation') for chan in channels]): internal_mod = False else: raise NotImplementedError('Internal modulation can only be' 'specified per sub AWG!') counter = 1 current_segment = 'no_segment' for element in awg_sequence: if awg_sequence[element] is None: current_segment = element playback_strings.append(f'// Segment {current_segment}') continue playback_strings.append(f'// Element {element}') metadata = awg_sequence[element].pop('metadata', {}) nr_cw = len(set(awg_sequence[element].keys()) - \ {'no_codeword'}) if nr_cw == 1: log.warning( f'Only one codeword has been set for {element}') else: for cw in awg_sequence[element]: if cw == 'no_codeword': if nr_cw != 0: continue chid_to_hash = awg_sequence[element][cw] wave = tuple(chid_to_hash.get(ch, None) for ch in [ch1id, ch1mid, ch2id, ch2mid]) wave_definitions += self._zi_wave_definition(wave, defined_waves) if nr_cw != 0: w1, w2 = self._zi_waves_to_wavenames(wave) if cw not in codeword_table: codeword_table_defs += \ self._zi_codeword_table_entry(cw, wave) codeword_table[cw] = (w1, w2) elif codeword_table[cw] != (w1, w2) \ and self.reuse_waveforms(): log.warning('Same codeword used for different ' 'waveforms. Using first waveform. 
' f'Ignoring element {element}.') ch_has_waveforms[ch1id] |= wave[0] is not None ch_has_waveforms[ch1mid] |= wave[1] is not None ch_has_waveforms[ch2id] |= wave[2] is not None ch_has_waveforms[ch2mid] |= wave[3] is not None if not internal_mod: playback_strings += self._zi_playback_string(name=obj.name, device='hdawg', wave=wave, codeword=(nr_cw != 0), append_zeros=self.append_zeros()) else: pb_string, interleave_string = \ self._zi_interleaved_playback_string(name=obj.name, device='hdawg', counter=counter, wave=wave, codeword=(nr_cw != 0)) counter += 1 playback_strings += pb_string interleaves += interleave_string if not any([ch_has_waveforms[ch] for ch in [ch1id, ch1mid, ch2id, ch2mid]]): continue awg_str = self._hdawg_sequence_string_template.format( wave_definitions='\n'.join(wave_definitions+interleaves), codeword_table_defs='\n'.join(codeword_table_defs), playback_string='\n '.join(playback_strings)) # Hack needed to pass the sanity check of the ZI_base_instrument # class in obj._awg_needs_configuration[awg_nr] = False obj._awg_program[awg_nr] = True obj.configure_awg_from_string(awg_nr, awg_str, timeout=600) obj.set('awgs_{}_dio_valid_polarity'.format(awg_nr), prev_dio_valid_polarity) for ch in range(8): obj.set('sigouts_{}_on'.format(ch), ch_has_waveforms[f'ch{ch+1}']) if any(ch_has_waveforms.values()): self.awgs_with_waveforms(obj.name) def _is_awg_running(self, obj): if not isinstance(obj, HDAWG8Pulsar._supportedAWGtypes): return super()._is_awg_running(obj) return any([obj.get('awgs_{}_enable'.format(awg_nr)) for awg_nr in self._hdawg_active_awgs(obj)]) def _clock(self, obj, cid): if not isinstance(obj, HDAWG8Pulsar._supportedAWGtypes): return super()._clock(obj, cid) return obj.clock_freq() def _hdawg_active_awgs(self, obj): return [0,1,2,3] class AWG5014Pulsar: """ Defines the Tektronix AWG5014 specific functionality for the Pulsar class """ _supportedAWGtypes = (Tektronix_AWG5014, VirtualAWG5014, ) def _create_awg_parameters(self, awg, channel_name_map): if not isinstance(awg, AWG5014Pulsar._supportedAWGtypes): return super()._create_awg_parameters(awg, channel_name_map) self.add_parameter('{}_reuse_waveforms'.format(awg.name), initial_value=True, vals=vals.Bool(), parameter_class=ManualParameter) self.add_parameter('{}_minimize_sequencer_memory'.format(awg.name), initial_value=False, vals=vals.Bool(), parameter_class=ManualParameter, docstring="Minimizes the sequencer " "memory by repeating specific sequence " "patterns (eg. readout) passed in " "'repeat dictionary'") self.add_parameter('{}_enforce_single_element'.format(awg.name), initial_value=False, vals=vals.Bool(), parameter_class=ManualParameter, docstring="Group all the pulses on this AWG into " "a single element. Useful for making sure " "that the master AWG has only one waveform" " per segment.") self.add_parameter('{}_granularity'.format(awg.name), get_cmd=lambda: 4) self.add_parameter('{}_element_start_granularity'.format(awg.name), initial_value=4/(1.2e9), parameter_class=ManualParameter) self.add_parameter('{}_min_length'.format(awg.name), get_cmd=lambda: 256/(1.2e9)) # Can not be triggered # faster than 210 ns. 
self.add_parameter('{}_inter_element_deadtime'.format(awg.name), get_cmd=lambda: 0) self.add_parameter('{}_precompile'.format(awg.name), initial_value=False, label='{} precompile segments'.format(awg.name), parameter_class=ManualParameter, vals=vals.Bool()) self.add_parameter('{}_delay'.format(awg.name), initial_value=0, label='{} delay'.format(awg.name), unit='s', parameter_class=ManualParameter, docstring="Global delay applied to this channel. " "Positive values move pulses on this " "channel forward in time") self.add_parameter('{}_trigger_channels'.format(awg.name), initial_value=[], label='{} trigger channels'.format(awg.name), parameter_class=ManualParameter) self.add_parameter('{}_active'.format(awg.name), initial_value=True, label='{} active'.format(awg.name), vals=vals.Bool(), parameter_class=ManualParameter) self.add_parameter('{}_compensation_pulse_min_length'.format(awg.name), initial_value=0, unit='s', parameter_class=ManualParameter) for ch_nr in range(4): id = 'ch{}'.format(ch_nr + 1) name = channel_name_map.get(id, awg.name + '_' + id) self._awg5014_create_analog_channel_parameters(id, name, awg) self.channels.add(name) id = 'ch{}m1'.format(ch_nr + 1) name = channel_name_map.get(id, awg.name + '_' + id) self._awg5014_create_marker_channel_parameters(id, name, awg) self.channels.add(name) id = 'ch{}m2'.format(ch_nr + 1) name = channel_name_map.get(id, awg.name + '_' + id) self._awg5014_create_marker_channel_parameters(id, name, awg) self.channels.add(name) def _awg5014_create_analog_channel_parameters(self, id, name, awg): self.add_parameter('{}_id'.format(name), get_cmd=lambda _=id: _) self.add_parameter('{}_awg'.format(name), get_cmd=lambda _=awg.name: _) self.add_parameter('{}_type'.format(name), get_cmd=lambda: 'analog') self.add_parameter('{}_offset_mode'.format(name), parameter_class=ManualParameter, vals=vals.Enum('software', 'hardware')) offset_mode_func = self.parameters['{}_offset_mode'.format(name)] self.add_parameter('{}_offset'.format(name), label='{} offset'.format(name), unit='V', set_cmd=self._awg5014_setter(awg, id, 'offset', offset_mode_func), get_cmd=self._awg5014_getter(awg, id, 'offset', offset_mode_func), vals=vals.Numbers()) self.add_parameter('{}_amp'.format(name), label='{} amplitude'.format(name), unit='V', set_cmd=self._awg5014_setter(awg, id, 'amp'), get_cmd=self._awg5014_getter(awg, id, 'amp'), vals=vals.Numbers(0.01, 2.25)) self.add_parameter('{}_distortion'.format(name), label='{} distortion mode'.format(name), initial_value='off', vals=vals.Enum('off', 'precalculate'), parameter_class=ManualParameter) self.add_parameter('{}_distortion_dict'.format(name), label='{} distortion dictionary'.format(name), vals=vals.Dict(), parameter_class=ManualParameter) self.add_parameter('{}_charge_buildup_compensation'.format(name), parameter_class=ManualParameter, vals=vals.Bool(), initial_value=False) self.add_parameter('{}_compensation_pulse_scale'.format(name), parameter_class=ManualParameter, vals=vals.Numbers(0., 1.), initial_value=0.5) self.add_parameter('{}_compensation_pulse_delay'.format(name), initial_value=0, unit='s', parameter_class=ManualParameter) self.add_parameter('{}_compensation_pulse_gaussian_filter_sigma'.format(name), initial_value=0, unit='s', parameter_class=ManualParameter) def _awg5014_create_marker_channel_parameters(self, id, name, awg): self.add_parameter('{}_id'.format(name), get_cmd=lambda _=id: _) self.add_parameter('{}_awg'.format(name), get_cmd=lambda _=awg.name: _) self.add_parameter('{}_type'.format(name), get_cmd=lambda: 
'marker') self.add_parameter('{}_offset'.format(name), label='{} offset'.format(name), unit='V', set_cmd=self._awg5014_setter(awg, id, 'offset'), get_cmd=self._awg5014_getter(awg, id, 'offset'), vals=vals.Numbers(-2.7, 2.7)) self.add_parameter('{}_amp'.format(name), label='{} amplitude'.format(name), unit='V', set_cmd=self._awg5014_setter(awg, id, 'amp'), get_cmd=self._awg5014_getter(awg, id, 'amp'), vals=vals.Numbers(-5.4, 5.4)) @staticmethod def _awg5014_setter(obj, id, par, offset_mode_func=None): if id in ['ch1', 'ch2', 'ch3', 'ch4']: if par == 'offset': def s(val): if offset_mode_func() == 'software': obj.set('{}_offset'.format(id), val) elif offset_mode_func() == 'hardware': obj.set('{}_DC_out'.format(id), val) else: raise ValueError('Invalid offset mode for AWG5014: ' '{}'.format(offset_mode_func())) elif par == 'amp': def s(val): obj.set('{}_amp'.format(id), 2*val) else: raise NotImplementedError('Unknown parameter {}'.format(par)) else: id_raw = id[:3] + '_' + id[3:] # convert ch1m1 to ch1_m1 if par == 'offset': def s(val): h = obj.get('{}_high'.format(id_raw)) l = obj.get('{}_low'.format(id_raw)) obj.set('{}_high'.format(id_raw), val + h - l) obj.set('{}_low'.format(id_raw), val) elif par == 'amp': def s(val): l = obj.get('{}_low'.format(id_raw)) obj.set('{}_high'.format(id_raw), l + val) else: raise NotImplementedError('Unknown parameter {}'.format(par)) return s def _awg5014_getter(self, obj, id, par, offset_mode_func=None): if id in ['ch1', 'ch2', 'ch3', 'ch4']: if par == 'offset': def g(): if offset_mode_func() == 'software': return obj.get('{}_offset'.format(id)) elif offset_mode_func() == 'hardware': return obj.get('{}_DC_out'.format(id)) else: raise ValueError('Invalid offset mode for AWG5014: ' '{}'.format(offset_mode_func())) elif par == 'amp': def g(): if self._awgs_prequeried_state: return obj.parameters['{}_amp'.format(id)] \ .get_latest()/2 else: return obj.get('{}_amp'.format(id))/2 else: raise NotImplementedError('Unknown parameter {}'.format(par)) else: id_raw = id[:3] + '_' + id[3:] # convert ch1m1 to ch1_m1 if par == 'offset': def g(): return obj.get('{}_low'.format(id_raw)) elif par == 'amp': def g(): if self._awgs_prequeried_state: h = obj.get('{}_high'.format(id_raw)) l = obj.get('{}_low'.format(id_raw)) else: h = obj.parameters['{}_high'.format(id_raw)]\ .get_latest() l = obj.parameters['{}_low'.format(id_raw)]\ .get_latest() return h - l else: raise NotImplementedError('Unknown parameter {}'.format(par)) return g def _program_awg(self, obj, awg_sequence, waveforms, repeat_pattern=None): if not isinstance(obj, AWG5014Pulsar._supportedAWGtypes): return super()._program_awg(obj, awg_sequence, waveforms, repeat_pattern) pars = { 'ch{}_m{}_low'.format(ch + 1, m + 1) for ch in range(4) for m in range(2) } pars |= { 'ch{}_m{}_high'.format(ch + 1, m + 1) for ch in range(4) for m in range(2) } pars |= { 'ch{}_offset'.format(ch + 1) for ch in range(4) } old_vals = {} for par in pars: old_vals[par] = obj.get(par) packed_waveforms = {} wfname_l = [] grp_has_waveforms = {f'ch{i+1}': False for i in range(4)} for element in awg_sequence: if awg_sequence[element] is None: continue metadata = awg_sequence[element].pop('metadata', {}) if list(awg_sequence[element].keys()) != ['no_codeword']: raise NotImplementedError('AWG5014 sequencer does ' 'not support codewords!') chid_to_hash = awg_sequence[element]['no_codeword'] if not any(chid_to_hash): continue # no waveforms maxlen = max([len(waveforms[h]) for h in chid_to_hash.values()]) maxlen = max(maxlen, 256) 
wfname_l.append([]) for grp in [f'ch{i + 1}' for i in range(4)]: wave = (chid_to_hash.get(grp, None), chid_to_hash.get(grp + 'm1', None), chid_to_hash.get(grp + 'm2', None)) grp_has_waveforms[grp] |= (wave != (None, None, None)) wfname = self._hash_to_wavename((maxlen, wave)) grp_wfs = [np.pad(waveforms.get(h, [0]), (0, maxlen - len(waveforms.get(h, [0]))), 'constant', constant_values=0) for h in wave] packed_waveforms[wfname] = obj.pack_waveform(*grp_wfs) wfname_l[-1].append(wfname) if any([wf[0] != 0 for wf in grp_wfs]): log.warning(f'Element {element} starts with non-zero ' f'entry on {obj.name}.') if not any(grp_has_waveforms.values()): for grp in ['ch1', 'ch2', 'ch3', 'ch4']: obj.set('{}_state'.format(grp), grp_has_waveforms[grp]) return None self.awgs_with_waveforms(obj.name) nrep_l = [1] * len(wfname_l) goto_l = [0] * len(wfname_l) goto_l[-1] = 1 wait_l = [1] * len(wfname_l) logic_jump_l = [0] * len(wfname_l) filename = 'pycqed_pulsar.awg' awg_file = obj.generate_awg_file(packed_waveforms, np.array(wfname_l).transpose().copy(), nrep_l, wait_l, goto_l, logic_jump_l, self._awg5014_chan_cfg(obj.name)) obj.send_awg_file(filename, awg_file) obj.load_awg_file(filename) for par in pars: obj.set(par, old_vals[par]) time.sleep(.1) # Waits for AWG to be ready obj.is_awg_ready() for grp in ['ch1', 'ch2', 'ch3', 'ch4']: obj.set('{}_state'.format(grp), 1*grp_has_waveforms[grp]) hardware_offsets = 0 for grp in ['ch1', 'ch2', 'ch3', 'ch4']: cname = self._id_channel(grp, obj.name) offset_mode = self.get('{}_offset_mode'.format(cname)) if offset_mode == 'hardware': hardware_offsets = 1 obj.DC_output(hardware_offsets) return awg_file def _is_awg_running(self, obj): if not isinstance(obj, AWG5014Pulsar._supportedAWGtypes): return super()._is_awg_running(obj) return obj.get_state() != 'Idle' def _clock(self, obj, cid=None): if not isinstance(obj, AWG5014Pulsar._supportedAWGtypes): return super()._clock(obj, cid) return obj.clock_freq() @staticmethod def _awg5014_group_ids(cid): """ Returns all id-s corresponding to a single channel group. For example `Pulsar._awg5014_group_ids('ch2')` returns `['ch2', 'ch2m1', 'ch2m2']`. Args: cid: An id of one of the AWG5014 channels. Returns: A list of id-s corresponding to the same group as `cid`. 
""" return [cid[:3], cid[:3] + 'm1', cid[:3] + 'm2'] def _awg5014_chan_cfg(self, awg): channel_cfg = {} for channel in self.channels: if self.get('{}_awg'.format(channel)) != awg: continue cid = self.get('{}_id'.format(channel)) amp = self.get('{}_amp'.format(channel)) off = self.get('{}_offset'.format(channel)) if self.get('{}_type'.format(channel)) == 'analog': offset_mode = self.get('{}_offset_mode'.format(channel)) channel_cfg['ANALOG_METHOD_' + cid[2]] = 1 channel_cfg['ANALOG_AMPLITUDE_' + cid[2]] = amp * 2 if offset_mode == 'software': channel_cfg['ANALOG_OFFSET_' + cid[2]] = off channel_cfg['DC_OUTPUT_LEVEL_' + cid[2]] = 0 channel_cfg['EXTERNAL_ADD_' + cid[2]] = 0 else: channel_cfg['ANALOG_OFFSET_' + cid[2]] = 0 channel_cfg['DC_OUTPUT_LEVEL_' + cid[2]] = off channel_cfg['EXTERNAL_ADD_' + cid[2]] = 1 else: channel_cfg['MARKER1_METHOD_' + cid[2]] = 2 channel_cfg['MARKER2_METHOD_' + cid[2]] = 2 channel_cfg['MARKER{}_LOW_{}'.format(cid[-1], cid[2])] = \ off channel_cfg['MARKER{}_HIGH_{}'.format(cid[-1], cid[2])] = \ off + amp channel_cfg['CHANNEL_STATE_' + cid[2]] = 0 for channel in self.channels: if self.get('{}_awg'.format(channel)) != awg: continue if self.get('{}_active'.format(awg)): cid = self.get('{}_id'.format(channel)) channel_cfg['CHANNEL_STATE_' + cid[2]] = 1 return channel_cfg class Pulsar(AWG5014Pulsar, HDAWG8Pulsar, UHFQCPulsar, Instrument): """ A meta-instrument responsible for all communication with the AWGs. Contains information about all the available awg-channels in the setup. Starting, stopping and programming and changing the parameters of the AWGs should be done through Pulsar. Supports Tektronix AWG5014 and partially ZI UHFLI. Args: master_awg: Name of the AWG that triggers all the other AWG-s and should be started last (after other AWG-s are already waiting for a trigger. """ def __init__(self, name='Pulsar', master_awg=None): super().__init__(name) self.add_parameter('master_awg', parameter_class=InstrumentRefParameter, initial_value=master_awg) self.add_parameter('inter_element_spacing', vals=vals.MultiType(vals.Numbers(0), vals.Enum('auto')), set_cmd=self._set_inter_element_spacing, get_cmd=self._get_inter_element_spacing) self.add_parameter('reuse_waveforms', initial_value=False, parameter_class=ManualParameter, vals=vals.Bool()) self.add_parameter('append_zeros', initial_value=0, vals=vals.Ints(), parameter_class=ManualParameter) self.add_parameter('flux_crosstalk_cancellation', initial_value=False, parameter_class=ManualParameter, vals=vals.Bool()) self.add_parameter('flux_channels', initial_value=[], parameter_class=ManualParameter, vals=vals.Lists()) self.add_parameter('flux_crosstalk_cancellation_mtx', initial_value=None, parameter_class=ManualParameter) self.add_parameter('flux_crosstalk_cancellation_shift_mtx', initial_value=None, parameter_class=ManualParameter) self._inter_element_spacing = 'auto' self.channels = set() # channel names self.awgs = set() # AWG names self.last_sequence = None self.last_elements = None self._awgs_with_waveforms = set() self._awgs_prequeried_state = False self._zi_waves_cleared = False self._hash_to_wavename_table = {} self.num_seg = 0 Pulsar._instance = self @staticmethod def get_instance(): return Pulsar._instance # channel handling def define_awg_channels(self, awg, channel_name_map=None): """ The AWG object must be created before creating channels for that AWG Args: awg: AWG object to add to the pulsar. channel_name_map: A dictionary that maps channel ids to channel names. 
(default {}) """ if channel_name_map is None: channel_name_map = {} for channel_name in channel_name_map.values(): if channel_name in self.channels: raise KeyError("Channel named '{}' already defined".format( channel_name)) if awg.name in self.awgs: raise KeyError("AWG '{}' already added to pulsar".format(awg.name)) fail = None super()._create_awg_parameters(awg, channel_name_map) # try: # super()._create_awg_parameters(awg, channel_name_map) # except AttributeError as e: # fail = e # if fail is not None: # raise TypeError('Unsupported AWG instrument: {}. ' # .format(awg.name) + str(fail)) self.awgs.add(awg.name) def find_awg_channels(self, awg): channel_list = [] for channel in self.channels: if self.get('{}_awg'.format(channel)) == awg: channel_list.append(channel) return channel_list def AWG_obj(self, **kw): """ Return the AWG object corresponding to a channel or an AWG name. Args: awg: Name of the AWG Instrument. channel: Name of the channel Returns: An instance of Instrument class corresponding to the AWG requested. """ awg = kw.get('awg', None) chan = kw.get('channel', None) if awg is not None and chan is not None: raise ValueError('Both `awg` and `channel` arguments passed to ' 'Pulsar.AWG_obj()') elif awg is None and chan is not None: name = self.get('{}_awg'.format(chan)) elif awg is not None and chan is None: name = awg else: raise ValueError('Either `awg` or `channel` argument needs to be ' 'passed to Pulsar.AWG_obj()') return Instrument.find_instrument(name) def clock(self, channel=None, awg=None): """ Returns the clock rate of channel or AWG 'instrument_ref' Args: isntrument_ref: name of the channel or AWG Returns: clock rate in samples per second """ if channel is not None and awg is not None: raise ValueError('Both channel and awg arguments passed to ' 'Pulsar.clock()') if channel is None and awg is None: raise ValueError('Neither channel nor awg arguments passed to ' 'Pulsar.clock()') if channel is not None: awg = self.get('{}_awg'.format(channel)) if self._awgs_prequeried_state: return self._clocks[awg] else: fail = None obj = self.AWG_obj(awg=awg) try: return super()._clock(obj) except AttributeError as e: fail = e if fail is not None: raise TypeError('Unsupported AWG instrument: {} of type {}. ' .format(obj.name, type(obj)) + str(fail)) def active_awgs(self): """ Returns: A set of the names of the active AWGs registered Inactive AWGs don't get started or stopped. Also the waveforms on inactive AWGs don't get updated. """ return {awg for awg in self.awgs if self.get('{}_active'.format(awg))} def awgs_with_waveforms(self, awg=None): """ Adds an awg to the set of AWGs with waveforms programmed, or returns set of said AWGs. """ if awg == None: return self._awgs_with_waveforms else: self._awgs_with_waveforms.add(awg) def start(self, exclude=None): """ Start the active AWGs. If multiple AWGs are used in a setup where the slave AWGs are triggered by the master AWG, then the slave AWGs must be running and waiting for trigger when the master AWG is started to ensure synchronous playback. """ if exclude is None: exclude = [] # Start only the AWGs which have at least one channel programmed, i.e. # where at least one channel has state = 1. 
awgs_with_waveforms = self.awgs_with_waveforms() used_awgs = set(self.active_awgs()) & awgs_with_waveforms for awg in used_awgs: self._stop_awg(awg) if self.master_awg() is None: for awg in used_awgs: if awg not in exclude: self._start_awg(awg) else: if self.master_awg() not in exclude: self.master_awg.get_instr().stop() for awg in used_awgs: if awg != self.master_awg() and awg not in exclude: self._start_awg(awg) tstart = time.time() for awg in used_awgs: if awg == self.master_awg() or awg in exclude: continue good = False while not (good or time.time() > tstart + 10): if self._is_awg_running(awg): good = True else: time.sleep(0.1) if not good: raise Exception('AWG {} did not start in 10s' .format(awg)) if self.master_awg() not in exclude: self.master_awg.get_instr().start() def stop(self): """ Stop all active AWGs. """ awgs_with_waveforms = set(self.awgs_with_waveforms()) used_awgs = set(self.active_awgs()) & awgs_with_waveforms for awg in used_awgs: self._stop_awg(awg) def program_awgs(self, sequence, awgs='all'): # Stores the last uploaded sequence for easy access and plotting self.last_sequence = sequence if awgs == 'all': awgs = self.active_awgs() # initializes the set of AWGs with waveforms
        # prequery all AWG clock values and AWG amplitudes
        self.AWGs_prequeried(True)

        log.info(f'Starting compilation of sequence {sequence.name}')
        t0 = time.time()
        waveforms, awg_sequences = sequence.generate_waveforms_sequences()
        log.info(f'Finished compilation of sequence {sequence.name} in '
                 f'{time.time() - t0}')

        channels_used = self._channels_in_awg_sequences(awg_sequences)
        repeat_dict = self._generate_awg_repeat_dict(sequence.repeat_patterns,
                                                     channels_used)

        self._zi_waves_cleared = False
        self._hash_to_wavename_table = {}

        for awg in awgs:
            log.info(f'Started programming {awg}')
            t0 = time.time()
            if awg in repeat_dict.keys():
                self._program_awg(self.AWG_obj(awg=awg),
                                  awg_sequences.get(awg, {}), waveforms,
                                  repeat_pattern=repeat_dict[awg])
            else:
                self._program_awg(self.AWG_obj(awg=awg),
                                  awg_sequences.get(awg, {}), waveforms)
            log.info(f'Finished programming {awg} in {time.time() - t0}')

        self.num_seg = len(sequence.segments)
        self.AWGs_prequeried(False)

    def _program_awg(self, obj, awg_sequence, waveforms, repeat_pattern=None):
        """
        Program the AWG with a sequence of segments.

        Args:
            obj: the instance of the AWG to program
            awg_sequence: the sequencing information for this AWG, as
                generated by `Sequence.generate_waveforms_sequences()`
            waveforms: a dictionary from waveform hashes to waveform arrays
            repeat_pattern: an optional repeat pattern for the sequence
        """
        # fail = None
        # try:
        #     super()._program_awg(obj, awg_sequence, waveforms)
        # except AttributeError as e:
        #     fail = e
        # if fail is not None:
        #     raise TypeError('Unsupported AWG instrument: {} of type {}. '
        #                     .format(obj.name, type(obj)) + str(fail))
        if repeat_pattern is not None:
            super()._program_awg(obj, awg_sequence, waveforms,
                                 repeat_pattern=repeat_pattern)
        else:
            super()._program_awg(obj, awg_sequence, waveforms)

    def _hash_to_wavename(self, h):
        alphabet = 'abcdefghijklmnopqrstuvwxyz'
        if h not in self._hash_to_wavename_table:
            hash_int = abs(hash(h))
            wname = ''.join(to_base(hash_int, len(alphabet), alphabet))[::-1]
            while wname in self._hash_to_wavename_table.values():
                hash_int += 1
                wname = ''.join(to_base(hash_int, len(alphabet), alphabet)) \
                    [::-1]
            self._hash_to_wavename_table[h] = wname
        return self._hash_to_wavename_table[h]

    def _zi_wave_definition(self, wave, defined_waves=None):
        if defined_waves is None:
            defined_waves = set()
        wave_definition = []
        w1, w2 = self._zi_waves_to_wavenames(wave)
        for analog, marker, wc in [(wave[0], wave[1], w1),
                                   (wave[2], wave[3], w2)]:
            if analog is not None:
                wa = self._hash_to_wavename(analog)
                if wa not in defined_waves:
                    wave_definition.append(f'wave {wa} = "{wa}";')
                    defined_waves.add(wa)
            if marker is not None:
                wm = self._hash_to_wavename(marker)
                if wm not in defined_waves:
                    wave_definition.append(f'wave {wm} = "{wm}";')
                    defined_waves.add(wm)
            if analog is not None and marker is not None:
                if wc not in defined_waves:
                    wave_definition.append(f'wave {wc} = {wa} + {wm};')
                    defined_waves.add(wc)
        return wave_definition

    def _zi_playback_string(self, name, device, wave, acq=False,
                            codeword=False, append_zeros=0):
        playback_string = []
        w1, w2 = self._zi_waves_to_wavenames(wave)

        trig_source = self.get('{}_trigger_source'.format(name))
        if trig_source == 'Dig1':
            playback_string.append(
                'waitDigTrigger(1{});'.format(', 1' if device == 'uhf' else ''))
        elif trig_source == 'Dig2':
            playback_string.append('waitDigTrigger(2,1);')
        else:
            playback_string.append(f'wait{trig_source}Trigger();')

        if codeword and not (w1 is None and w2 is None):
            playback_string.append('playWaveDIO();')
        else:
            if w1 is None and w2 is not None:
                # This hack is needed due to a bug on the HDAWG.
                # Remove this if case once the bug is fixed.
                playback_string.append(f'playWave(marker(1,0)*0*{w2}, {w2});')
            elif w1 is not None and w2 is None:
                # This hack is needed due to a bug on the HDAWG.
                # Remove this if case once the bug is fixed.
                playback_string.append(f'playWave({w1}, marker(1,0)*0*{w1});')
            elif w1 is not None or w2 is not None:
                playback_string.append('playWave({});'.format(
                    _zi_wavename_pair_to_argument(w1, w2)))
        if acq:
            playback_string.append('setTrigger(RO_TRIG);')
            playback_string.append('setTrigger(WINT_EN);')
        if append_zeros:
            playback_string.append(f'playZero({append_zeros});')
        return playback_string

    def _zi_interleaved_playback_string(self, name, device, counter, wave,
                                        acq=False, codeword=False):
        playback_string = []
        w1, w2 = self._zi_waves_to_wavenames(wave)
        if w1 is None or w2 is None:
            raise ValueError('When using HDAWG modulation both I and Q need '
                             'to be defined')

        wname = f'wave{counter}'
        interleaves = [f'wave {wname} = interleave({w1}, {w2});']

        if not codeword:
            if not acq:
                playback_string.append(f'prefetch({wname},{wname});')

        trig_source = self.get('{}_trigger_source'.format(name))
        if trig_source == 'Dig1':
            playback_string.append(
                'waitDigTrigger(1{});'.format(', 1' if device == 'uhf' else ''))
        elif trig_source == 'Dig2':
            playback_string.append('waitDigTrigger(2,1);')
        else:
            playback_string.append(f'wait{trig_source}Trigger();')

        if codeword:
            # playback_string.append('playWaveDIO();')
            raise NotImplementedError('Modulation in combination with codeword '
                                      'pulses has not yet been implemented!')
        else:
            playback_string.append(f'playWave({wname},{wname});')
        if acq:
            playback_string.append('setTrigger(RO_TRIG);')
            playback_string.append('setTrigger(WINT_EN);')
        return playback_string, interleaves

    def _zi_codeword_table_entry(self, codeword, wave):
        w1, w2 = self._zi_waves_to_wavenames(wave)
        if w1 is None and w2 is not None:
            # This hack is needed due to a bug on the HDAWG.
            # Remove this if case once the bug is fixed.
            return [f'setWaveDIO({codeword}, zeros(1) + marker(1, 0), {w2});']
        elif not (w1 is None and w2 is None):
            return ['setWaveDIO({}, {});'.format(
                codeword, _zi_wavename_pair_to_argument(w1, w2))]
        else:
            return []

    def _zi_waves_to_wavenames(self, wave):
        wavenames = []
        for analog, marker in [(wave[0], wave[1]), (wave[2], wave[3])]:
            if analog is None and marker is None:
                wavenames.append(None)
            elif analog is None and marker is not None:
                wavenames.append(self._hash_to_wavename(marker))
            elif analog is not None and marker is None:
                wavenames.append(self._hash_to_wavename(analog))
            else:
                wavenames.append(self._hash_to_wavename((analog, marker)))
        return wavenames

    def _zi_write_waves(self, waveforms):
        wave_dir = _zi_wave_dir()
        for h, wf in waveforms.items():
            filename = os.path.join(wave_dir, self._hash_to_wavename(h)+'.csv')
            # np.float was an alias of the builtin float (i.e. float64) and
            # was removed in NumPy 1.24; compare against np.float64 instead.
            fmt = '%.18e' if wf.dtype == np.float64 else '%d'
            np.savetxt(filename, wf, delimiter=",", fmt=fmt)

    def _start_awg(self, awg):
        obj = self.AWG_obj(awg=awg)
        obj.start()

    def _stop_awg(self, awg):
        obj = self.AWG_obj(awg=awg)
        obj.stop()

    def _is_awg_running(self, awg):
        fail = None
        obj = self.AWG_obj(awg=awg)
        try:
            return super()._is_awg_running(obj)
        except AttributeError as e:
            fail = e
        if fail is not None:
            raise TypeError('Unsupported AWG instrument: {} of type {}. '
                            .format(obj.name, type(obj)) + str(fail))

    def _set_inter_element_spacing(self, val):
        self._inter_element_spacing = val

    def _get_inter_element_spacing(self):
        if self._inter_element_spacing != 'auto':
            return self._inter_element_spacing
        else:
            max_spacing = 0
            for awg in self.awgs:
                max_spacing = max(max_spacing, self.get(
                    '{}_inter_element_deadtime'.format(awg)))
            return max_spacing

    def AWGs_prequeried(self, status=None):
        if status is None:
            return self._awgs_prequeried_state
        elif status:
            self._awgs_prequeried_state = False
            self._clocks = {}
            for awg in self.awgs:
                self._clocks[awg] = self.clock(awg=awg)
            for c in self.channels:
                # prequery also the output amplitude values
                self.get(c + '_amp')
            self._awgs_prequeried_state = True
        else:
            self._awgs_prequeried_state = False

    def _id_channel(self, cid, awg):
        """
        Returns the channel name corresponding to the channel with id `cid` on
        the AWG `awg`.

        Args:
            cid: An id of one of the channels.
            awg: The name of the AWG.

        Returns:
            The corresponding channel name. If the channel is not found,
            returns `None`.
        """
        for cname in self.channels:
            if self.get('{}_awg'.format(cname)) == awg and \
               self.get('{}_id'.format(cname)) == cid:
                return cname
        return None

    @staticmethod
    def _channels_in_awg_sequences(awg_sequences):
        """
        Identifies all channels used in the given AWG-keyed sequences.
        :param awg_sequences (dict): awg sequences keyed by awg name, i.e. as
            returned by sequence.generate_waveforms_sequences()
        :return: dictionary keyed by awg with all channels used during the
            sequence
        """
        channels_used = dict()
        for awg in awg_sequences:
            channels_used[awg] = set()
            for segname in awg_sequences[awg]:
                if awg_sequences[awg][segname] is None:
                    continue
                elements = awg_sequences[awg][segname]
                for cw in elements:
                    if cw != "metadata":
                        channels_used[awg] |= elements[cw].keys()
        return channels_used

    def _generate_awg_repeat_dict(self, repeat_dict_per_ch, channels_used):
        """
        Translates a repeat dictionary keyed by channels to a repeat dictionary
        keyed by awg. Checks whether all channels in channels_used have an
        entry.
        :param repeat_dict_per_ch: keys: channel names, values: repeat pattern
        :param channels_used (dict): set of channels used on each awg
        :return:
        """
        awg_ch_repeat_dict = dict()
        repeat_dict_per_awg = dict()
        for cname in repeat_dict_per_ch:
            awg = self.get(f"{cname}_awg")
            chid = self.get(f"{cname}_id")

            if awg not in awg_ch_repeat_dict:
                awg_ch_repeat_dict[awg] = []
            awg_ch_repeat_dict[awg].append(chid)
            if repeat_dict_per_awg.get(awg, repeat_dict_per_ch[cname]) \
                    != repeat_dict_per_ch[cname]:
                raise NotImplementedError(f"Repeat pattern on {cname} is "
                    f"different from at least one other channel on {awg}:"
                    f"{repeat_dict_per_ch[cname]} vs {repeat_dict_per_awg[awg]}")
            repeat_dict_per_awg[awg] = repeat_dict_per_ch[cname]

        for awg_repeat, chs_repeat in awg_ch_repeat_dict.items():
            for ch in channels_used[awg_repeat]:
                assert ch in chs_repeat, f"Repeat pattern " \
                    f"provided for {awg_repeat} but no pattern was given on " \
                    f"{ch}. All used channels on the same awg must have a " \
                    f"repeat pattern."
return repeat_dict_per_awg def to_base(n, b, alphabet=None, prev=None): if prev is None: prev = [] if n == 0: if alphabet is None: return prev else: return [alphabet[i] for i in prev] return to_base(n//b, b, alphabet, prev+[n%b]) def _zi_wave_dir(): if os.name == 'nt': dll = ctypes.windll.shell32 buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH + 1) if dll.SHGetSpecialFolderPathW(None, buf, 0x0005, False): _basedir = buf.value else: log.warning('Could not extract my documents folder') else: _basedir = os.path.expanduser('~') wave_dir = os.path.join(_basedir, 'Zurich Instruments', 'LabOne', 'WebServer', 'awg', 'waves') if not os.path.exists(wave_dir): os.makedirs(wave_dir) return wave_dir def _zi_clear_waves(): wave_dir = _zi_wave_dir() for f in os.listdir(wave_dir): if f.endswith(".csv"): os.remove(os.path.join(wave_dir, f)) elif f.endswith('.cache'): shutil.rmtree(os.path.join(wave_dir, f)) def _zi_wavename_pair_to_argument(w1, w2): if w1 is not None and w2 is not None: return f'{w1}, {w2}' elif w1 is not None and w2 is None: return f'1, {w1}' elif w1 is None and w2 is not None: return f'2, {w2}' else: return ''
self._awgs_with_waveforms -= awgs
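A quick illustration of the wave-naming scheme above: `to_base` expands the absolute Python hash of a waveform into base-26 digits, which `_hash_to_wavename` joins and reverses into a purely alphabetic name that is legal both as a SeqC wave identifier and as a CSV filename for `_zi_write_waves`. A minimal standalone sketch (the sample hash input is made up):

def to_base(n, b, alphabet=None, prev=None):
    # identical to the helper above; digits accumulate least-significant first
    if prev is None:
        prev = []
    if n == 0:
        return prev if alphabet is None else [alphabet[i] for i in prev]
    return to_base(n // b, b, alphabet, prev + [n % b])

alphabet = 'abcdefghijklmnopqrstuvwxyz'
hash_int = abs(hash(('seg0', 'ch1')))  # made-up waveform hash
# reverse so the most-significant digit comes first
wname = ''.join(to_base(hash_int, len(alphabet), alphabet))[::-1]
print(wname)  # letters only, e.g. 'dkqzmfwhula'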
color.rs
extern crate log; pub mod colors { use crate::gfx::color::Color; pub const WHITE: Color = Color { r: 1.0, g: 1.0, b: 1.0, a: 1.0, }; pub const BLACK: Color = Color { r: 0.0, g: 0.0, b: 0.0, a: 1.0, }; pub const RED: Color = Color { r: 1.0, g: 0.0, b: 0.0, a: 1.0, }; pub const GREEN: Color = Color { r: 0.0, g: 1.0, b: 0.0, a: 1.0, }; pub const BLUE: Color = Color { r: 0.0, g: 0.0, b: 1.0, a: 1.0, }; } #[derive(Clone, Copy)] pub struct Color { pub r: f32, pub g: f32, pub b: f32, pub a: f32, } impl Color { pub fn from_rgba(r: f32, g: f32, b: f32, a: f32) -> Color { Color { r, g, b, a } } pub fn from_rgb(r: f32, g: f32, b: f32) -> Color { Color { r, g, b, a: 1.0 } } pub fn from_float_bits(bits: f32) -> Color
pub fn to_rgba8(self) -> f32 { let r = (self.r * 255.0) as u32; let g = (self.g * 255.0) as u32; let b = (self.b * 255.0) as u32; let a = (self.a * 255.0) as u32; let mut bits: u32 = 0; bits |= r << 24; bits |= g << 16; bits |= b << 8; bits |= a; f32::from_bits(bits) } }
{
        // Inverse of to_rgba8: unpack the four channel bytes and scale them
        // back into the 0.0..=1.0 range used everywhere else in Color.
        let bits: u32 = bits.to_bits();
        let r = ((bits & 0xFF000000) >> 24) as f32 / 255.0;
        let g = ((bits & 0x00FF0000) >> 16) as f32 / 255.0;
        let b = ((bits & 0x0000FF00) >> 8) as f32 / 255.0;
        let a = (bits & 0x000000FF) as f32 / 255.0;
        Color { r, g, b, a }
    }
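A small roundtrip sketch of the packing scheme (it assumes only the Color type above): each channel is scaled to a byte, the four bytes are packed into a u32, and the result is reinterpreted as the bit pattern of an f32; from_float_bits reverses the process. The channel values are chosen so the packed bits do not form a NaN pattern:

fn main() {
    let c = Color::from_rgba(0.25, 0.5, 0.75, 1.0);
    let packed = c.to_rgba8(); // bytes 63, 127, 191, 255 in the f32's bits
    let back = Color::from_float_bits(packed);
    // 8-bit quantization loses at most 1/255 per channel
    assert!((back.g - c.g).abs() <= 1.0 / 255.0);
}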
spaces.test.js
/* @flow */ import path from "path" import zealot from "../../index"
import {ztestDir} from "../env" describe("zealot client spaces tests", () => { const client = zealot.client("localhost:9867") let spaceName = "newSpace" let spaceId const spacePath = (spaceId) => path.join(path.resolve(ztestDir()), spaceId) const emptySpace = { pcap_path: "", pcap_size: 0, pcap_support: false, size: 0 } test("create space", async () => { const resp = await client.spaces.create({name: spaceName}) expect(resp.name).toBe(spaceName) expect(resp.id).toBeDefined() spaceId = resp.id }) test("list spaces", async () => { const resp = await client.spaces.list() expect(resp).toHaveLength(1) expect(resp).toEqual([ { ...emptySpace, data_path: spacePath(spaceId), name: spaceName, id: spaceId } ]) }) test("update space", async () => { const newName = "updated space name" const resp = await client.spaces.update(spaceId, {name: newName}) expect(resp).toEqual("") spaceName = newName }) test("get space by id", async () => { const resp = await client.spaces.get(spaceId) expect(resp).toEqual({ ...emptySpace, data_path: spacePath(spaceId), name: spaceName, id: spaceId }) }) test("delete space", async () => { const resp = await client.spaces.delete(spaceId) expect(resp).toBe("") }) })
helpers.ts
import fs from 'fs';

import { AnyObject } from '../../types/general';

/**
 * helper to remove leading zero from string
 * e.g. 02 -> 2
 * @param {string} str
 */
export const removeLeadingZero = (str: string) =>
  str[0] === '0' ? str.slice(1) : str;

export const capitalizeFirst = (str: string) => {
  const split = str.split('');
  return split[0].toUpperCase() + split.slice(1).join('');
};
 * @param {object} data to be prettified
 */
const prettyJSON = (data: AnyObject) => JSON.stringify(data, null, 2) + '\n';

/**
 * helper to save js object to a json file to view the data more easily
 * @param {string} name of file to save the data to
 * @param {object} data of js object to save as json
 */
export const writeToJson = (name: string, data: AnyObject) =>
  fs.writeFile(
    `tmp/${name}.json`,
    prettyJSON(data),
    err => err && console.log(err)
  );

type DistanceUnit = 'K' | 'N';

/**
 * Helper to get distance between two points
 * @see https://www.geodatasource.com/developers/javascript
 */
export const distance = (
  lat1: number,
  lon1: number,
  lat2: number,
  lon2: number,
  unit?: DistanceUnit
) => {
  const radlat1 = (Math.PI * lat1) / 180;
  const radlat2 = (Math.PI * lat2) / 180;
  const theta = lon1 - lon2;
  const radtheta = (Math.PI * theta) / 180;
  let dist =
    Math.sin(radlat1) * Math.sin(radlat2) +
    Math.cos(radlat1) * Math.cos(radlat2) * Math.cos(radtheta);
  if (dist > 1) {
    dist = 1;
  }
  dist = Math.acos(dist);
  dist = (dist * 180) / Math.PI;
  dist = dist * 60 * 1.1515;
  if (unit === 'K') {
    dist = dist * 1.609344;
  }
  if (unit === 'N') {
    dist = dist * 0.8684;
  }
  return dist;
};
/** * helper to stringify in JSON in a pretty way
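For reference, a usage sketch of the distance helper above (the city coordinates are illustrative): the default return unit is statute miles, 'K' converts to kilometres and 'N' to nautical miles.

const berlin = { lat: 52.52, lon: 13.405 };
const paris = { lat: 48.8566, lon: 2.3522 };

const miles = distance(berlin.lat, berlin.lon, paris.lat, paris.lon);
const km = distance(berlin.lat, berlin.lon, paris.lat, paris.lon, 'K');
console.log(miles.toFixed(0), km.toFixed(0)); // roughly 546 mi / 878 km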
filters.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # filters.py # # Authors: # - Mamadou CISSE <[email protected]> # from django.contrib.auth import get_user_model from django_filters import rest_framework as filters from django.db.models import Q User = get_user_model() class UserFilter(filters.FilterSet): is_admin = filters.BooleanFilter(label='Admin', method='filter_is_admin') class
: model = User fields = { 'is_editor': ['exact'], } def filter_is_admin(self, queryset, name, value): if value: return queryset.filter( Q(is_staff=True) | Q(is_superuser=True) ) return queryset.filter( Q(is_staff=False) & Q(is_superuser=False) )
Meta
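A usage sketch for the filter above (assuming a configured Django project with django-filter installed): is_admin is a synthetic boolean parameter, so filtering goes through the filter_is_admin method rather than a model field.

# e.g. in a shell or a test; query params arrive as strings
admins = UserFilter({'is_admin': 'true'}, queryset=User.objects.all()).qs
# -> rows where is_staff=True OR is_superuser=True
regular = UserFilter({'is_admin': 'false'}, queryset=User.objects.all()).qs
# -> rows where is_staff=False AND is_superuser=False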
zz_generated_privatelinkresources_client.go
//go:build go1.18 // +build go1.18 // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. package armsignalr import ( "context" "errors" "github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" armruntime "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/runtime" "github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud" "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy" "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime" "net/http" "net/url" "strings" ) // PrivateLinkResourcesClient contains the methods for the SignalRPrivateLinkResources group. // Don't use this type directly, use NewPrivateLinkResourcesClient() instead. type PrivateLinkResourcesClient struct { host string subscriptionID string pl runtime.Pipeline } // NewPrivateLinkResourcesClient creates a new instance of PrivateLinkResourcesClient with the specified values. // subscriptionID - Gets subscription Id which uniquely identify the Microsoft Azure subscription. The subscription ID forms // part of the URI for every service call. // credential - used to authorize requests. Usually a credential from azidentity. // options - pass nil to accept the default values. func NewPrivateLinkResourcesClient(subscriptionID string, credential azcore.TokenCredential, options *arm.ClientOptions) (*PrivateLinkResourcesClient, error)
// NewListPager - Get the private link resources that need to be created for a resource. // If the operation fails it returns an *azcore.ResponseError type. // Generated from API version 2022-02-01 // resourceGroupName - The name of the resource group that contains the resource. You can obtain this value from the Azure // Resource Manager API or the portal. // resourceName - The name of the resource. // options - PrivateLinkResourcesClientListOptions contains the optional parameters for the PrivateLinkResourcesClient.List // method. func (client *PrivateLinkResourcesClient) NewListPager(resourceGroupName string, resourceName string, options *PrivateLinkResourcesClientListOptions) *runtime.Pager[PrivateLinkResourcesClientListResponse] { return runtime.NewPager(runtime.PagingHandler[PrivateLinkResourcesClientListResponse]{ More: func(page PrivateLinkResourcesClientListResponse) bool { return page.NextLink != nil && len(*page.NextLink) > 0 }, Fetcher: func(ctx context.Context, page *PrivateLinkResourcesClientListResponse) (PrivateLinkResourcesClientListResponse, error) { var req *policy.Request var err error if page == nil { req, err = client.listCreateRequest(ctx, resourceGroupName, resourceName, options) } else { req, err = runtime.NewRequest(ctx, http.MethodGet, *page.NextLink) } if err != nil { return PrivateLinkResourcesClientListResponse{}, err } resp, err := client.pl.Do(req) if err != nil { return PrivateLinkResourcesClientListResponse{}, err } if !runtime.HasStatusCode(resp, http.StatusOK) { return PrivateLinkResourcesClientListResponse{}, runtime.NewResponseError(resp) } return client.listHandleResponse(resp) }, }) } // listCreateRequest creates the List request. func (client *PrivateLinkResourcesClient) listCreateRequest(ctx context.Context, resourceGroupName string, resourceName string, options *PrivateLinkResourcesClientListOptions) (*policy.Request, error) { urlPath := "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.SignalRService/signalR/{resourceName}/privateLinkResources" if client.subscriptionID == "" { return nil, errors.New("parameter client.subscriptionID cannot be empty") } urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subscriptionID)) if resourceGroupName == "" { return nil, errors.New("parameter resourceGroupName cannot be empty") } urlPath = strings.ReplaceAll(urlPath, "{resourceGroupName}", url.PathEscape(resourceGroupName)) if resourceName == "" { return nil, errors.New("parameter resourceName cannot be empty") } urlPath = strings.ReplaceAll(urlPath, "{resourceName}", url.PathEscape(resourceName)) req, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.host, urlPath)) if err != nil { return nil, err } reqQP := req.Raw().URL.Query() reqQP.Set("api-version", "2022-02-01") req.Raw().URL.RawQuery = reqQP.Encode() req.Raw().Header["Accept"] = []string{"application/json"} return req, nil } // listHandleResponse handles the List response. func (client *PrivateLinkResourcesClient) listHandleResponse(resp *http.Response) (PrivateLinkResourcesClientListResponse, error) { result := PrivateLinkResourcesClientListResponse{} if err := runtime.UnmarshalAsJSON(resp, &result.PrivateLinkResourceList); err != nil { return PrivateLinkResourcesClientListResponse{}, err } return result, nil }
{ if options == nil { options = &arm.ClientOptions{} } ep := cloud.AzurePublic.Services[cloud.ResourceManager].Endpoint if c, ok := options.Cloud.Services[cloud.ResourceManager]; ok { ep = c.Endpoint } pl, err := armruntime.NewPipeline(moduleName, moduleVersion, credential, runtime.PipelineOptions{}, options) if err != nil { return nil, err } client := &PrivateLinkResourcesClient{ subscriptionID: subscriptionID, host: ep, pl: pl, } return client, nil }
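A consumption sketch for the pager returned above (credential setup omitted; the resource names are placeholders): More and NextPage drive the fetcher until NextLink is exhausted.

pager := client.NewListPager("myResourceGroup", "mySignalR", nil)
for pager.More() {
	page, err := pager.NextPage(context.TODO())
	if err != nil {
		// handle the *azcore.ResponseError
		break
	}
	for _, res := range page.Value {
		_ = res // each entry is a *PrivateLinkResource
	}
}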
app_20201203190844.js
// Create function for Data plotting (Bar, gauge, bubble)
function getdata(tm) {
  d3.json("Resources/basketball_table.json").then(function (nbaData) {
    // console.log(nbaData);
    // var trying = Object.values(nbaData);
    // // console.log(trying);
    // var code = nbaData.value();
    // console.log(code);

    // keep only the entries belonging to the selected team
    // (=== rather than the original assignment `v="tm"`)
    var grouped = Object.fromEntries(
      Object.entries(nbaData).filter(([k, v]) => v.tm === tm));
    // console.log(grouped);

    // this creates a list of unique teams
    let teamnames = new Set();
    let teamStats = new Set();
    for (var i = 0; i < nbaData.length; i++) {
      console.log(nbaData[i]);
      teamnames.add(nbaData[i]['tm']);
      teamStats.add(nbaData[i]);
    }
    console.log(teamnames);
    teamnames = [...teamnames];
    console.log(teamnames);
    teamStats = [...teamStats];
    console.log(teamStats);
    let tmlist = Array.from(teamnames);
    console.log(tmlist);

    var filtered = nbaData.filter(a => a.tm == tm);
    console.log(filtered);
    var salary_test = filtered.filter(s => s.yr2019_20.toString() == s.yr2019_20);
    console.log(salary_test);
    var player_list = salary_test.map(data => data.Player);
    console.log(player_list);
    var points = salary_test.map(data => data.pts);
    console.log(points);
    var difTeam = nbaData.map(data => data.tm)[1];
    console.log(difTeam);
    var per = salary_test.map(data => data.PER);
    console.log(per);
    var salary = salary_test.map(data => data.yr2019_20);
    // var salary = salary.toString();
    console.log(salary);
    var salary1 = [...new Set(salary)];
    console.log(salary1);

    var colors = {"ATL": "Red", "BRK": "Black", "BOS": "Green",
      "CHO": "purple", "CHI": "red", "CLE": "maroon", "DAL": "blue",
      "DEN": "navy", "DET": "blue", "GSW": "yellow", "HOU": "red",
      "IND": "blue", "LAC": "Red", "LAL": "Purple", "MEM": "blue",
      "MIA": "maroon", "MIL": "green", "MIN": "navy", "NOP": "blue",
      "NYK": "#FF7F03", "OKC": "lightblue", "ORL": "blue", "PHI": "blue",
      "PHO": "#FF7F03", "POR": "red", "SAC": "purple", "SAS": "gray",
      "TOR": "red", "UTA": "yellow", "WAS": "red"};
    console.log(colors);
    var teamList = salary_test.map(data => data.tm)[0];
    console.log(teamList);
    var testing2 = colors[teamList];
    console.log(testing2);

    // the JSON repeats the roster, so cut the arrays at the first repeat
    for (i = 1; i < player_list.length; i++) {
      if (player_list[0] == player_list[i]) {
        player_list = player_list.slice(0, i);
        salary = salary.slice(0, i);
        console.log(player_list);
        console.log(salary);
      }
    }

    var trace = {
      x: salary,
      y: player_list,
      type: "bar",
      text: player_list,
      orientation: "h",
      marker: {color: testing2},
    };
    var data = [trace];
    var layout = {
      title: "NBA Salary by Team",
      xaxis: {
        type: 'category'
      },
      margin: { l: 100, r: 100, t: 100, b: 10 },
    };
    Plotly.newPlot("bar", data, layout);

    let trace1 = {
      x: salary,
      y: points,
      text: player_list,
      mode: 'markers',
      marker: {
        color: salary,
        size: per
      },
    };
    let data1 = [trace1];
    var layout1 = {
      title: "NBA Salary"
    };
    Plotly.newPlot("bubble", data1, layout1);

    var data2 = [{
      values: salary,
      labels: player_list,
      type: "pie"
    }];
    var layout2 = {
      height: 400,
      width: 500
    };
    Plotly.newPlot('gauge', data2, layout2);
  });
}

function getMoney(tm) {
  d3.json("Resources/basketball_table.json").then((data) => {
    console.log(data);
    let teamnames = new Set();
    let teamStats = new Set();
    for (var i = 0; i < data.length; i++) {
      //console.log(data[i]);
      teamnames.add(data[i]['tm']);
      teamStats.add(data[i]);
    }
    // console.log(teamnames);
    teamnames = [...teamnames];
    // console.log(teamnames)
    teamStats = [...teamStats];
    console.log(teamStats);

    var filtered_tm = data.filter(a => a.tm === tm);
    console.log(filtered_tm);
    var salary_test = filtered_tm.filter(s => s.yr2019_20.toString() == s.yr2019_20);
    console.log(salary_test);
    var player_list = salary_test.map(data => data.Player);
    console.log(player_list);
    var salary = salary_test.map(data => data.yr2019_20);

    for (i = 1; i < player_list.length; i++) {
      if (player_list[0] == player_list[i]) {
        player_list = player_list.slice(0, i);
        salary = salary.slice(0, i);
        console.log(player_list);
        console.log(salary);
      }
    }

    var salaryInfo = d3.select("#salary-prediction");
    // empty player salary before getting new salary based on team selected
    salaryInfo.html("");
    for (var prop in filtered_tm) {
      salaryInfo.append("h5").text(filtered_tm[prop].Player + ": $" +
        filtered_tm[prop].yr2019_20);
      console.log(filtered_tm[prop].Player, filtered_tm[prop].yr2019_20);
    }
  });
}

function optionChanged(tm) {
  getdata(tm);
  getMoney(tm);
}

function init() {
  let dropdown = d3.select("#selDataset");
  d3.json("Resources/basketball_table.json").then((data) => {
    console.log(data);
    let teamnames = new Set();
    let teamStats = new Set();
    for (var i = 0; i < data.length; i++) {
      //console.log(data[i]);
      teamnames.add(data[i]['tm']);
      teamStats.add(data[i]);
    }
    // console.log(teamnames);
    teamnames = [...teamnames];
    // console.log(teamnames)
    teamStats = [...teamStats];
    // console.log(teamStats);

    teamnames.forEach(function (team) {
      // property() with no value only reads; pass the value to set it
      dropdown.append("option").text(team).property("value", team);
      console.log(team);
    });

    getdata(teamnames[0]);
    getMoney(teamnames[0]);
  });
}

init();
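The slice-based loops above trim the arrays once the first player repeats (the JSON apparently concatenates the roster more than once). A more direct sketch of the same dedupe, keyed by player name and assuming the row shape used above:

function uniqueByPlayer(rows) {
  const seen = new Map();
  rows.forEach((row) => {
    if (!seen.has(row.Player)) seen.set(row.Player, row);
  });
  return [...seen.values()];
}
// usage: var roster = uniqueByPlayer(salary_test);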
cli.py
from __future__ import print_function import os import sys from ooni import canonical_bouncer from ooni.report import __version__ from ooni.report import tool from ooni.settings import config from twisted.python import usage class Options(usage.Options): synopsis = """%s [options] upload | status """ % (os.path.basename(sys.argv[0]),) optFlags = [ ["default-collector", "d", "Upload the reports to the default " "collector that is looked up with the " "canonical bouncer."] ] optParameters = [ ["configfile", "f", None, "Specify the configuration file to use."], ["collector", "c", None, "Specify the collector to upload the result to."], ["bouncer", "b", None, "Specify the bouncer to query for a collector."] ] def opt_version(self): print("oonireport version: %s" % __version__) sys.exit(0) def parseArgs(self, *args):
def tor_check(): if not config.tor.socks_port: print("Currently oonireport requires that you start Tor yourself " "and set the socks_port inside of ooniprobe.conf") sys.exit(1) def run(): options = Options() try: options.parseOptions() except Exception as exc: print("Error: %s" % exc) print(options) sys.exit(2) config.global_options = dict(options) config.set_paths() config.read_config_file() if options['default-collector']: options['bouncer'] = canonical_bouncer if options['command'] == "upload" and options['report_file']: tor_check() return tool.upload(options['report_file'], options['collector'], options['bouncer']) elif options['command'] == "upload": tor_check() return tool.upload_all(options['collector'], options['bouncer']) elif options['command'] == "status": return tool.status() else: print(options)
        if len(args) == 0:
            raise usage.UsageError(
                "Must specify at least one command"
            )
        self['command'] = args[0]
        if self['command'] not in ("upload", "status"):
            raise usage.UsageError(
                "Must specify either command upload or status"
            )
        if self['command'] == "upload":
            try:
                self['report_file'] = args[1]
            except IndexError:
                self['report_file'] = None
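Since twisted.python.usage parses plain argument lists, parseArgs can be exercised directly; a minimal sketch:

opts = Options()
opts.parseOptions(["upload", "my_report.yaml"])  # no sys.argv needed
assert opts['command'] == "upload"
assert opts['report_file'] == "my_report.yaml"

opts = Options()
opts.parseOptions(["status"])
assert opts['command'] == "status"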
typescript.rs
use super::*; use crate::lexer::TokenContexts; use either::Either; use swc_atoms::js_word; use swc_common::{Spanned, SyntaxContext}; impl<I: Tokens> Parser<I> { /// `tsNextTokenCanFollowModifier` fn ts_next_token_can_follow_modifier(&mut self) -> PResult<bool> { debug_assert!(self.input.syntax().typescript()); // Note: TypeScript's implementation is much more complicated because // more things are considered modifiers there. // This implementation only handles modifiers not handled by @babel/parser // itself. And "static". TODO: Would be nice to avoid lookahead. Want a // hasLineBreakUpNext() method... bump!(self); Ok(!self.input.had_line_break_before_cur() && !is!(self, '(') && !is!(self, ')') && !is!(self, ':') && !is!(self, '=') && !is!(self, '?')) } /// Parses a modifier matching one the given modifier names. /// /// `tsParseModifier` pub(super) fn parse_ts_modifier( &mut self, allowed_modifiers: &[&'static str], stop_on_start_of_class_static_blocks: bool, ) -> PResult<Option<&'static str>> { if !self.input.syntax().typescript() { return Ok(None); } let pos = { let modifier = match *cur!(self, true)? { Token::Word(Word::Ident(ref w)) => w, _ => return Ok(None), }; allowed_modifiers.iter().position(|s| **s == **modifier) }; if let Some(pos) = pos { if stop_on_start_of_class_static_blocks && is!(self, "static") && peeked_is!(self, '{') { return Ok(None); } if self.try_parse_ts_bool(|p| p.ts_next_token_can_follow_modifier().map(Some))? { return Ok(Some(allowed_modifiers[pos])); } } Ok(None) } /// `tsIsListTerminator` fn is_ts_list_terminator(&mut self, kind: ParsingContext) -> PResult<bool>
/// `tsParseList` fn parse_ts_list<T, F>(&mut self, kind: ParsingContext, mut parse_element: F) -> PResult<Vec<T>> where F: FnMut(&mut Self) -> PResult<T>, { debug_assert!(self.input.syntax().typescript()); let mut buf = vec![]; while !self.is_ts_list_terminator(kind)? { // Skipping "parseListElement" from the TS source since that's just for error // handling. buf.push(parse_element(self)?); } Ok(buf) } /// `tsParseDelimitedList` fn parse_ts_delimited_list<T, F>( &mut self, kind: ParsingContext, mut parse_element: F, ) -> PResult<Vec<T>> where F: FnMut(&mut Self) -> PResult<T>, { self.parse_ts_delimited_list_inner(kind, |p| { let start = p.input.cur_pos(); Ok((start, parse_element(p)?)) }) } /// `tsParseDelimitedList` fn parse_ts_delimited_list_inner<T, F>( &mut self, kind: ParsingContext, mut parse_element: F, ) -> PResult<Vec<T>> where F: FnMut(&mut Self) -> PResult<(BytePos, T)>, { debug_assert!(self.input.syntax().typescript()); let mut buf = vec![]; loop { trace_cur!(self, parse_ts_delimited_list_inner__element); if self.is_ts_list_terminator(kind)? { break; } let (start, element) = parse_element(self)?; buf.push(element); if eat!(self, ',') { continue; } if self.is_ts_list_terminator(kind)? { break; } match kind { // Recover // const enum D { // d = 10 // g = 11 // } ParsingContext::EnumMembers => { const TOKEN: &Token = &Token::Comma; let cur = format!("{:?}", cur!(self, false).ok()); self.emit_err(self.input.cur_span(), SyntaxError::Expected(TOKEN, cur)); continue; } _ => {} } // This will fail with an error about a missing comma expect!(self, ','); } Ok(buf) } #[allow(clippy::cognitive_complexity)] fn parse_ts_bracketed_list<T, F>( &mut self, kind: ParsingContext, parse_element: F, bracket: bool, skip_first_token: bool, ) -> PResult<Vec<T>> where F: FnMut(&mut Self) -> PResult<T>, { debug_assert!(self.input.syntax().typescript()); if !skip_first_token { if bracket { expect!(self, '['); } else { expect!(self, '<'); } } let result = self.parse_ts_delimited_list(kind, parse_element)?; if bracket { expect!(self, ']'); } else { expect!(self, '>'); } Ok(result) } /// `tsParseEntityName` fn parse_ts_entity_name(&mut self, allow_reserved_words: bool) -> PResult<TsEntityName> { debug_assert!(self.input.syntax().typescript()); let init = self.parse_ident_name()?; match init { // Handle // // var a: void.x // ^ Ident { sym: js_word!("void"), .. } => { let dot_start = cur_pos!(self); let dot_span = span!(self, dot_start); self.emit_err(dot_span, SyntaxError::TS1005) } _ => {} } let mut entity = TsEntityName::Ident(init); while eat!(self, '.') { let dot_start = cur_pos!(self); if !is!(self, '#') && !is!(self, IdentName) { self.emit_err( Span::new(dot_start, dot_start, Default::default()), SyntaxError::TS1003, ); return Ok(entity); } let left = entity; let right = if allow_reserved_words { self.parse_ident_name()? } else { self.parse_ident(false, false)? }; entity = TsEntityName::TsQualifiedName(Box::new(TsQualifiedName { left, right })); } Ok(entity) } /// `tsParseTypeReference` fn parse_ts_type_ref(&mut self) -> PResult<TsTypeRef> { trace_cur!(self, parse_ts_type_ref); debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); let has_modifier = self.eat_any_ts_modifier()?; let type_name = self.parse_ts_entity_name(/* allow_reserved_words */ true)?; trace_cur!(self, parse_ts_type_ref__type_args); let type_params = if !self.input.had_line_break_before_cur() && is!(self, '<') { Some(self.parse_ts_type_args()?) 
} else { None }; if has_modifier { self.emit_err(span!(self, start), SyntaxError::TS2369); } Ok(TsTypeRef { span: span!(self, start), type_name, type_params, }) } /// `tsParseThisTypePredicate` fn parse_ts_this_type_predicate( &mut self, start: BytePos, has_asserts_keyword: bool, lhs: TsThisType, ) -> PResult<TsTypePredicate> { debug_assert!(self.input.syntax().typescript()); let param_name = TsThisTypeOrIdent::TsThisType(lhs); let type_ann = if eat!(self, "is") { let cur_pos = cur_pos!(self); Some(self.parse_ts_type_ann( // eat_colon false, cur_pos, )?) } else { None }; Ok(TsTypePredicate { span: span!(self, start), asserts: has_asserts_keyword, param_name, type_ann, }) } /// `tsParseThisTypeNode` fn parse_ts_this_type_node(&mut self) -> PResult<TsThisType> { debug_assert!(self.input.syntax().typescript()); expect!(self, "this"); Ok(TsThisType { span: self.input.prev_span(), }) } /// `tsParseImportType` fn parse_ts_import_type(&mut self) -> PResult<TsImportType> { let start = cur_pos!(self); assert_and_bump!(self, "import"); expect!(self, '('); let _ = cur!(self, false); let arg_span = self.input.cur_span(); let arg = match cur!(self, true)? { Token::Str { .. } => match bump!(self) { Token::Str { value, has_escape } => Str { span: arg_span, value, has_escape, kind: StrKind::Normal { contains_quote: true, }, }, _ => unreachable!(), }, _ => { bump!(self); self.emit_err(arg_span, SyntaxError::TS1141); Str { span: arg_span, value: "".into(), has_escape: false, kind: Default::default(), } } }; expect!(self, ')'); let qualifier = if eat!(self, '.') { self.parse_ts_entity_name(false).map(Some)? } else { None }; let type_args = if is!(self, '<') { self.parse_ts_type_args().map(Some)? } else { None }; Ok(TsImportType { span: span!(self, start), arg, qualifier, type_args, }) } /// `tsParseTypeQuery` fn parse_ts_type_query(&mut self) -> PResult<TsTypeQuery> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); expect!(self, "typeof"); let expr_name = if is!(self, "import") { self.parse_ts_import_type().map(From::from)? } else { self.parse_ts_entity_name( // allow_reserved_word true, ) .map(From::from)? 
}; Ok(TsTypeQuery { span: span!(self, start), expr_name, }) } /// `tsParseTypeParameter` fn parse_ts_type_param(&mut self) -> PResult<TsTypeParam> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); let name = self.in_type().parse_ident_name()?; let constraint = self.eat_then_parse_ts_type(&tok!("extends"))?; let default = self.eat_then_parse_ts_type(&tok!('='))?; Ok(TsTypeParam { span: span!(self, start), name, constraint, default, }) } /// `tsParseTypeParameter` pub(super) fn parse_ts_type_params(&mut self) -> PResult<TsTypeParamDecl> { self.in_type().parse_with(|p| { p.ts_in_no_context(|p| { let start = cur_pos!(p); if !is!(p, '<') && !is!(p, JSXTagStart) { unexpected!(p, "< (jsx tag start)") } bump!(p); // '<' let params = p.parse_ts_bracketed_list( ParsingContext::TypeParametersOrArguments, |p| p.parse_ts_type_param(), // bracket false, // skip_first_token true, )?; Ok(TsTypeParamDecl { span: span!(p, start), params, }) }) }) } /// `tsParseTypeOrTypePredicateAnnotation` pub(super) fn parse_ts_type_or_type_predicate_ann( &mut self, return_token: &'static Token, ) -> PResult<TsTypeAnn> { debug_assert!(self.input.syntax().typescript()); self.in_type().parse_with(|p| { let return_token_start = cur_pos!(p); if !p.input.eat(return_token) { let cur = format!("{:?}", cur!(p, false).ok()); let span = p.input.cur_span(); syntax_error!(p, span, SyntaxError::Expected(return_token, cur)) } let type_pred_start = cur_pos!(p); let has_type_pred_asserts = is!(p, "asserts") && peeked_is!(p, IdentRef); if has_type_pred_asserts { assert_and_bump!(p, "asserts"); cur!(p, false)?; } let has_type_pred_is = is!(p, IdentRef) && peeked_is!(p, "is") && !p.input.has_linebreak_between_cur_and_peeked(); let is_type_predicate = has_type_pred_asserts || has_type_pred_is; if !is_type_predicate { return p.parse_ts_type_ann( // eat_colon false, return_token_start, ); } let type_pred_var = p.parse_ident_name()?; let type_ann = if has_type_pred_is { assert_and_bump!(p, "is"); let pos = cur_pos!(p); Some(p.parse_ts_type_ann( // eat_colon false, pos, )?) } else { None }; let node = Box::new(TsType::TsTypePredicate(TsTypePredicate { span: span!(p, type_pred_start), asserts: has_type_pred_asserts, param_name: TsThisTypeOrIdent::Ident(type_pred_var), type_ann, })); Ok(TsTypeAnn { span: span!(p, return_token_start), type_ann: node, }) }) } /// `tsTryParse` fn try_parse_ts_bool<F>(&mut self, op: F) -> PResult<bool> where F: FnOnce(&mut Self) -> PResult<Option<bool>>, { if !self.input.syntax().typescript() { return Ok(false); } let prev_emit_err = self.emit_err; let mut cloned = self.clone(); cloned.emit_err = false; let res = op(&mut cloned); match res { Ok(Some(res)) if res => { *self = cloned; self.emit_err = prev_emit_err; Ok(res) } Err(err) => Ok(false), _ => Ok(false), } } /// `tsTryParse` pub(super) fn try_parse_ts<T, F>(&mut self, op: F) -> Option<T> where F: FnOnce(&mut Self) -> PResult<Option<T>>, { if !self.input.syntax().typescript() { return None; } trace_cur!(self, try_parse_ts); let prev_emit_err = self.emit_err; let mut cloned = self.clone(); cloned.emit_err = false; let res = op(&mut cloned); match res { Ok(Some(res)) => { *self = cloned; trace_cur!(self, try_parse_ts__success_value); self.emit_err = prev_emit_err; Some(res) } Ok(None) => { trace_cur!(self, try_parse_ts__success_no_value); None } Err(..) 
=> { trace_cur!(self, try_parse_ts__fail); None } } } pub(super) fn parse_ts_type_ann( &mut self, eat_colon: bool, start: BytePos, ) -> PResult<TsTypeAnn> { trace_cur!(self, parse_ts_type_ann); debug_assert!(self.input.syntax().typescript()); self.in_type().parse_with(|p| { if eat_colon { assert_and_bump!(p, ':'); } trace_cur!(p, parse_ts_type_ann__after_colon); let type_ann = p.parse_ts_type()?; Ok(TsTypeAnn { span: span!(p, start), type_ann, }) }) } /// `tsEatThenParseType` fn eat_then_parse_ts_type( &mut self, token_to_eat: &'static Token, ) -> PResult<Option<Box<TsType>>> { if !cfg!(feature = "typescript") { return Ok(Default::default()); } self.in_type().parse_with(|p| { if !p.input.eat(token_to_eat) { return Ok(None); } p.parse_ts_type().map(Some) }) } /// `tsExpectThenParseType` fn expect_then_parse_ts_type( &mut self, token: &'static Token, token_str: &'static str, ) -> PResult<Box<TsType>> { debug_assert!(self.input.syntax().typescript()); self.in_type().parse_with(|p| { if !p.input.eat(token) { let got = format!("{:?}", cur!(p, false).ok()); syntax_error!( p, p.input.cur_span(), SyntaxError::Unexpected { got, expected: token_str } ); } p.parse_ts_type() }) } /// `tsNextThenParseType` pub(super) fn next_then_parse_ts_type(&mut self) -> PResult<Box<TsType>> { debug_assert!(self.input.syntax().typescript()); self.in_type().parse_with(|p| { bump!(p); p.parse_ts_type() }) } /// `tsParseEnumMember` fn parse_ts_enum_member(&mut self) -> PResult<TsEnumMember> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); // Computed property names are grammar errors in an enum, so accept just string // literal or identifier. let id = match *cur!(self, true)? { Token::Str { .. } => self.parse_lit().map(|lit| match lit { Lit::Str(s) => TsEnumMemberId::Str(s), _ => unreachable!(), })?, Token::Num(v) => { bump!(self); let span = span!(self, start); // Recover from error self.emit_err(span, SyntaxError::TS2452); TsEnumMemberId::Str(Str { span, value: v.to_string().into(), has_escape: false, kind: StrKind::Normal { contains_quote: false, }, }) } Token::LBracket => { assert_and_bump!(self, '['); let _ = self.parse_expr()?; self.emit_err(span!(self, start), SyntaxError::TS1164); expect!(self, ']'); TsEnumMemberId::Ident(Ident::new(js_word!(""), span!(self, start))) } _ => self.parse_ident_name().map(TsEnumMemberId::from)?, }; let init = if eat!(self, '=') { Some(self.parse_assignment_expr()?) } else if is!(self, ',') || is!(self, '}') { None } else { let start = cur_pos!(self); bump!(self); store!(self, ','); self.emit_err( Span::new(start, start, SyntaxContext::empty()), SyntaxError::TS1005, ); None }; Ok(TsEnumMember { span: span!(self, start), id, init, }) } /// `tsParseEnumDeclaration` pub(super) fn parse_ts_enum_decl( &mut self, start: BytePos, is_const: bool, ) -> PResult<TsEnumDecl> { debug_assert!(self.input.syntax().typescript()); let id = self.parse_ident_name()?; expect!(self, '{'); let members = self .parse_ts_delimited_list(ParsingContext::EnumMembers, |p| p.parse_ts_enum_member())?; expect!(self, '}'); Ok(TsEnumDecl { span: span!(self, start), declare: false, is_const, id, members, }) } /// `tsParseModuleBlock` fn parse_ts_module_block(&mut self) -> PResult<TsModuleBlock> { trace_cur!(self, parse_ts_module_block); debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); expect!(self, '{'); // Inside of a module block is considered "top-level", meaning it can have // imports and exports. 
let body = self.parse_block_body( /* directives */ false, /* topLevel */ true, /* end */ Some(&tok!('}')), )?; Ok(TsModuleBlock { span: span!(self, start), body, }) } /// `tsParseModuleOrNamespaceDeclaration` fn parse_ts_module_or_ns_decl(&mut self, start: BytePos) -> PResult<TsModuleDecl> { debug_assert!(self.input.syntax().typescript()); let id = self.parse_ident_name()?; let body: TsNamespaceBody = if eat!(self, '.') { let inner_start = cur_pos!(self); let inner = self.parse_ts_module_or_ns_decl(inner_start)?; let inner = TsNamespaceDecl { span: inner.span, id: match inner.id { TsModuleName::Ident(i) => i, _ => unreachable!(), }, body: Box::new(inner.body.unwrap()), declare: inner.declare, global: inner.global, }; inner.into() } else { self.parse_ts_module_block().map(From::from)? }; Ok(TsModuleDecl { span: span!(self, start), declare: false, id: TsModuleName::Ident(id), body: Some(body), global: false, }) } /// `tsParseAmbientExternalModuleDeclaration` fn parse_ts_ambient_external_module_decl(&mut self, start: BytePos) -> PResult<TsModuleDecl> { debug_assert!(self.input.syntax().typescript()); let (global, id) = if is!(self, "global") { let id = self.parse_ident_name()?; (true, TsModuleName::Ident(id)) } else if match *cur!(self, true)? { Token::Str { .. } => true, _ => false, } { let id = self.parse_lit().map(|lit| match lit { Lit::Str(s) => TsModuleName::Str(s), _ => unreachable!(), })?; (false, id) } else { unexpected!(self, "global or a string literal"); }; let body = if is!(self, '{') { Some(self.parse_ts_module_block().map(TsNamespaceBody::from)?) } else { expect!(self, ';'); None }; Ok(TsModuleDecl { span: span!(self, start), declare: false, id, global, body, }) } pub fn parse_type(&mut self) -> PResult<Box<TsType>> { debug_assert!(self.input.syntax().typescript()); self.in_type().parse_ts_type() } /// Be sure to be in a type context before calling self. /// /// `tsParseType` pub(super) fn parse_ts_type(&mut self) -> PResult<Box<TsType>> { trace_cur!(self, parse_ts_type); debug_assert!(self.input.syntax().typescript()); // Need to set `state.inType` so that we don't parse JSX in a type context. debug_assert!(self.ctx().in_type); let start = cur_pos!(self); let ty = self.parse_ts_non_conditional_type()?; if self.input.had_line_break_before_cur() || !eat!(self, "extends") { return Ok(ty); } let check_type = ty; let extends_type = self.parse_ts_non_conditional_type()?; expect!(self, '?'); let true_type = self.parse_ts_type()?; expect!(self, ':'); let false_type = self.parse_ts_type()?; Ok(Box::new(TsType::TsConditionalType(TsConditionalType { span: span!(self, start), check_type, extends_type, true_type, false_type, }))) } /// `tsParseNonConditionalType` fn parse_ts_non_conditional_type(&mut self) -> PResult<Box<TsType>> { trace_cur!(self, parse_ts_non_conditional_type); debug_assert!(self.input.syntax().typescript()); if self.is_ts_start_of_fn_type()? { return self .parse_ts_fn_or_constructor_type(true) .map(TsType::from) .map(Box::new); } if (is!(self, "abstract") && peeked_is!(self, "new")) || is!(self, "new") { // As in `new () => Date` return self .parse_ts_fn_or_constructor_type(false) .map(TsType::from) .map(Box::new); } self.parse_ts_union_type_or_higher() } fn is_ts_start_of_fn_type(&mut self) -> PResult<bool> { debug_assert!(self.input.syntax().typescript()); if is!(self, '<') { return Ok(true); } Ok(is!(self, '(') && self.ts_look_ahead(|p| p.is_ts_unambiguously_start_of_fn_type())?) 
} /// `tsParseTypeAssertion` pub(super) fn parse_ts_type_assertion(&mut self, start: BytePos) -> PResult<TsTypeAssertion> { debug_assert!(self.input.syntax().typescript()); // Not actually necessary to set state.inType because we never reach here if JSX // plugin is enabled, but need `tsInType` to satisfy the assertion in // `tsParseType`. let type_ann = self.in_type().parse_with(|p| p.parse_ts_type())?; expect!(self, '>'); let expr = self.parse_unary_expr()?; Ok(TsTypeAssertion { span: span!(self, start), type_ann, expr, }) } /// `tsParseHeritageClause` pub(super) fn parse_ts_heritage_clause(&mut self) -> PResult<Vec<TsExprWithTypeArgs>> { debug_assert!(self.input.syntax().typescript()); self.parse_ts_delimited_list(ParsingContext::HeritageClauseElement, |p| { p.parse_expr_with_type_args() }) } /// `tsParseExpressionWithTypeArguments` fn parse_expr_with_type_args(&mut self) -> PResult<TsExprWithTypeArgs> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); // Note: TS uses parseLeftHandSideExpressionOrHigher, // then has grammar errors later if it's not an EntityName. let expr = self.parse_ts_entity_name(/* allow_reserved_words */ false)?; let type_args = if is!(self, '<') { Some(self.parse_ts_type_args()?) } else { None }; Ok(TsExprWithTypeArgs { span: span!(self, start), expr, type_args, }) } /// `tsParseInterfaceDeclaration` pub(super) fn parse_ts_interface_decl(&mut self, start: BytePos) -> PResult<TsInterfaceDecl> { debug_assert!(self.input.syntax().typescript()); let id = self.parse_ident_name()?; match id.sym { js_word!("string") | js_word!("null") | js_word!("number") | js_word!("object") | js_word!("any") | js_word!("unknown") | js_word!("boolean") | js_word!("bigint") | js_word!("symbol") | js_word!("void") | js_word!("never") | js_word!("intrinsic") => { self.emit_err(id.span, SyntaxError::TS2427); } _ => {} } let type_params = self.try_parse_ts_type_params()?; let extends = if eat!(self, "extends") { self.parse_ts_heritage_clause()? 
} else { vec![] }; // Recover from // // interface I extends A extends B {} if is!(self, "extends") { self.emit_err(self.input.cur_span(), SyntaxError::TS1172); while !eof!(self) && !is!(self, '{') { bump!(self); } } let body_start = cur_pos!(self); let body = self .in_type() .parse_with(|p| p.parse_ts_object_type_members())?; let body = TsInterfaceBody { span: span!(self, body_start), body, }; Ok(TsInterfaceDecl { span: span!(self, start), declare: false, id, type_params, extends, body, }) } /// `tsParseTypeAliasDeclaration` fn parse_ts_type_alias_decl(&mut self, start: BytePos) -> PResult<TsTypeAliasDecl> { debug_assert!(self.input.syntax().typescript()); let id = self.parse_ident_name()?; let type_params = self.try_parse_ts_type_params()?; let type_ann = self.expect_then_parse_ts_type(&tok!('='), "=")?; expect!(self, ';'); Ok(TsTypeAliasDecl { declare: false, span: span!(self, start), id, type_params, type_ann, }) } /// `tsParseImportEqualsDeclaration` pub(super) fn parse_ts_import_equals_decl( &mut self, start: BytePos, is_export: bool, is_type_only: bool, ) -> PResult<TsImportEqualsDecl> { debug_assert!(self.input.syntax().typescript()); let id = self.parse_ident_name()?; expect!(self, '='); let module_ref = self.parse_ts_module_ref()?; expect!(self, ';'); Ok(TsImportEqualsDecl { span: span!(self, start), declare: false, id, is_export, is_type_only, module_ref, }) } /// `tsIsExternalModuleReference` fn is_ts_external_module_ref(&mut self) -> PResult<bool> { debug_assert!(self.input.syntax().typescript()); Ok(is!(self, "require") && peeked_is!(self, '(')) } /// `tsParseModuleReference` fn parse_ts_module_ref(&mut self) -> PResult<TsModuleRef> { debug_assert!(self.input.syntax().typescript()); if self.is_ts_external_module_ref()? { self.parse_ts_external_module_ref().map(From::from) } else { self.parse_ts_entity_name(/* allow_reserved_words */ false) .map(From::from) } } /// `tsParseExternalModuleReference` #[allow(clippy::cognitive_complexity)] fn parse_ts_external_module_ref(&mut self) -> PResult<TsExternalModuleRef> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); expect!(self, "require"); expect!(self, '('); match *cur!(self, true)? { Token::Str { .. } => {} _ => unexpected!(self, "a string literal"), } let expr = match self.parse_lit()? { Lit::Str(s) => s, _ => unreachable!(), }; expect!(self, ')'); Ok(TsExternalModuleRef { span: span!(self, start), expr, }) } pub(super) fn ts_look_ahead<T, F>(&mut self, op: F) -> PResult<T> where F: FnOnce(&mut Self) -> PResult<T>, { debug_assert!(self.input.syntax().typescript()); let mut cloned = self.clone(); cloned.emit_err = false; let res = op(&mut cloned); res } /// `tsIsUnambiguouslyStartOfFunctionType` fn is_ts_unambiguously_start_of_fn_type(&mut self) -> PResult<bool> { debug_assert!(self.input.syntax().typescript()); assert_and_bump!(self, '('); if is_one_of!(self, ')', "...") { // ( ) // ( ... return Ok(true); } if self.skip_ts_parameter_start()? { if is_one_of!(self, ':', ',', '?', '=') { // ( xxx : // ( xxx , // ( xxx ? 
// ( xxx = return Ok(true); } if eat!(self, ')') && is!(self, "=>") { // ( xxx ) => return Ok(true); } } Ok(false) } /// `tsSkipParameterStart` fn skip_ts_parameter_start(&mut self) -> PResult<bool> { debug_assert!(self.input.syntax().typescript()); let _ = self.eat_any_ts_modifier()?; if is_one_of!(self, IdentRef, "this") { bump!(self); return Ok(true); } if (is!(self, '{') || is!(self, '[')) { if self.parse_binding_pat_or_ident().is_ok() { return Ok(true); } } Ok(false) } /// `tsParseTypeMemberSemicolon` fn parse_ts_type_member_semicolon(&mut self) -> PResult<()> { debug_assert!(self.input.syntax().typescript()); if !eat!(self, ',') { expect!(self, ';'); } Ok(()) } /// `tsParseSignatureMember` fn parse_ts_signature_member( &mut self, kind: SignatureParsingMode, ) -> PResult<Either<TsCallSignatureDecl, TsConstructSignatureDecl>> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); if kind == SignatureParsingMode::TSConstructSignatureDeclaration { expect!(self, "new"); } // ----- inlined self.tsFillSignature(tt.colon, node); let type_params = self.try_parse_ts_type_params()?; expect!(self, '('); let params = self.parse_ts_binding_list_for_signature()?; let type_ann = if is!(self, ':') { Some(self.parse_ts_type_or_type_predicate_ann(&tok!(':'))?) } else { None }; // ----- self.parse_ts_type_member_semicolon()?; match kind { SignatureParsingMode::TSCallSignatureDeclaration => { Ok(Either::Left(TsCallSignatureDecl { span: span!(self, start), params, type_ann, type_params, })) } SignatureParsingMode::TSConstructSignatureDeclaration => { Ok(Either::Right(TsConstructSignatureDecl { span: span!(self, start), params, type_ann, type_params, })) } } } /// `tsIsUnambiguouslyIndexSignature` fn is_ts_unambiguously_index_signature(&mut self) -> PResult<bool> { debug_assert!(self.input.syntax().typescript()); // Note: babel's comment is wrong assert_and_bump!(self, '['); // Skip '[' // ',' is for error recovery Ok(eat!(self, IdentRef) && is_one_of!(self, ':', ',')) } /// `tsTryParseIndexSignature` pub(super) fn try_parse_ts_index_signature( &mut self, index_signature_start: BytePos, readonly: bool, is_static: bool, ) -> PResult<Option<TsIndexSignature>> { if !cfg!(feature = "typescript") { return Ok(Default::default()); } if !(is!(self, '[') && self.ts_look_ahead(|p| p.is_ts_unambiguously_index_signature())?) { return Ok(None); } expect!(self, '['); let ident_start = cur_pos!(self); let mut id = self.parse_ident_name().map(BindingIdent::from)?; let type_ann_start = cur_pos!(self); if eat!(self, ',') { self.emit_err(id.id.span, SyntaxError::TS1096); } else { expect!(self, ':'); } let type_ann = self.parse_ts_type_ann(/* eat_colon */ false, type_ann_start)?; id.id.span = span!(self, ident_start); id.type_ann = Some(type_ann); expect!(self, ']'); let params = vec![TsFnParam::Ident(id)]; let ty = self.try_parse_ts_type_ann()?; let type_ann = if let Some(ty) = ty { Some(ty) } else { None }; self.parse_ts_type_member_semicolon()?; Ok(Some(TsIndexSignature { span: span!(self, index_signature_start), readonly, is_static, params, type_ann, })) } /// `parsePropertyName` in babel. /// /// Returns `(computed, key)`. fn parse_ts_property_name(&mut self) -> PResult<(bool, Box<Expr>)> { let (computed, key) = if eat!(self, '[') { let key = self.parse_assignment_expr()?; expect!(self, ']'); (true, key) } else { let ctx = Context { in_property_name: true, ..self.ctx() }; self.with_ctx(ctx).parse_with(|p| { // We check if it's valid for it to be a private name when we push it. 
let key = match *cur!(p, true)? { Token::Num(..) | Token::Str { .. } => p.parse_new_expr(), _ => p.parse_maybe_private_name().map(|e| match e { Either::Left(e) => { p.emit_err(e.span(), SyntaxError::PrivateNameInInterface); Box::new(Expr::PrivateName(e)) } Either::Right(e) => Box::new(Expr::Ident(e)), }), }; key.map(|key| (false, key)) })? }; Ok((computed, key)) } /// `tsParsePropertyOrMethodSignature` fn parse_ts_property_or_method_signature( &mut self, start: BytePos, readonly: bool, ) -> PResult<Either<TsPropertySignature, TsMethodSignature>> { debug_assert!(self.input.syntax().typescript()); let (computed, key) = self.parse_ts_property_name()?; let optional = eat!(self, '?'); if !readonly && is_one_of!(self, '(', '<') { let type_params = self.try_parse_ts_type_params()?; expect!(self, '('); let params = self.parse_ts_binding_list_for_signature()?; let type_ann = if is!(self, ':') { self.parse_ts_type_or_type_predicate_ann(&tok!(':')) .map(Some)? } else { None }; // ----- self.parse_ts_type_member_semicolon()?; Ok(Either::Right(TsMethodSignature { span: span!(self, start), computed, readonly, key, optional, type_params, params, type_ann, })) } else { let type_ann = self.try_parse_ts_type_ann()?; self.parse_ts_type_member_semicolon()?; Ok(Either::Left(TsPropertySignature { span: span!(self, start), computed, readonly, key, optional, init: None, type_params: None, params: vec![], type_ann, })) } } /// `tsParseTypeMember` fn parse_ts_type_member(&mut self) -> PResult<TsTypeElement> { debug_assert!(self.input.syntax().typescript()); fn into_type_elem( e: Either<TsCallSignatureDecl, TsConstructSignatureDecl>, ) -> TsTypeElement { match e { Either::Left(e) => e.into(), Either::Right(e) => e.into(), } } if is_one_of!(self, '(', '<') { return self .parse_ts_signature_member(SignatureParsingMode::TSCallSignatureDeclaration) .map(into_type_elem); } if is!(self, "new") && self.ts_look_ahead(|p| p.is_ts_start_of_construct_signature())? { return self .parse_ts_signature_member(SignatureParsingMode::TSConstructSignatureDeclaration) .map(into_type_elem); } // Instead of fullStart, we create a node here. 
        let start = cur_pos!(self);

        let readonly = self.parse_ts_modifier(&["readonly"], false)?.is_some();

        let idx = self.try_parse_ts_index_signature(start, readonly, false)?;
        if let Some(idx) = idx {
            return Ok(idx.into());
        }

        if let Some(v) = self.try_parse_ts(|p| {
            let start = p.input.cur_pos();

            // Note: this binding was previously misspelled `reaodnly`, which
            // silently captured the outer `readonly` in the structs below.
            let readonly = p.parse_ts_modifier(&["readonly"], false)?.is_some();

            let is_get = if eat!(p, "get") {
                true
            } else {
                expect!(p, "set");
                false
            };

            let (computed, key) = p.parse_ts_property_name()?;
            let key_span = key.span();
            let optional = eat!(p, '?');

            if is_get {
                expect!(p, '(');
                expect!(p, ')');
                let type_ann = p.try_parse_ts_type_ann()?;

                p.parse_ts_type_member_semicolon()?;

                Ok(Some(TsTypeElement::TsGetterSignature(TsGetterSignature {
                    span: span!(p, start),
                    readonly,
                    key,
                    computed,
                    optional,
                    type_ann,
                })))
            } else {
                expect!(p, '(');

                let params = p.parse_ts_binding_list_for_signature()?;
                if params.is_empty() {
                    syntax_error!(p, SyntaxError::SetterParamRequired)
                }
                let param = params.into_iter().next().unwrap();

                p.parse_ts_type_member_semicolon()?;

                Ok(Some(TsTypeElement::TsSetterSignature(TsSetterSignature {
                    span: span!(p, start),
                    readonly,
                    key,
                    computed,
                    optional,
                    param,
                })))
            }
        }) {
            return Ok(v);
        }

        self.parse_ts_property_or_method_signature(start, readonly)
            .map(|e| match e {
                Either::Left(e) => e.into(),
                Either::Right(e) => e.into(),
            })
    }

    /// `tsIsStartOfConstructSignature`
    fn is_ts_start_of_construct_signature(&mut self) -> PResult<bool> {
        debug_assert!(self.input.syntax().typescript());

        bump!(self);

        Ok(is!(self, '(') || is!(self, '<'))
    }

    /// `tsParseTypeLiteral`
    fn parse_ts_type_lit(&mut self) -> PResult<TsTypeLit> {
        debug_assert!(self.input.syntax().typescript());

        let start = cur_pos!(self);
        let members = self.parse_ts_object_type_members()?;
        Ok(TsTypeLit {
            span: span!(self, start),
            members,
        })
    }

    /// `tsParseObjectTypeMembers`
    fn parse_ts_object_type_members(&mut self) -> PResult<Vec<TsTypeElement>> {
        debug_assert!(self.input.syntax().typescript());

        expect!(self, '{');
        let members =
            self.parse_ts_list(ParsingContext::TypeMembers, |p| p.parse_ts_type_member())?;
        expect!(self, '}');
        Ok(members)
    }

    /// `tsIsStartOfMappedType`
    fn is_ts_start_of_mapped_type(&mut self) -> PResult<bool> {
        debug_assert!(self.input.syntax().typescript());

        bump!(self);
        if eat!(self, '+') || eat!(self, '-') {
            return Ok(is!(self, "readonly"));
        }
        if is!(self, "readonly") {
            bump!(self);
        }
        if !is!(self, '[') {
            return Ok(false);
        }
        bump!(self);
        if !is!(self, IdentRef) {
            return Ok(false);
        }
        bump!(self);

        Ok(is!(self, "in"))
    }

    /// `tsParseMappedTypeParameter`
    fn parse_ts_mapped_type_param(&mut self) -> PResult<TsTypeParam> {
        debug_assert!(self.input.syntax().typescript());

        let start = cur_pos!(self);
        let name = self.parse_ident_name()?;
        let constraint = Some(self.expect_then_parse_ts_type(&tok!("in"), "in")?);

        Ok(TsTypeParam {
            span: span!(self, start),
            name,
            constraint,
            default: None,
        })
    }

    /// `tsParseMappedType`
    #[allow(clippy::cognitive_complexity)]
    fn parse_ts_mapped_type(&mut self) -> PResult<TsMappedType> {
        debug_assert!(self.input.syntax().typescript());

        let start = cur_pos!(self);
        expect!(self, '{');
        let mut readonly = None;
        if is_one_of!(self, '+', '-') {
            readonly = Some(if is!(self, '+') {
                TruePlusMinus::Plus
            } else {
                TruePlusMinus::Minus
            });
            bump!(self);
            expect!(self, "readonly")
        } else if eat!(self, "readonly") {
            readonly = Some(TruePlusMinus::True);
        }

        expect!(self, '[');
        let type_param = self.parse_ts_mapped_type_param()?;
        let name_type = if eat!(self, "as") {
            Some(self.parse_ts_type()?)
} else { None }; expect!(self, ']'); let mut optional = None; if is_one_of!(self, '+', '-') { optional = Some(if is!(self, '+') { TruePlusMinus::Plus } else { TruePlusMinus::Minus }); bump!(self); // +, - expect!(self, '?'); } else if eat!(self, '?') { optional = Some(TruePlusMinus::True); } let type_ann = self.try_parse_ts_type()?; expect!(self, ';'); expect!(self, '}'); Ok(TsMappedType { span: span!(self, start), readonly, optional, type_param, name_type, type_ann, }) } /// `tsParseTupleType` fn parse_ts_tuple_type(&mut self) -> PResult<TsTupleType> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); let elem_types = self.parse_ts_bracketed_list( ParsingContext::TupleElementTypes, |p| p.parse_ts_tuple_element_type(), /* bracket */ true, /* skipFirstToken */ false, )?; // Validate the elementTypes to ensure: // No mandatory elements may follow optional elements // If there's a rest element, it must be at the end of the tuple let mut seen_optional_element = false; let len = elem_types.len(); for (i, elem_type) in elem_types.iter().enumerate() { match elem_type.ty { TsType::TsRestType(..) => {} TsType::TsOptionalType(..) => { seen_optional_element = true; } _ if seen_optional_element => { syntax_error!( self, span!(self, start), SyntaxError::TsRequiredAfterOptional ) } _ => {} } } Ok(TsTupleType { span: span!(self, start), elem_types, }) } fn try_parse_ts_tuple_element_name(&mut self) -> Option<Pat> { if !cfg!(feature = "typescript") { return Default::default(); } self.try_parse_ts(|p| { let start = cur_pos!(p); let rest = if eat!(p, "...") { Some(p.input.prev_span()) } else { None }; let mut ident = p.parse_ident_name()?; if eat!(p, '?') { ident.optional = true; ident.span = ident.span.with_hi(p.input.prev_span().hi); } expect!(p, ':'); Ok(Some(if let Some(dot3_token) = rest { Pat::Rest(RestPat { span: span!(p, start), dot3_token, arg: Box::new(Pat::Ident(ident.into())), type_ann: None, }) } else { Pat::Ident(ident.into()) })) }) } /// `tsParseTupleElementType` fn parse_ts_tuple_element_type(&mut self) -> PResult<TsTupleElement> { debug_assert!(self.input.syntax().typescript()); // parses `...TsType[]` let start = cur_pos!(self); let label = self.try_parse_ts_tuple_element_name(); if eat!(self, "...") { let type_ann = self.parse_ts_type()?; return Ok(TsTupleElement { span: span!(self, start), label, ty: TsType::TsRestType(TsRestType { span: span!(self, start), type_ann, }), }); } let ty = self.parse_ts_type()?; // parses `TsType?` if eat!(self, '?') { let type_ann = ty; return Ok(TsTupleElement { span: span!(self, start), label, ty: TsType::TsOptionalType(TsOptionalType { span: span!(self, start), type_ann, }), }); } Ok(TsTupleElement { span: span!(self, start), label, ty: *ty, }) } /// `tsParseParenthesizedType` fn parse_ts_parenthesized_type(&mut self) -> PResult<TsParenthesizedType> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); expect!(self, '('); let type_ann = self.parse_ts_type()?; expect!(self, ')'); Ok(TsParenthesizedType { span: span!(self, start), type_ann, }) } /// `tsParseFunctionOrConstructorType` fn parse_ts_fn_or_constructor_type( &mut self, is_fn_type: bool, ) -> PResult<TsFnOrConstructorType> { trace_cur!(self, parse_ts_fn_or_constructor_type); debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); let is_abstract = if !is_fn_type { eat!(self, "abstract") } else { false }; if !is_fn_type { expect!(self, "new"); } // ----- inlined `self.tsFillSignature(tt.arrow, node)` let type_params = 
self.try_parse_ts_type_params()?;
expect!(self, '(');
let params = self.parse_ts_binding_list_for_signature()?;
let type_ann = self.parse_ts_type_or_type_predicate_ann(&tok!("=>"))?;
// ----- end

Ok(if is_fn_type {
    TsFnOrConstructorType::TsFnType(TsFnType {
        span: span!(self, start),
        type_params,
        params,
        type_ann,
    })
} else {
    TsFnOrConstructorType::TsConstructorType(TsConstructorType {
        span: span!(self, start),
        type_params,
        params,
        type_ann,
        is_abstract,
    })
})
}

/// `tsParseLiteralTypeNode`
fn parse_ts_lit_type_node(&mut self) -> PResult<TsLitType> {
    debug_assert!(self.input.syntax().typescript());

    let start = cur_pos!(self);

    let lit = if is!(self, '`') {
        let tpl = self.parse_ts_tpl_lit_type()?;

        TsLit::Tpl(tpl)
    } else {
        match self.parse_lit()? {
            Lit::BigInt(n) => TsLit::BigInt(n),
            Lit::Bool(n) => TsLit::Bool(n),
            Lit::Num(n) => TsLit::Number(n),
            Lit::Str(n) => TsLit::Str(n),
            _ => unreachable!(),
        }
    };

    Ok(TsLitType {
        span: span!(self, start),
        lit,
    })
}

/// `tsParseTemplateLiteralType`
fn parse_ts_tpl_lit_type(&mut self) -> PResult<TsTplLitType> {
    debug_assert!(self.input.syntax().typescript());

    let start = cur_pos!(self);

    assert_and_bump!(self, '`');

    let (types, quasis) = self.parse_ts_tpl_type_elements()?;

    expect!(self, '`');

    Ok(TsTplLitType {
        span: span!(self, start),
        types,
        quasis,
    })
}

#[allow(clippy::vec_box)]
fn parse_ts_tpl_type_elements(&mut self) -> PResult<(Vec<Box<TsType>>, Vec<TplElement>)> {
    if !cfg!(feature = "typescript") {
        return Ok(Default::default());
    }

    trace_cur!(self, parse_tpl_elements);

    let mut types = vec![];

    let cur_elem = self.parse_tpl_element()?;
    let mut is_tail = cur_elem.tail;
    let mut quasis = vec![cur_elem];

    while !is_tail {
        expect!(self, "${");
        types.push(self.parse_ts_type()?);
        expect!(self, '}');
        let elem = self.parse_tpl_element()?;
        is_tail = elem.tail;
        quasis.push(elem);
    }

    Ok((types, quasis))
}

/// `tsParseBindingListForSignature`
///
/// Eats `)` at the end but does not eat `(` at start.
fn parse_ts_binding_list_for_signature(&mut self) -> PResult<Vec<TsFnParam>> {
    if !cfg!(feature = "typescript") {
        return Ok(Default::default());
    }

    debug_assert!(self.input.syntax().typescript());

    let params = self.parse_formal_params()?;
    let mut list = vec![];

    for param in params {
        let item = match param.pat {
            Pat::Ident(pat) => TsFnParam::Ident(pat),
            Pat::Array(pat) => TsFnParam::Array(pat),
            Pat::Object(pat) => TsFnParam::Object(pat),
            Pat::Rest(pat) => TsFnParam::Rest(pat),
            _ => unexpected!(
                self,
                "an identifier, [ for an array pattern, { for an object pattern or ... for a \
                 rest pattern"
            ),
        };
        list.push(item);
    }
    expect!(self, ')');

    Ok(list)
}

/// `tsTryParseTypeOrTypePredicateAnnotation`
///
/// Used for parsing return types.
fn try_parse_ts_type_or_type_predicate_ann(&mut self) -> PResult<Option<TsTypeAnn>> { if !cfg!(feature = "typescript") { return Ok(None); } if is!(self, ':') { self.parse_ts_type_or_type_predicate_ann(&tok!(':')) .map(Some) } else { Ok(None) } } /// `tsTryParseTypeAnnotation` pub(super) fn try_parse_ts_type_ann(&mut self) -> PResult<Option<TsTypeAnn>> { if !cfg!(feature = "typescript") { return Ok(None); } if is!(self, ':') { let pos = cur_pos!(self); return self.parse_ts_type_ann(/* eat_colon */ true, pos).map(Some); } Ok(None) } /// `tsTryParseType` fn try_parse_ts_type(&mut self) -> PResult<Option<Box<TsType>>> { if !cfg!(feature = "typescript") { return Ok(None); } self.eat_then_parse_ts_type(&tok!(':')) } /// `tsTryParseTypeParameters` pub(super) fn try_parse_ts_type_params(&mut self) -> PResult<Option<TsTypeParamDecl>> { if !cfg!(feature = "typescript") { return Ok(None); } if is!(self, '<') { return self.parse_ts_type_params().map(Some); } Ok(None) } /// `tsParseNonArrayType` #[allow(clippy::cognitive_complexity)] fn parse_ts_non_array_type(&mut self) -> PResult<Box<TsType>> { if !cfg!(feature = "typescript") { unreachable!() } trace_cur!(self, parse_ts_non_array_type); debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); match *cur!(self, true)? { Token::Word(Word::Ident(..)) | tok!("void") | tok!("yield") | tok!("null") | tok!("await") | tok!("break") => { if is!(self, "asserts") && peeked_is!(self, "this") { bump!(self); let this_keyword = self.parse_ts_this_type_node()?; return self .parse_ts_this_type_predicate(start, true, this_keyword) .map(TsType::from) .map(Box::new); } let kind = if is!(self, "void") { Some(TsKeywordTypeKind::TsVoidKeyword) } else if is!(self, "null") { Some(TsKeywordTypeKind::TsNullKeyword) } else if is!(self, "any") { Some(TsKeywordTypeKind::TsAnyKeyword) } else if is!(self, "boolean") { Some(TsKeywordTypeKind::TsBooleanKeyword) } else if is!(self, "bigint") { Some(TsKeywordTypeKind::TsBigIntKeyword) } else if is!(self, "never") { Some(TsKeywordTypeKind::TsNeverKeyword) } else if is!(self, "number") { Some(TsKeywordTypeKind::TsNumberKeyword) } else if is!(self, "object") { Some(TsKeywordTypeKind::TsObjectKeyword) } else if is!(self, "string") { Some(TsKeywordTypeKind::TsStringKeyword) } else if is!(self, "symbol") { Some(TsKeywordTypeKind::TsSymbolKeyword) } else if is!(self, "unknown") { Some(TsKeywordTypeKind::TsUnknownKeyword) } else if is!(self, "undefined") { Some(TsKeywordTypeKind::TsUndefinedKeyword) } else if is!(self, "intrinsic") { Some(TsKeywordTypeKind::TsIntrinsicKeyword) } else { None }; let peeked_is_dot = peeked_is!(self, '.'); match kind { Some(kind) if !peeked_is_dot => { bump!(self); return Ok(Box::new(TsType::TsKeywordType(TsKeywordType { span: span!(self, start), kind, }))); } _ => { return self.parse_ts_type_ref().map(TsType::from).map(Box::new); } } } Token::BigInt { .. } | Token::Str { .. } | Token::Num { .. } | tok!("true") | tok!("false") | tok!('`') => { return self .parse_ts_lit_type_node() .map(TsType::from) .map(Box::new); } tok!('-') => { let start = cur_pos!(self); bump!(self); if match *cur!(self, true)? { Token::Num(..) 
=> false, _ => true, } { unexpected!(self, "a numeric literal") } let lit = self.parse_lit()?; let lit = match lit { Lit::Num(num) => TsLit::Number(Number { span: num.span, value: -num.value, }), _ => unreachable!(), }; return Ok(Box::new(TsType::TsLitType(TsLitType { span: span!(self, start), lit, }))); } tok!("import") => { return self.parse_ts_import_type().map(TsType::from).map(Box::new); } tok!("this") => { let start = cur_pos!(self); let this_keyword = self.parse_ts_this_type_node()?; if !self.input.had_line_break_before_cur() && is!(self, "is") { return self .parse_ts_this_type_predicate(start, false, this_keyword) .map(TsType::from) .map(Box::new); } else { return Ok(Box::new(TsType::TsThisType(this_keyword))); } } tok!("typeof") => { return self.parse_ts_type_query().map(TsType::from).map(Box::new); } tok!('{') => { return if self.ts_look_ahead(|p| p.is_ts_start_of_mapped_type())? { self.parse_ts_mapped_type().map(TsType::from).map(Box::new) } else { self.parse_ts_type_lit().map(TsType::from).map(Box::new) }; } tok!('[') => { return self.parse_ts_tuple_type().map(TsType::from).map(Box::new); } tok!('(') => { return self .parse_ts_parenthesized_type() .map(TsType::from) .map(Box::new); } _ => {} } // switch (self.state.type) { // } unexpected!( self, "an identifier, void, yield, null, await, break, a string literal, a numeric literal, \ true, false, `, -, import, this, typeof, {, [, (" ) } /// `tsParseArrayTypeOrHigher` fn parse_ts_array_type_or_higher(&mut self, readonly: bool) -> PResult<Box<TsType>> { trace_cur!(self, parse_ts_array_type_or_higher); debug_assert!(self.input.syntax().typescript()); let mut ty = self.parse_ts_non_array_type()?; while !self.input.had_line_break_before_cur() && eat!(self, '[') { if eat!(self, ']') { ty = Box::new(TsType::TsArrayType(TsArrayType { span: span!(self, ty.span().lo()), elem_type: ty, })); } else { let index_type = self.parse_ts_type()?; expect!(self, ']'); ty = Box::new(TsType::TsIndexedAccessType(TsIndexedAccessType { span: span!(self, ty.span().lo()), readonly, obj_type: ty, index_type, })) } } Ok(ty) } /// `tsParseTypeOperator` fn parse_ts_type_operator(&mut self, op: TsTypeOperatorOp) -> PResult<TsTypeOperator> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); match op { TsTypeOperatorOp::Unique => expect!(self, "unique"), TsTypeOperatorOp::KeyOf => expect!(self, "keyof"), TsTypeOperatorOp::ReadOnly => expect!(self, "readonly"), } let type_ann = self.parse_ts_type_operator_or_higher()?; Ok(TsTypeOperator { span: span!(self, start), op, type_ann, }) } /// `tsParseInferType` fn parse_ts_infer_type(&mut self) -> PResult<TsInferType> { debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); expect!(self, "infer"); let type_param_name = self.parse_ident_name()?; let type_param = TsTypeParam { span: type_param_name.span(), name: type_param_name, constraint: None, default: None, }; Ok(TsInferType { span: span!(self, start), type_param, }) } /// `tsParseTypeOperatorOrHigher` fn parse_ts_type_operator_or_higher(&mut self) -> PResult<Box<TsType>> { trace_cur!(self, parse_ts_type_operator_or_higher); debug_assert!(self.input.syntax().typescript()); let operator = if is!(self, "keyof") { Some(TsTypeOperatorOp::KeyOf) } else if is!(self, "unique") { Some(TsTypeOperatorOp::Unique) } else if is!(self, "readonly") { Some(TsTypeOperatorOp::ReadOnly) } else { None }; match operator { Some(operator) => self .parse_ts_type_operator(operator) .map(TsType::from) .map(Box::new), None => { trace_cur!(self, 
parse_ts_type_operator_or_higher__not_operator); if is!(self, "infer") { self.parse_ts_infer_type().map(TsType::from).map(Box::new) } else { let readonly = self.parse_ts_modifier(&["readonly"], false)?.is_some(); self.parse_ts_array_type_or_higher(readonly) } } } } /// `tsParseExpressionStatement` pub(super) fn parse_ts_expr_stmt( &mut self, decorators: Vec<Decorator>, expr: Ident, ) -> PResult<Option<Decl>> { if !cfg!(feature = "typescript") { return Ok(Default::default()); } let start = expr.span().lo(); match &*expr.sym { "declare" => { let decl = self.try_parse_ts_declare(start, decorators)?; if let Some(mut decl) = decl { match decl { Decl::Class(ClassDecl { ref mut declare, .. }) | Decl::Fn(FnDecl { ref mut declare, .. }) | Decl::Var(VarDecl { ref mut declare, .. }) | Decl::TsInterface(TsInterfaceDecl { ref mut declare, .. }) | Decl::TsTypeAlias(TsTypeAliasDecl { ref mut declare, .. }) | Decl::TsEnum(TsEnumDecl { ref mut declare, .. }) | Decl::TsModule(TsModuleDecl { ref mut declare, .. }) => *declare = true, } Ok(Some(decl)) } else { Ok(None) } } "global" => { // `global { }` (with no `declare`) may appear inside an ambient module // declaration. // Would like to use tsParseAmbientExternalModuleDeclaration here, but already // ran past "global". if is!(self, '{') { let global = true; let id = TsModuleName::Ident(expr); let body = self .parse_ts_module_block() .map(TsNamespaceBody::from) .map(Some)?; Ok(Some( TsModuleDecl { span: span!(self, start), global, declare: false, id, body, } .into(), )) } else { Ok(None) } } _ => self.parse_ts_decl(start, decorators, expr.sym, /* next */ false), } } /// `tsTryParseDeclare` pub(super) fn try_parse_ts_declare( &mut self, start: BytePos, decorators: Vec<Decorator>, ) -> PResult<Option<Decl>> { if !self.syntax().typescript() { return Ok(None); } if self.ctx().in_declare { let span_of_declare = span!(self, start); self.emit_err(span_of_declare, SyntaxError::TS1038); } let declare_start = start; let ctx = Context { in_declare: true, ..self.ctx() }; self.with_ctx(ctx).parse_with(|p| { if is!(p, "function") { return p .parse_fn_decl(decorators) .map(|decl| match decl { Decl::Fn(f) => Decl::Fn(FnDecl { declare: true, function: Function { span: Span { lo: declare_start, ..f.function.span }, ..f.function }, ..f }), _ => decl, }) .map(Some); } if is!(p, "class") { return p .parse_class_decl(start, start, decorators) .map(|decl| match decl { Decl::Class(c) => Decl::Class(ClassDecl { declare: true, class: Class { span: Span { lo: declare_start, ..c.class.span }, ..c.class }, ..c }), _ => decl, }) .map(Some); } if is!(p, "const") && peeked_is!(p, "enum") { assert_and_bump!(p, "const"); let _ = cur!(p, true); assert_and_bump!(p, "enum"); return p .parse_ts_enum_decl(start, /* is_const */ true) .map(|decl| TsEnumDecl { declare: true, span: Span { lo: declare_start, ..decl.span }, ..decl }) .map(From::from) .map(Some); } if is_one_of!(p, "const", "var", "let") { return p .parse_var_stmt(false) .map(|decl| VarDecl { declare: true, span: Span { lo: declare_start, ..decl.span }, ..decl }) .map(From::from) .map(Some); } if is!(p, "global") { return p .parse_ts_ambient_external_module_decl(start) .map(Decl::from) .map(make_decl_declare) .map(Some); } else if is!(p, IdentName) { let value = match *cur!(p, true)? 
{ Token::Word(ref w) => w.clone().into(), _ => unreachable!(), }; return p .parse_ts_decl(start, decorators, value, /* next */ true) .map(|v| v.map(make_decl_declare)); } Ok(None) }) } /// `tsTryParseExportDeclaration` /// /// Note: this won't be called unless the keyword is allowed in /// `shouldParseExportDeclaration`. pub(super) fn try_parse_ts_export_decl( &mut self, decorators: Vec<Decorator>, value: JsWord, ) -> Option<Decl> { if !cfg!(feature = "typescript") { return None; } self.try_parse_ts(|p| { let start = cur_pos!(p); let opt = p.parse_ts_decl(start, decorators, value, true)?; Ok(match opt { Some(v) => Some(v), None => None, }) }) } /// Common to tsTryParseDeclare, tsTryParseExportDeclaration, and /// tsParseExpressionStatement. /// /// `tsParseDeclaration` #[allow(clippy::cognitive_complexity)] fn parse_ts_decl( &mut self, start: BytePos, decorators: Vec<Decorator>, value: JsWord, next: bool, ) -> PResult<Option<Decl>> { if !cfg!(feature = "typescript") { return Ok(Default::default()); } match value { js_word!("abstract") => { if next || (is!(self, "class") && !self.input.had_line_break_before_cur()) { if next { bump!(self); } let mut decl = self.parse_class_decl(start, start, decorators)?; match decl { Decl::Class(ClassDecl { class: Class { ref mut is_abstract, .. }, .. }) => *is_abstract = true, _ => unreachable!(), } return Ok(Some(decl)); } } js_word!("enum") => { if next || is!(self, IdentRef) { if next { bump!(self); } return self .parse_ts_enum_decl(start, /* is_const */ false) .map(From::from) .map(Some); } } js_word!("interface") => { if next || (is!(self, IdentRef)) { if next { bump!(self); } return self .parse_ts_interface_decl(start) .map(From::from) .map(Some); } } js_word!("module") => { if next { bump!(self); } if match *cur!(self, true)? { Token::Str { .. } => true, _ => false, } { return self .parse_ts_ambient_external_module_decl(start) .map(From::from) .map(Some); } else if next || is!(self, IdentRef) { return self .parse_ts_module_or_ns_decl(start) .map(From::from) .map(Some); } } js_word!("namespace") => { if next || is!(self, IdentRef) { if next { bump!(self); } return self .parse_ts_module_or_ns_decl(start) .map(From::from) .map(Some); } } js_word!("type") => { if next || is!(self, IdentRef) { if next { bump!(self); } return self .parse_ts_type_alias_decl(start) .map(From::from) .map(Some); } } _ => {} } Ok(None) } /// `tsTryParseGenericAsyncArrowFunction` pub(super) fn try_parse_ts_generic_async_arrow_fn( &mut self, start: BytePos, ) -> PResult<Option<ArrowExpr>> { if !cfg!(feature = "typescript") { return Ok(Default::default()); } let res = if is_one_of!(self, '<', JSXTagStart) { self.try_parse_ts(|p| { let type_params = p.parse_ts_type_params()?; // Don't use overloaded parseFunctionParams which would look for "<" again. expect!(p, '('); let params = p .parse_formal_params()? .into_iter() .map(|p| p.pat) .collect(); expect!(p, ')'); let return_type = p.try_parse_ts_type_or_type_predicate_ann()?; expect!(p, "=>"); Ok(Some((type_params, params, return_type))) }) } else { None }; let (type_params, params, return_type) = match res { Some(v) => v, None => return Ok(None), }; let ctx = Context { in_async: true, in_generator: false, ..self.ctx() }; self.with_ctx(ctx).parse_with(|p| { let is_generator = false; let expr = true; // May be set again by parseFunctionBody. 
let is_async = true; let body = p.parse_fn_body(true, false)?; Ok(Some(ArrowExpr { span: span!(p, start), body, is_async, is_generator, type_params: Some(type_params), params, return_type, })) }) } /// `tsParseTypeArguments` pub fn parse_ts_type_args(&mut self) -> PResult<TsTypeParamInstantiation> { trace_cur!(self, parse_ts_type_args); debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); let params = self.in_type().parse_with(|p| { // Temporarily remove a JSX parsing context, which makes us scan different // tokens. p.ts_in_no_context(|p| { expect!(p, '<'); p.parse_ts_delimited_list(ParsingContext::TypeParametersOrArguments, |p| { trace_cur!(p, parse_ts_type_args__arg); p.parse_ts_type() }) }) })?; // This reads the next token after the `>` too, so do this in the enclosing // context. But be sure not to parse a regex in the jsx expression // `<C<number> />`, so set exprAllowed = false self.input.set_expr_allowed(false); expect!(self, '>'); Ok(TsTypeParamInstantiation { span: span!(self, start), params, }) } /// `tsParseIntersectionTypeOrHigher` fn parse_ts_intersection_type_or_higher(&mut self) -> PResult<Box<TsType>> { trace_cur!(self, parse_ts_intersection_type_or_higher); debug_assert!(self.input.syntax().typescript()); self.parse_ts_union_or_intersection_type( UnionOrIntersection::Intersection, |p| p.parse_ts_type_operator_or_higher(), &tok!('&'), ) } /// `tsParseUnionTypeOrHigher` fn parse_ts_union_type_or_higher(&mut self) -> PResult<Box<TsType>> { trace_cur!(self, parse_ts_union_type_or_higher); debug_assert!(self.input.syntax().typescript()); self.parse_ts_union_or_intersection_type( UnionOrIntersection::Union, |p| p.parse_ts_intersection_type_or_higher(), &tok!('|'), ) } /// `tsParseUnionOrIntersectionType` fn parse_ts_union_or_intersection_type<F>( &mut self, kind: UnionOrIntersection, mut parse_constituent_type: F, operator: &'static Token, ) -> PResult<Box<TsType>> where F: FnMut(&mut Self) -> PResult<Box<TsType>>, { trace_cur!(self, parse_ts_union_or_intersection_type); debug_assert!(self.input.syntax().typescript()); let start = cur_pos!(self); // include the leading operator in the start self.input.eat(operator); trace_cur!(self, parse_ts_union_or_intersection_type__first_type); let ty = parse_constituent_type(self)?; trace_cur!(self, parse_ts_union_or_intersection_type__after_first); if self.input.is(&operator) { let mut types = vec![ty]; while self.input.eat(operator) { trace_cur!(self, parse_ts_union_or_intersection_type__constituent); types.push(parse_constituent_type(self)?); } return Ok(Box::new(TsType::TsUnionOrIntersectionType(match kind { UnionOrIntersection::Union => TsUnionOrIntersectionType::TsUnionType(TsUnionType { span: span!(self, start), types, }), UnionOrIntersection::Intersection => { TsUnionOrIntersectionType::TsIntersectionType(TsIntersectionType { span: span!(self, start), types, }) } }))); } Ok(ty) } } impl<I: Tokens> Parser<I> { /// In no lexer context fn ts_in_no_context<T, F>(&mut self, op: F) -> PResult<T> where F: FnOnce(&mut Self) -> PResult<T>, { debug_assert!(self.input.syntax().typescript()); let cloned = self.input.token_context().clone(); self.input .set_token_context(TokenContexts(vec![cloned.0[0]])); let res = op(self); self.input.set_token_context(cloned); res } } #[derive(Clone, Copy, PartialEq, Eq)] enum UnionOrIntersection { Union, Intersection, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum ParsingContext { EnumMembers, HeritageClauseElement, TupleElementTypes, TypeMembers, TypeParametersOrArguments, } 
#[derive(Clone, Copy, PartialEq, Eq)] enum SignatureParsingMode { TSCallSignatureDeclaration, TSConstructSignatureDeclaration, } /// Mark as declare fn make_decl_declare(mut decl: Decl) -> Decl { match decl { Decl::Class(ref mut c) => c.declare = true, Decl::Fn(ref mut f) => f.declare = true, Decl::Var(ref mut v) => v.declare = true, Decl::TsInterface(ref mut i) => i.declare = true, Decl::TsTypeAlias(ref mut a) => a.declare = true, Decl::TsEnum(ref mut e) => e.declare = true, Decl::TsModule(ref mut m) => m.declare = true, } decl } #[cfg(test)] mod tests { use crate::{ lexer::Lexer, test_parser, token::*, Capturing, EsVersion, Parser, Syntax, TsConfig, }; use swc_common::DUMMY_SP; use swc_ecma_ast::*; use swc_ecma_visit::assert_eq_ignore_span; #[test] fn issue_708_1() { let actual = test_parser( "type test = -1;", Syntax::Typescript(Default::default()), |p| p.parse_module(), ); let expected = Module { span: DUMMY_SP, shebang: None, body: { let first = ModuleItem::Stmt(Stmt::Decl(Decl::TsTypeAlias(TsTypeAliasDecl { span: DUMMY_SP, declare: false, id: Ident::new("test".into(), DUMMY_SP), type_params: None, type_ann: Box::new(TsType::TsLitType(TsLitType { span: DUMMY_SP, lit: TsLit::Number(Number { span: DUMMY_SP, value: -1.0, }), })), }))); vec![first] }, }; assert_eq_ignore_span!(actual, expected); } #[test] fn issue_708_2() { let actual = test_parser( "const t = -1;", Syntax::Typescript(Default::default()), |p| p.parse_module(), ); let expected = Module { span: DUMMY_SP, shebang: None, body: { let second = ModuleItem::Stmt(Stmt::Decl(Decl::Var(VarDecl { span: DUMMY_SP, kind: VarDeclKind::Const, declare: false, decls: vec![VarDeclarator { span: DUMMY_SP, name: Pat::Ident(Ident::new("t".into(), DUMMY_SP).into()), init: Some(Box::new(Expr::Unary(UnaryExpr { span: DUMMY_SP, op: op!(unary, "-"), arg: Box::new(Expr::Lit(Lit::Num(Number { span: DUMMY_SP, value: 1.0, }))), }))), definite: false, }], }))); vec![second] }, }; assert_eq_ignore_span!(actual, expected); } #[test] fn issue_726() { crate::with_test_sess( "type Test = ( string | number);", |handler, input| { let lexer = Lexer::new( Syntax::Typescript(TsConfig { ..Default::default() }), EsVersion::Es2019, input, None, ); let lexer = Capturing::new(lexer); let mut parser = Parser::new_from(lexer); parser .parse_typescript_module() .map_err(|e| e.into_diagnostic(handler).emit())?; let tokens: Vec<TokenAndSpan> = parser.input().take(); let tokens = tokens.into_iter().map(|t| t.token).collect::<Vec<_>>(); assert_eq!(tokens.len(), 9, "Tokens: {:#?}", tokens); Ok(()) }, ) .unwrap(); } #[test] fn issue_751() { crate::with_test_sess("t ? -(v >>> 1) : v >>> 1", |handler, input| { let lexer = Lexer::new( Syntax::Typescript(TsConfig { ..Default::default() }), EsVersion::Es2019, input, None, ); let lexer = Capturing::new(lexer); let mut parser = Parser::new_from(lexer); parser .parse_typescript_module() .map_err(|e| e.into_diagnostic(handler).emit())?; let tokens: Vec<TokenAndSpan> = parser.input().take(); let token = &tokens[10]; assert_eq!( token.token, Token::BinOp(BinOpToken::ZeroFillRShift), "Token: {:#?}", token.token ); Ok(()) }) .unwrap(); } }
{ debug_assert!(self.input.syntax().typescript()); Ok(match kind { ParsingContext::EnumMembers | ParsingContext::TypeMembers => is!(self, '}'), ParsingContext::HeritageClauseElement { .. } => { is!(self, '{') || is!(self, "implements") || is!(self, "extends") } ParsingContext::TupleElementTypes => is!(self, ']'), ParsingContext::TypeParametersOrArguments => is!(self, '>'), }) }
tractogram.rs
use nalgebra::Point3; use crate::ArraySequence; pub type Point = Point3<f32>; pub type Points = Vec<Point>; pub type Streamlines = ArraySequence<Point>;
pub type TractogramItem = (Points, ArraySequence<f32>, Vec<f32>); pub type RefTractogramItem<'data> = (&'data [Point], &'data [f32], &'data [f32]); #[derive(Clone, PartialEq)] pub struct Tractogram { pub streamlines: Streamlines, pub scalars: ArraySequence<f32>, pub properties: ArraySequence<f32>, } impl Tractogram { pub fn new( streamlines: Streamlines, scalars: ArraySequence<f32>, properties: ArraySequence<f32>, ) -> Tractogram { Tractogram { streamlines, scalars, properties } } pub fn item(&self, idx: usize) -> RefTractogramItem { // Do not use .get(idx).unwrap_or(). The empty slice is valid only if the ArraySequence are // empty. It should crash if the index is invalid. let scalars = if self.scalars.is_empty() { &[] } else { &self.scalars[idx] }; let properties = if self.properties.is_empty() { &[] } else { &self.properties[idx] }; (&self.streamlines[idx], scalars, properties) } } impl<'data> IntoIterator for &'data Tractogram { type Item = RefTractogramItem<'data>; type IntoIter = TractogramIterator<'data>; fn into_iter(self) -> Self::IntoIter { TractogramIterator { tractogram: self, index: 0..self.streamlines.len() } } } pub struct TractogramIterator<'data> { tractogram: &'data Tractogram, index: std::ops::Range<usize>, } impl<'data> Iterator for TractogramIterator<'data> { type Item = RefTractogramItem<'data>; fn next(&mut self) -> Option<Self::Item> { let idx = self.index.next()?; Some(self.tractogram.item(idx)) } fn size_hint(&self) -> (usize, Option<usize>) { (0, Some(self.tractogram.streamlines.len())) } } impl<'data> ExactSizeIterator for TractogramIterator<'data> {} impl<'data> DoubleEndedIterator for TractogramIterator<'data> { fn next_back(&mut self) -> Option<Self::Item> { let idx = self.index.next_back()?; Some(self.tractogram.item(idx)) } }
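// A minimal usage sketch, not part of the original file: it assumes a
// `Tractogram` value built elsewhere (e.g. by a reader) and relies only on the
// `IntoIterator` impl for `&Tractogram` defined above, where each item is a
// (streamline, scalars, properties) triple of borrowed slices.
fn total_points(tractogram: &Tractogram) -> usize {
    tractogram
        .into_iter()
        .map(|(streamline, _scalars, _properties)| streamline.len())
        .sum()
}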
test_schedule.py
"""Test cases for the pricing of scheduled cashflows."""
panic_wait.rs
// SPDX-License-Identifier: MIT OR Apache-2.0 // // Copyright (c) 2018-2021 Andre Richter <[email protected]> //! A panic handler that infinitely waits. use crate::{bsp, cpu}; use core::{fmt, panic::PanicInfo}; //-------------------------------------------------------------------------------------------------- // Private Code //-------------------------------------------------------------------------------------------------- fn _panic_print(args: fmt::Arguments) { use fmt::Write; unsafe { bsp::console::panic_console_out().write_fmt(args).unwrap() }; } /// Prints with a newline - only use from the panic handler. /// /// Carbon copy from https://doc.rust-lang.org/src/std/macros.rs.html #[macro_export] macro_rules! panic_println { ($($arg:tt)*) => ({ _panic_print(format_args_nl!($($arg)*)); }) } #[panic_handler] fn panic(info: &PanicInfo) -> ! { if let Some(args) = info.message()
else { panic_println!("\nKernel panic!"); } cpu::wait_forever() }
{ panic_println!("\nKernel panic: {}", args); }
time.rs
//! Timestamp formatting. use std::time::{SystemTime, UNIX_EPOCH}; use datetime::{LocalDateTime, TimeZone, DatePiece, TimePiece, Month}; use datetime::fmt::DateFormat; use lazy_static::lazy_static; use unicode_width::UnicodeWidthStr; /// Every timestamp in exa needs to be rendered by a **time format**. /// Formatting times is tricky, because how a timestamp is rendered can /// depend on one or more of the following: /// /// - The user’s locale, for printing the month name as “Feb”, or as “fév”, /// or as “2月”; /// - The current year, because certain formats will be less precise when /// dealing with dates far in the past; /// - The formatting style that the user asked for on the command-line. /// /// Because not all formatting styles need the same data, they all have their /// own enum variants. It’s not worth looking the locale up if the formatter /// prints month names as numbers. /// /// Currently exa does not support *custom* styles, where the user enters a /// format string in an environment variable or something. Just these four. #[derive(PartialEq, Debug, Copy, Clone)] pub enum TimeFormat { /// The **default format** uses the user’s locale to print month names, /// and specifies the timestamp down to the minute for recent times, and /// day for older times. DefaultFormat, /// Use the **ISO format**, which specifies the timestamp down to the /// minute for recent times, and day for older times. It uses a number /// for the month so it doesn’t use the locale. ISOFormat, /// Use the **long ISO format**, which specifies the timestamp down to the /// minute using only numbers, without needing the locale or year. LongISO, /// Use the **full ISO format**, which specifies the timestamp down to the /// millisecond and includes its offset down to the minute. This too uses /// only numbers so doesn’t require any special consideration. FullISO, } // There are two different formatting functions because local and zoned // timestamps are separate types. 
impl TimeFormat {
    pub fn format_local(self, time: SystemTime) -> String {
        match self {
            Self::DefaultFormat => default_local(time),
            Self::ISOFormat => iso_local(time),
            Self::LongISO => long_local(time),
            Self::FullISO => full_local(time),
        }
    }

    pub fn format_zoned(self, time: SystemTime, zone: &TimeZone) -> String {
        match self {
            Self::DefaultFormat => default_zoned(time, zone),
            Self::ISOFormat => iso_zoned(time, zone),
            Self::LongISO => long_zoned(time, zone),
            Self::FullISO => full_zoned(time, zone),
        }
    }
}

#[allow(trivial_numeric_casts)]
fn default_local(time: SystemTime) -> String {
    let date = LocalDateTime::at(systemtime_epoch(time));

    if date.year() == *CURRENT_YEAR {
        format!("{:2} {} {:02}:{:02}",
                date.day(), month_to_abbrev(date.month()),
                date.hour(), date.minute())
    }
    else {
        let date_format = match *MAXIMUM_MONTH_WIDTH {
            4 => &*FOUR_WIDE_DATE_YEAR,
            5 => &*FIVE_WIDE_DATE_YEAR,
            _ => &*OTHER_WIDE_DATE_YEAR,
        };
        date_format.format(&date, &*LOCALE)
    }
}

#[allow(trivial_numeric_casts)]
fn default_zoned(time: SystemTime, zone: &TimeZone) -> String {
    let date = zone.to_zoned(LocalDateTime::at(systemtime_epoch(time)));

    if date.year() == *CURRENT_YEAR {
        format!("{:2} {} {:02}:{:02}",
                date.day(), month_to_abbrev(date.month()),
                date.hour(), date.minute())
    }
    else {
        let date_format = match *MAXIMUM_MONTH_WIDTH {
            4 => &*FOUR_WIDE_DATE_YEAR,
            5 => &*FIVE_WIDE_DATE_YEAR,
            _ => &*OTHER_WIDE_DATE_YEAR,
        };
        date_format.format(&date, &*LOCALE)
    }
}

#[allow(trivial_numeric_casts)]
fn long_local(time: SystemTime) -> String {
    let date = LocalDateTime::at(systemtime_epoch(time));
    format!("{:04}-{:02}-{:02} {:02}:{:02}",
            date.year(), date.month() as usize, date.day(),
            date.hour(), date.minute())
}

#[allow(trivial_numeric_casts)]
fn long_zoned(time: SystemTime, zone: &TimeZone) -> String {
    let date = zone.to_zoned(LocalDateTime::at(systemtime_epoch(time)));
    format!("{:04}-{:02}-{:02} {:02}:{:02}",
            date.year(), date.month() as usize, date.day(),
            date.hour(), date.minute())
}

#[allow(trivial_numeric_casts)]
fn full_local(time: SystemTime) -> String {
    let date = LocalDateTime::at(systemtime_epoch(time));
    format!("{:04}-{:02}-{:02} {:02}:{:02}:{:02}.{:09}",
            date.year(), date.month() as usize, date.day(),
            date.hour(), date.minute(), date.second(),
            systemtime_nanos(time))
}

#[allow(trivial_numeric_casts)]
fn full_zoned(time: SystemTime, zone: &TimeZone) -> String {
    use datetime::Offset;

    let local = LocalDateTime::at(systemtime_epoch(time));
    let date = zone.to_zoned(local);
    let offset = Offset::of_seconds(zone.offset(local) as i32).expect("Offset out of range");
    format!("{:04}-{:02}-{:02} {:02}:{:02}:{:02}.{:09} {:+03}{:02}",
            date.year(), date.month() as usize, date.day(),
            date.hour(), date.minute(), date.second(),
            systemtime_nanos(time),
            offset.hours(), offset.minutes().abs())
}

#[allow(trivial_numeric_casts)]
fn iso_local(time: SystemTime) -> String {
    let date = LocalDateTime::at(systemtime_epoch(time));

    if is_recent(date) {
        format!("{:02}-{:02} {:02}:{:02}",
                date.month() as usize, date.day(),
                date.hour(), date.minute())
    }
    else {
        format!("{:04}-{:02}-{:02}",
                date.year(), date.month() as usize, date.day())
    }
}

#[allow(trivial_numeric_casts)]
fn iso_zoned(time: SystemTime, zone: &TimeZone) -> String {
    let date = zone.to_zoned(LocalDateTime::at(systemtime_epoch(time)));

    if is_recent(date) {
        format!("{:02}-{:02} {:02}:{:02}",
                date.month() as usize, date.day(),
                date.hour(), date.minute())
    }
    else {
        format!("{:04}-{:02}-{:02}",
                date.year(), date.month() as usize, date.day())
    }
}

fn systemtime_epoch(time: SystemTime) -> i64 {
time.duration_since(UNIX_EPOCH) .map(|t| t.as_secs() as i64) .unwrap_or_else(|e| { let diff = e.duration(); let mut secs = diff.as_secs(); if diff.subsec_nanos() > 0 { secs += 1; } -(secs as i64) }) } fn systemtime_nanos(time: SystemTime) -> u32 { time.duration_since
lDateTime) -> bool {
    date.year() == *CURRENT_YEAR
}

fn month_to_abbrev(month: Month) -> &'static str {
    match month {
        Month::January => "Jan",
        Month::February => "Feb",
        Month::March => "Mar",
        Month::April => "Apr",
        Month::May => "May",
        Month::June => "Jun",
        Month::July => "Jul",
        Month::August => "Aug",
        Month::September => "Sep",
        Month::October => "Oct",
        Month::November => "Nov",
        Month::December => "Dec",
    }
}

lazy_static! {
    static ref CURRENT_YEAR: i64 = LocalDateTime::now().year();

    static ref LOCALE: locale::Time = {
        locale::Time::load_user_locale()
            .unwrap_or_else(|_| locale::Time::english())
    };

    static ref MAXIMUM_MONTH_WIDTH: usize = {
        // Some locales use a three-character wide month name (Jan to Dec);
        // others vary between three to four (1月 to 12月, juil.). We check each month width
        // to detect the longest and set the output format accordingly.
        let mut maximum_month_width = 0;
        for i in 0..12 {
            let current_month_width = UnicodeWidthStr::width(&*LOCALE.short_month_name(i));
            maximum_month_width = std::cmp::max(maximum_month_width, current_month_width);
        }
        maximum_month_width
    };

    static ref FOUR_WIDE_DATE_TIME: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {4<:M} {2>:h}:{02>:m}"
    ).unwrap();

    static ref FIVE_WIDE_DATE_TIME: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {5<:M} {2>:h}:{02>:m}"
    ).unwrap();

    static ref OTHER_WIDE_DATE_TIME: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {:M} {2>:h}:{02>:m}"
    ).unwrap();

    static ref FOUR_WIDE_DATE_YEAR: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {4<:M} {5>:Y}"
    ).unwrap();

    static ref FIVE_WIDE_DATE_YEAR: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {5<:M} {5>:Y}"
    ).unwrap();

    static ref OTHER_WIDE_DATE_YEAR: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {:M} {5>:Y}"
    ).unwrap();
}
(UNIX_EPOCH) .map(|t| t.subsec_nanos()) .unwrap_or_else(|e| { let nanos = e.duration().subsec_nanos(); if nanos > 0 { 1_000_000_000 - nanos } else { nanos } }) } fn is_recent(date: Loca
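// A minimal sketch of driving `TimeFormat` from a caller, not part of the
// original file. The path argument and the choice of `LongISO` are
// assumptions; `LongISO` needs neither the locale nor the current year.
use std::time::SystemTime;

fn render_mtime(path: &str) -> std::io::Result<String> {
    let modified: SystemTime = std::fs::metadata(path)?.modified()?;
    Ok(TimeFormat::LongISO.format_local(modified))
}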
config_port.py
#!/usr/bin/env python3
# _*_ coding:utf-8 _*_
'''
 ____       _     _     _ _   __  __           _
|  _ \ __ _| |__ | |__ (_) |_|  \/  | __ _ ___| | __
| |_) / _` | '_ \| '_ \| | __| |\/| |/ _` / __| |/ /
|  _ < (_| | |_) | |_) | | |_| |  | | (_| \__ \   <
|_| \_\__,_|_.__/|_.__/|_|\__|_|  |_|\__,_|___/_|\_\

'''
# Default protocol-to-port dictionary configuration
HTTP_PORT=['80']
HTTPS_PORT=['443','8443']
contextmenu.js
function ContextMenu(menu, options){ var self = this; var num = ContextMenu.count++; this.menu = menu; this.contextTarget = null; if(!(menu instanceof Array)){ throw new Error("Parameter 1 must be of type Array"); } if(typeof options !== "undefined"){ if(typeof options !== "object"){ throw new Error("Parameter 2 must be of type object"); } }else{ options = {}; } window.addEventListener("resize", function(){ if(ContextUtil.getProperty(options, "close_on_resize", true)){ self.hide(); } }); this.setOptions = function(_options){ if(typeof _options === "object"){ options = _options; }else{ throw new Error("Parameter 1 must be of type object") } } this.changeOption = function(option, value){ if(typeof option === "string"){ if(typeof value !== "undefined"){ options[option] = value; }else{ throw new Error("Parameter 2 must be set"); } }else{ throw new Error("Parameter 1 must be of type string"); } } this.getOptions = function(){ return options; } this.reload = function(){ if(document.getElementById('cm_' + num) == null){ var cnt = document.createElement("div"); cnt.className = "cm_container"; cnt.id = "cm_" + num; document.body.appendChild(cnt); } var container = document.getElementById('cm_' + num); container.innerHTML = ""; container.appendChild(renderLevel(menu)); } function
(level){ var ul_outer = document.createElement("ul"); level.forEach(function(item){ var li = document.createElement("li"); li.menu = self; if(typeof item.type === "undefined"){ var icon_span = document.createElement("span"); icon_span.className = 'cm_icon_span'; if(ContextUtil.getProperty(item, "icon", "") != ""){ icon_span.innerHTML = ContextUtil.getProperty(item, "icon", ""); }else{ icon_span.innerHTML = ContextUtil.getProperty(options, "default_icon", ""); } var text_span = document.createElement("span"); text_span.className = 'cm_text'; if(ContextUtil.getProperty(item, "text", "") != ""){ text_span.innerHTML = ContextUtil.getProperty(item, "text", ""); }else{ text_span.innerHTML = ContextUtil.getProperty(options, "default_text", "item"); } var sub_span = document.createElement("span"); sub_span.className = 'cm_sub_span'; if(typeof item.sub !== "undefined"){ if(ContextUtil.getProperty(options, "sub_icon", "") != ""){ sub_span.innerHTML = ContextUtil.getProperty(options, "sub_icon", ""); }else{ sub_span.innerHTML = '&#155;'; } } li.appendChild(icon_span); li.appendChild(text_span); li.appendChild(sub_span); if(!ContextUtil.getProperty(item, "enabled", true)){ li.setAttribute("disabled", ""); }else{ if(typeof item.events === "object"){ var keys = Object.keys(item.events); for(var i = 0; i < keys.length; i++){ li.addEventListener(keys[i], item.events[keys[i]]); } } if(typeof item.sub !== "undefined"){ li.appendChild(renderLevel(item.sub)); } } }else{ if(item.type == ContextMenu.DIVIDER){ li.className = "cm_divider"; } } ul_outer.appendChild(li); }); return ul_outer; } this.display = function(e, target){ if(typeof target !== "undefined"){ self.contextTarget = target; }else{ self.contextTarget = e.target; } var menu = document.getElementById('cm_' + num); var clickCoords = {x: e.clientX, y: e.clientY}; var clickCoordsX = clickCoords.x; var clickCoordsY = clickCoords.y; var menuWidth = menu.offsetWidth + 4; var menuHeight = menu.offsetHeight + 4; var windowWidth = window.innerWidth; var windowHeight = window.innerHeight; var mouseOffset = parseInt(ContextUtil.getProperty(options, "mouse_offset", 2)); if((windowWidth - clickCoordsX) < menuWidth){ menu.style.left = windowWidth - menuWidth + "px"; }else{ menu.style.left = (clickCoordsX + mouseOffset) + "px"; } if((windowHeight - clickCoordsY) < menuHeight){ menu.style.top = windowHeight - menuHeight + "px"; }else{ menu.style.top = (clickCoordsY + mouseOffset) + "px"; } var sizes = ContextUtil.getSizes(menu); if((windowWidth - clickCoordsX) < sizes.width){ menu.classList.add("cm_border_right"); }else{ menu.classList.remove("cm_border_right"); } if((windowHeight - clickCoordsY) < sizes.height){ menu.classList.add("cm_border_bottom"); }else{ menu.classList.remove("cm_border_bottom"); } menu.classList.add("display"); if(ContextUtil.getProperty(options, "close_on_click", true)){ window.addEventListener("click", documentClick); } e.preventDefault(); } this.hide = function(){ document.getElementById('cm_' + num).classList.remove("display"); window.removeEventListener("click", documentClick); } function documentClick(){ self.hide(); } this.reload(); } ContextMenu.count = 0; ContextMenu.DIVIDER = "cm_divider"; const ContextUtil = { getProperty: function(options, opt, def){ if(typeof options[opt] !== "undefined"){ return options[opt]; }else{ return def; } }, getSizes: function(obj){ var lis = obj.getElementsByTagName('li'); var width_def = 0; var height_def = 0; for(var i = 0; i < lis.length; i++){ var li = lis[i]; if(li.offsetWidth > width_def){ 
width_def = li.offsetWidth; } if(li.offsetHeight > height_def){ height_def = li.offsetHeight; } } var width = width_def; var height = height_def; for(var i = 0; i < lis.length; i++){ var li = lis[i]; var ul = li.getElementsByTagName('ul'); if(typeof ul[0] !== "undefined"){ var ul_size = ContextUtil.getSizes(ul[0]); if(width_def + ul_size.width > width){ width = width_def + ul_size.width; } if(height_def + ul_size.height > height){ height = height_def + ul_size.height; } } } return { "width": width, "height": height }; } };
renderLevel
ccinfocache_test.go
/*
Copyright IBM Corp. 2017 All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/license-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package ccprovider

import (
	"archive/tar"
	"bytes"
	"compress/gzip"
	"io/ioutil"
	"os"
	"path/filepath"
	"testing"

	"github.com/golang/protobuf/proto"
	"github.com/hyperledger/fabric/core/container/util"
	"github.com/hyperledger/fabric/protos/peer"
	"github.com/stretchr/testify/assert"
)

func getDepSpec(name string, path string, version string, initArgs [][]byte) (*peer.ChaincodeDeploymentSpec, error) {
	spec := &peer.ChaincodeSpec{Type: 1, ChaincodeId: &peer.ChaincodeID{Name: name, Path: path, Version: version}, Input: &peer.ChaincodeInput{Args: initArgs}}

	codePackageBytes := bytes.NewBuffer(nil)
	gz := gzip.NewWriter(codePackageBytes)
	tw := tar.NewWriter(gz)

	err := util.WriteBytesToPackage("src/garbage.go", []byte(name+path+version), tw)
	if err != nil {
		return nil, err
	}

	tw.Close()
	gz.Close()

	return &peer.ChaincodeDeploymentSpec{ChaincodeSpec: spec, CodePackage: codePackageBytes.Bytes()}, nil
}

func buildPackage(name string, path string, version string, initArgs [][]byte) (CCPackage, error) {
	depSpec, err := getDepSpec(name, path, version, initArgs)
	if err != nil {
		return nil, err
	}

	buf, err := proto.Marshal(depSpec)
	if err != nil {
		return nil, err
	}

	cccdspack := &CDSPackage{}
	if _, err := cccdspack.InitFromBuffer(buf); err != nil {
		return nil, err
	}

	return cccdspack, nil
}

type mockCCInfoFSStorageMgrImpl struct {
	CCMap map[string]CCPackage
}

func (m *mockCCInfoFSStorageMgrImpl) GetChaincode(ccname string, ccversion string) (CCPackage, error) {
	return m.CCMap[ccname+ccversion], nil
}

// Here we test the cache implementation itself
func TestCCInfoCache(t *testing.T) {
	ccname := "foo"
	ccver := "1.0"
	ccpath := "github.com/hyperledger/fabric/examples/chaincode/go/example02/cmd"

	ccinfoFs := &mockCCInfoFSStorageMgrImpl{CCMap: map[string]CCPackage{}}
	cccache := NewCCInfoCache(ccinfoFs)

	// Test the get side

	// The chaincode data is not yet in the cache
	_, err := cccache.GetChaincodeData(ccname, ccver)
	assert.Error(t, err)

	// Put it in the file system
	pack, err := buildPackage(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)
	ccinfoFs.CCMap[ccname+ccver] = pack

	// Expect it to be in the cache now
	cd1, err := cccache.GetChaincodeData(ccname, ccver)
	assert.NoError(t, err)

	// It should still be in the cache
	cd2, err := cccache.GetChaincodeData(ccname, ccver)
	assert.NoError(t, err)

	// They are not nil
	assert.NotNil(t, cd1)
	assert.NotNil(t, cd2)

	// Now test the put side.
	ccver = "2.0"

	// Put it in the file system
	pack, err = buildPackage(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)
	ccinfoFs.CCMap[ccname+ccver] = pack

	// Create a dep spec to put
	_, err = getDepSpec(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)

	// Expect it to be cached
	cd1, err = cccache.GetChaincodeData(ccname, ccver)
	assert.NoError(t, err)

	// It should still be in the cache
	cd2, err = cccache.GetChaincodeData(ccname, ccver)
	assert.NoError(t, err)

	// They are not nil
	assert.NotNil(t, cd1)
	assert.NotNil(t, cd2)
}

func TestPutChaincode(t *testing.T) {
	ccname := ""
	ccver := "1.0"
	ccpath := "github.com/hyperledger/fabric/examples/chaincode/go/example02/cmd"

	ccinfoFs := &mockCCInfoFSStorageMgrImpl{CCMap: map[string]CCPackage{}}
	NewCCInfoCache(ccinfoFs)

	// Error case 1: ccname is empty
	// Create a dep spec to put
	_, err := getDepSpec(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)

	// Error case 2: ccver is empty
	ccname = "foo"
	ccver = ""
	_, err = getDepSpec(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)

	// Error case 3: ccfs.PutChaincode returns an error
	ccinfoFs = &mockCCInfoFSStorageMgrImpl{CCMap: map[string]CCPackage{}}
	NewCCInfoCache(ccinfoFs)

	ccname = "foo"
	ccver = "1.0"
	_, err = getDepSpec(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)
}

// Here we test the peer's built-in cache after it is enabled
func TestCCInfoFSPeerInstance(t *testing.T) {
	ccname := "bar"
	ccver := "1.0"
	ccpath := "github.com/hyperledger/fabric/examples/chaincode/go/example02/cmd"

	// The chaincode data is not yet in the cache
	_, err := GetChaincodeFromFS(ccname, ccver)
	assert.Error(t, err)

	// Create a dep spec to put
	ds, err := getDepSpec(ccname, ccpath, ccver, [][]byte{[]byte("init"), []byte("a"), []byte("100"), []byte("b"), []byte("200")})
	assert.NoError(t, err)

	// Put it
	err = PutChaincodeIntoFS(ds)
	assert.NoError(t, err)

	// Get all installed chaincodes; it should not return 0 chaincodes
	resp, err := GetInstalledChaincodes()
	assert.NoError(t, err)
	assert.NotNil(t, resp)
	assert.NotZero(t, len(resp.Chaincodes), "GetInstalledChaincodes should not have returned 0 chaincodes")

	// Get chaincode data
	_, err = GetChaincodeData(ccname, ccver)
	assert.NoError(t, err)
}

func TestGetInstalledChaincodesErrorPaths(t *testing.T) {
	// Get the existing chaincode install path value and set it
	// back once the test is done
	cip := chaincodeInstallPath
	defer SetChaincodesPath(cip)

	// Create a temp dir and remove it at the end
	dir, err := ioutil.TempDir(os.TempDir(), "chaincodes")
	assert.NoError(t, err)
	defer os.RemoveAll(dir)

	// Set the directory created above as the chaincode install path
	SetChaincodesPath(dir)
	err = ioutil.WriteFile(filepath.Join(dir, "idontexist.1.0"), []byte("test"), 0777)
	assert.NoError(t, err)

	resp, err := GetInstalledChaincodes()
	assert.NoError(t, err)
	assert.Equal(t, 0, len(resp.Chaincodes),
		"Expected 0 chaincodes but GetInstalledChaincodes returned %d chaincodes", len(resp.Chaincodes))
}

func TestChaincodePackageExists(t *testing.T) {
	_, err := ChaincodePackageExists("foo1", "1.0")
	assert.Erro
ath(t *testing.T) {
	dir, err := ioutil.TempDir(os.TempDir(), "setchaincodes")
	if err != nil {
		assert.Fail(t, err.Error(), "Unable to create temp dir")
	}
	defer os.RemoveAll(dir)
	t.Logf("created temp dir %s", dir)

	// Get the existing chaincode install path value and set it
	// back once the test is done
	cip := chaincodeInstallPath
	defer SetChaincodesPath(cip)

	f, err := ioutil.TempFile(dir, "chaincodes")
	assert.NoError(t, err)
	assert.Panics(t, func() {
		SetChaincodesPath(f.Name())
	}, "SetChaincodesPath should have panicked if a file is passed to it")

	// The following code works on a Mac but not on CI
	//// make the dir read-only
	//err = os.Chmod(dir, 0444)
	//assert.NoError(t, err)
	//cdir := filepath.Join(dir, "chaincodesdir")
	//assert.Panics(t, func() {
	//	SetChaincodesPath(cdir)
	//}, "SetChaincodesPath should have panicked if it cannot stat the dir")

	//// read and execute the dir
	//err = os.Chmod(dir, 0555)
	//assert.NoError(t, err)
	//assert.Panics(t, func() {
	//	SetChaincodesPath(cdir)
	//}, "SetChaincodesPath should have panicked if it cannot create the dir")
}

var ccinfocachetestpath = "/tmp/ccinfocachetest"

func TestMain(m *testing.M) {
	os.RemoveAll(ccinfocachetestpath)
	SetChaincodesPath(ccinfocachetestpath)

	rc := m.Run()

	os.RemoveAll(ccinfocachetestpath)
	os.Exit(rc)
}
r(t, err) } func TestSetChaincodesP
WebServices.py
#$Header: /opt/cvs/python/packages/share1.5/AutoDockTools/WebServices.py,v 1.21 2009/08/12 21:40:05 lclement Exp $ #$Id: WebServices.py,v 1.21 2009/08/12 21:40:05 lclement Exp $ # Author: Sargis Dallakyan ([email protected]) import tkinter, Pmw, os, http.client, webbrowser, urllib.request, urllib.parse, urllib.error, re from ViewerFramework.VFCommand import CommandGUI, Command from Pmv.mvCommand import MVCommand from .autostartCommands import menuText from mglutil.gui.InputForm.Tk.gui import InputFormDescr from mglutil.util.packageFilePath import getResourceFolderWithVersion from tkinter.filedialog import * from tkinter.messagebox import * from mglutil.gui.BasicWidgets.Tk.progressBar import ProgressBar from mglutil.web.services.AppService_client import AppServiceLocator, launchJobRequest, \ getOutputsRequest, queryStatusRequest from mglutil.web.services.AppService_types import ns0 import os class WebServices(MVCommand): def __init__(self): MVCommand.__init__(self) rc = getResourceFolderWithVersion() + os.sep + 'ws' + os.sep if not os.path.exists(rc): os.mkdir(rc) self.proxy_gama = rc + 'proxy_gama' self.rc_ad = rc + "rc_ad" self.login = False if hasattr(self, 'vf.GUI.ROOT'): self.dpf = tkinter.StringVar(self.vf.GUI.ROOT) self.gpf = tkinter.StringVar(self.vf.GUI.ROOT) self.prev_dir = tkinter.StringVar(self.vf.GUI.ROOT) self.ad_radio = tkinter.IntVar(self.vf.GUI.ROOT) else: self.dpf = tkinter.StringVar() self.gpf = tkinter.StringVar() self.prev_dir = tkinter.StringVar() self.ad_radio = tkinter.IntVar() self.current_job = None def guiCallback(self, event=None): mainform = self.showForm('default', modal=0, blocking=1., initFunc=self.initForm) def buildFormDescr(self, formName): ifd = InputFormDescr(title = "AutoGrid/AutoDock Web Services") #Web Services Login ifd.append({'name':"LoginGroup", 'widgetType':Pmw.Group, 'container':{'LoginGroup':'w.interior()'}, 'wcfg':{'tag_text':'Web Services Location'}, 'gridcfg':{'sticky':'nswe'} }) ifd.append({'widgetType':Pmw.ComboBox, 'name':'WS_address', 'parent':'LoginGroup', 'wcfg':{'scrolledlist_items': ('http://ws.nbcr.net/opal2/services',), 'listheight':50, 'dropdown':1, 'history':1, 'autoclear':1}, 'gridcfg':{'sticky':'ew', 'row':0, 'column':0, 'columnspan':3} }) # ifd.append({'widgetType':Tkinter.Label, 'name':'New_User', # 'parent':'LoginGroup', 'wcfg':{'text':' New Users?', # 'fg':'Blue','cursor':'hand1'}, # 'gridcfg':{'sticky':'w', 'row':1, 'column':0} # }) # ifd.append({'widgetType':Tkinter.Label, 'name':'UserName_Label', # 'parent':'LoginGroup', 'wcfg':{'text':'User Name'}, # 'gridcfg':{'sticky':'e', 'row':1, 'column':1} # }) # ifd.append({'widgetType':Tkinter.Entry, 'name':'UserName_Entry', # 'parent':'LoginGroup','wcfg':{}, # 'gridcfg':{'sticky':'ew', 'row':1, 'column':2} # }) # ifd.append({'widgetType':Tkinter.Label, 'name':'Password_Label', # 'parent':'LoginGroup', 'wcfg':{'text':'Password'}, # 'gridcfg':{'sticky':'e', 'row':2, 'column':1} # }) # ifd.append({'widgetType':Tkinter.Entry, 'name':'Password_Entry', # 'parent':'LoginGroup', 'wcfg':{'show':'*'}, # 'gridcfg':{'sticky':'ew', 'row':2, 'column':2} # }) # ifd.append({'widgetType':Tkinter.Label, 'name':'Remember_Label', # 'parent':'LoginGroup', # 'wcfg':{'text':'Remember User Name and Password'}, # 'gridcfg':{'sticky':'e', 'row':3, 'column':0,'columnspan':2} # }) # self.RememberLogin_var = Tkinter.BooleanVar() # ifd.append({'widgetType':Tkinter.Checkbutton, 'name':'Remember_Checkbutton', # 'parent':'LoginGroup', 'variable':self.RememberLogin_var, # 'gridcfg':{'sticky':'w', 'row':3, 
'column':2} # }) #AutoGrid group ifd.append({'name':"AutoGrid", 'widgetType':Pmw.Group, 'container':{'AutoGrid':'w.interior()'}, 'wcfg':{'tag_text':'AutoGrid'}, 'gridcfg':{'sticky':'nswe'} }) ifd.append({'widgetType':tkinter.Button, 'name':'Run_autogrid', 'parent':'AutoGrid', 'wcfg':{'text':'Run AutoGrid ', 'command':self.startAutogrid}, 'gridcfg':{'sticky':'w', 'row':0, 'column':0} }) ifd.append( {'name': 'gpf_entry', 'parent':'AutoGrid', 'widgetType':tkinter.Entry, 'wcfg':{'width':30,'textvariable':self.gpf}, 'gridcfg':{'sticky':'w','row':0,'column':1} }) ifd.append({'name': 'browse_gpf', 'widgetType': tkinter.Button, 'parent':'AutoGrid', 'text':'Browse', 'command':self.browse_gpf, 'gridcfg':{'sticky':'w','row':0, 'column':2} }) #AutoDock group ifd.append({'name':"AutoDock", 'widgetType':Pmw.Group, 'container':{'AutoDock':'w.interior()'}, 'wcfg':{'tag_text':'AutoDock'}, 'gridcfg':{'sticky':'nswe'} }) ifd.append({'widgetType':tkinter.Button, 'name':'Run_autodock', 'parent':'AutoDock', 'wcfg':{'text':'Run AutoDock', 'command':self.startAutodock}, 'gridcfg':{'sticky':'w', 'row':0, 'column':0} }) ifd.append( {'name': 'dpf_entry', 'parent':'AutoDock', 'widgetType':tkinter.Entry, 'wcfg':{'width':30,'textvariable':self.dpf}, 'gridcfg':{'sticky':'w','row':0,'column':1} }) ifd.append({'name': 'browse_dpf', 'widgetType': tkinter.Button, 'parent':'AutoDock', 'text':'Browse', 'command':self.browse_dpf, 'gridcfg':{'sticky':'w','row':0, 'column':2} }) ifd.append({'name': 'ag_local', 'widgetType': tkinter.Radiobutton, 'parent':'AutoDock', 'text':'Use local grids', 'tooltip':"This option sends locally stored grid files with Web Services request", 'wcfg':{'variable':self.ad_radio,'value':0}, 'gridcfg':{'sticky':'w','row':1, 'column':0,'columnspan':2} }) # ifd.append({'name': 'ag_before', 'widgetType': Tkinter.Radiobutton, # 'parent':'AutoDock', 'text':'Run AutoGrid first', # 'tooltip':"This option runs AutoGrid Web Services and uses resulting map files for AutoDock", # 'wcfg':{'variable':self.ad_radio,'value':1,'state':'disabled'}, # 'gridcfg':{'sticky':'w','row':2, 'column':0,'columnspan':2} # }) ifd.append({'name': 'use_remote', 'widgetType': tkinter.Radiobutton, 'parent':'AutoDock', 'text':'Use grids from server directory', 'tooltip':"This option copies map files from previous AutoGrid run", 'wcfg':{'variable':self.ad_radio,'value':2,}, 'gridcfg':{'sticky':'w','row':3, 'column':0,'columnspan':2} }) ifd.append( {'name': 'remote_dir', 'parent':'AutoDock', 'widgetType':tkinter.Entry, 'wcfg':{'width':23,'textvariable':self.prev_dir}, 'gridcfg':{'sticky':'e','row':3,'column':1,'columnspan':2} }) #Status ifd.append({'name':"StatusGroup", 'widgetType':Pmw.Group, 'container':{'StatusGroup':'w.interior()'}, 'wcfg':{'tag_text':'Web Services Status'}, 'gridcfg':{'sticky':'nswe'} }) ifd.append({'widgetType':tkinter.Label, 'name':'status0', 'parent':'StatusGroup', 'wcfg':{'text':' ',}, 'gridcfg':{'sticky':'w', 'row':0, 'column':0} }) ifd.append({'widgetType':tkinter.Label, 'name':'status1', 'parent':'StatusGroup', 'wcfg':{'text':' ',}, 'gridcfg':{'sticky':'w', 'row':1, 'column':0} }) ifd.append({'name':'WS_ProgressBar', 'widgetType':tkinter.Frame, 'parent':'StatusGroup', 'wcfg':{'height':30}, 'gridcfg':{'sticky':'ew', 'row':2,'column':0} }) ifd.append({'widgetType':tkinter.Label, 'name':'down_label', 'parent':'StatusGroup', 'wcfg':{'text':' ',}, 'gridcfg':{'sticky':'w', 'row':3, 'column':0}
        return ifd

    def browse_gpf(self):
        filename = askopenfilename(filetypes=[('Grid Parameter File','*.gpf')],
                                   title="Please Select Grid Parameter File",
                                   parent=self.cmdForms['default'].root)
        if filename:
            self.gpf.set(filename)
            self.cmdForms['default'].descr.entryByName['Run_autogrid']['widget'].configure(state='normal')
            #self.cmdForms['default'].descr.entryByName['ag_before']['widget'].configure(state='normal')

    def browse_dpf(self):
        filename = askopenfilename(filetypes=[('Dock Parameter File','*.dpf')],
                                   title="Please Select Dock Parameter File",
                                   parent=self.cmdForms['default'].root)
        if filename:
            self.dpf.set(filename)
            self.cmdForms['default'].descr.entryByName['Run_autodock']['widget'].configure(state='normal')

    def initForm(self, cmdForm=None):
        cmdForm.descr.entryByName['WS_address']['widget'].selectitem(0)
#        if not os.path.exists(self.rc_ad):
#            open(self.rc_ad,'w')
#        else:
#            file = open(self.rc_ad)
#            text = file.read()
#            text = text.split()
#            for line in text:
#                tmp_line = line.split('User:')
#                if len(tmp_line) > 1:
#                    cmdForm.descr.entryByName['UserName_Entry']['wcfg']\
#                        ['textvariable'].set(tmp_line[1])
#                tmp_line = line.split('Password:')
#                if len(tmp_line) > 1:
#                    cmdForm.descr.entryByName['Password_Entry']['wcfg']\
#                        ['textvariable'].set(tmp_line[1])
#            file.close()
#        def openurl(event):
#            webbrowser.open('https://nbcr.net:8443/worksphere/start?cid=apply')
#        cmdForm.descr.entryByName['New_User']['widget'].bind(sequence="<Button-1>",
#                                                             func=openurl)
        if hasattr(self.vf,'dpo') and self.vf.dpo.dpf_filename:
            self.dpf.set(self.vf.dpo.dpf_filename)
            cmdForm.descr.entryByName['Run_autodock']['widget'].configure(state='normal')
        else:
            if not self.dpf.get():
                cmdForm.descr.entryByName['Run_autodock']['widget'].configure(state='disabled')
        if hasattr(self.vf,'gpo') and self.vf.gpo.gpf_filename:
            self.gpf.set(self.vf.gpo.gpf_filename)
            cmdForm.descr.entryByName['Run_autogrid']['widget'].configure(state='normal')
            #cmdForm.descr.entryByName['ag_before']['widget'].configure(state='normal')
        else:
            if not self.gpf.get():
                cmdForm.descr.entryByName['Run_autogrid']['widget'].configure(state='disabled')
        self.progressBar = ProgressBar(
            cmdForm.descr.entryByName['WS_ProgressBar']['widget'], labelside=None,
            width=200, height=20, mode='percent')
        self.progressBar.setLabelText('Progress...')
        self.progressBar.set(0)
        cmdForm.descr.entryByName['WS_ProgressBar']['widget'].grid_forget()

    def startAutogrid(self):
        self.cmdForms['default'].descr.entryByName['Run_autogrid']['widget']\
            .configure(state='disabled')
        gpf_file = self.gpf.get()
        if not os.path.exists(gpf_file):
            self.cmdForms['default'].descr.entryByName['status0']['widget'].\
                configure(text = 'ERROR: gpf file ' + gpf_file + ' does not exist!')
            return
        self.host = self.cmdForms['default'].descr.entryByName['WS_address']['widget'].get()
#        if not self.login :
#            self.cmdForms['default'].descr.entryByName['status0']['widget'].\
#                configure(text='Connecting to '+ self.host + ". Please wait...")
#            self.vf.GUI.ROOT.update()
#            f = self.validate_login()
#            if f == "Failed":
#                return
        self.appLocator = AppServiceLocator()
        self.req = launchJobRequest()
        input_file = os.path.basename(gpf_file)
        options = '-p ' + input_file + ' -l ' + os.path.splitext(input_file)[0] + '.glg'
        self.req._argList = options
        #input_gpf = ns0.InputFileType_Def('inputFile')
        #input_gpf._name = input_file
        gpfFile = open(gpf_file, 'r')
        gpfFileString = gpfFile.read()
        gpfFile.close()
        #input_gpf._contents = gpfFileString
        gpfFileString = gpfFileString.split('\n')
        for line in gpfFileString:
            if line[0:9] == 'receptor ':
                pdbqs = line.split()[1]
        #input_pdbqs = ns0.InputFileType_Def('inputFile')
        #input_pdbqs._name = pdbqs
        pdbqs = os.path.join(os.path.split(gpf_file)[0], pdbqs)
        #pdbqsFile = open(pdbqs, 'r')
        #pdbqsFileString = pdbqsFile.read()
        #pdbqsFile.close()
        #input_pdbqs._contents = pdbqsFileString
        inputFiles = []
        #inputFiles.append(input_gpf)
        #inputFiles.append(input_pdbqs)
        inputFiles.append(self.uploadFile(gpf_file))
        inputFiles.append(self.uploadFile(pdbqs))
        self.req._inputFile = inputFiles
        self.appServicePort = self.appLocator.getAppServicePort(
            self.host + '/AutogridOpalService')
        resp = self.appServicePort.launchJob(self.req)
        self.JobID = resp._jobID
        self.cmdForms['default'].descr.entryByName['status0']['widget'].\
            configure(text = 'Running Autogrid Job ID: ' + self.JobID)
        self.vf.GUI.ROOT.update()
        self.vf.GUI.ROOT.after(5, self.checkStatus)
        self.cmdForms['default'].descr.entryByName['Run_autogrid']['widget'].configure(state='normal')
        self.prev_dir.set(self.JobID)
        self.cmdForms['default'].descr.entryByName['use_remote']['widget'].configure(state='normal')

    def startAutodock(self):
        self.cmdForms['default'].descr.entryByName['Run_autodock']['widget']\
            .configure(state='disabled')
        dpf_file = self.dpf.get()
        if not os.path.exists(dpf_file):
            self.cmdForms['default'].descr.entryByName['status0']['widget'].\
                configure(text = 'ERROR: dpf file ' + dpf_file + ' does not exist!')
            return
        self.host = self.cmdForms['default'].descr.entryByName['WS_address']['widget'].get()
#        if not self.login :
#            self.cmdForms['default'].descr.entryByName['status0']['widget'].\
#                configure(text='Connecting to '+ self.host + ". Please wait...")
#            self.vf.GUI.ROOT.update()
#            f = self.validate_login()
#            if f == "Failed":
#                return
        self.appLocator = AppServiceLocator()
        self.req = launchJobRequest()
        input_file = os.path.basename(dpf_file)
        options = '-p ' + input_file + ' -l ' + os.path.splitext(input_file)[0] + '.dlg'
        self.req._argList = options
        #input_dpf = ns0.InputFileType_Def('inputFile')
        #input_dpf._name = input_file
        dpfFile = open(dpf_file, 'r')
        dpfFileString = dpfFile.read()
        dpfFile.close()
        #input_dpf._contents = dpfFileString
        #DPF file
        inputFiles = []
        inputFiles.append(self.uploadFile(dpf_file))
        run_option = self.ad_radio.get()
        if run_option == 0: # sends locally stored grid files
            inputs = re.findall(r"\w*.\w*\.map ", dpfFileString)
            inputs.extend(re.findall(r"\w*\.maps.fld", dpfFileString))
            inputs.extend(re.findall(r"\w*.pdbq[t]*", dpfFileString))
            for input in inputs:
                input = input.strip()
                #ws_input = ns0.InputFileType_Def('inputFile')
                #ws_input._name = input
                input_full_name = os.path.join(os.path.split(dpf_file)[0], input)
                #inputFile = open(input_full_name, 'r')
                #inputFileString = inputFile.read()
                #inputFile.close()
                #ws_input._contents = inputFileString
                inputFiles.append(self.uploadFile(input_full_name))
        elif run_option == 2: # uses map files from a previous AutoGrid run on the server
            prev_dir = self.prev_dir.get()
            inputs = re.findall(r"\w*.\w*\.map ", dpfFileString)
            inputs.extend(re.findall(r"\w*\.maps.fld", dpfFileString))
            host = 'http://' + self.host.split('/')[2]
            for input in inputs:
                self.req._argList += " " + host + "/" + prev_dir + "/" + input
            pdbq_input = re.findall(r"\w*.pdbq[t]*", dpfFileString)
            pdbq_input = pdbq_input[0].strip()
            #ws_input = ns0.InputFileType_Def('inputFile')
            #ws_input._name = pdbq_input
            input_full_name = os.path.join(os.path.split(dpf_file)[0], pdbq_input)
            #inputFile = open(input_full_name, 'r')
            #inputFileString = inputFile.read()
            #inputFile.close()
            #ws_input._contents = inputFileString
            inputFiles.append(self.uploadFile(input_full_name))
        self.req._inputFile = inputFiles
        self.vf.GUI.ROOT.update()
        self.appServicePort = self.appLocator.getAppServicePort(
            self.host + '/AutodockOpalService',)
        resp = self.appServicePort.launchJob(self.req)
        self.JobID = resp._jobID
        self.cmdForms['default'].descr.entryByName['status0']['widget'].\
            configure(text = 'Running Autodock Job ID: ' + self.JobID)
        self.vf.GUI.ROOT.update()
        self.vf.GUI.ROOT.after(5, self.checkStatus)
        self.cmdForms['default'].descr.entryByName['Run_autodock']['widget'].configure(state='normal')

    def uploadFile(self, path):
        """Given a path, creates an InputFileType to be used with launchJob."""
        inputFile = ns0.InputFileType_Def('inputFile')
        inputFile._name = os.path.basename(path)
        if self.isOpal2():
            # Opal2 server: send the file as an attachment
            inputFile._attachment = open(path, "r")
        else:
            # not an Opal2 server: don't use attachments, inline the contents
            infile = open(path, "r")
            inputFile._contents = infile.read()
            infile.close()
        return inputFile

    def isOpal2(self):
        """Return True if we are using Opal2."""
        print("self.host is: " + self.host)
        if self.host.find("/opal2/") != -1:
            return True
        else:
            return False

    def checkStatus(self):
        resp = self.appServicePort.queryStatus(queryStatusRequest(self.JobID))
        if resp._code == 8: # 8 = GramJob.STATUS_DONE
            descr = self.cmdForms['default'].descr
            descr.entryByName['status0']['widget'].configure(text=resp._message)
            webbrowser.open(resp._baseURL)
            descr.entryByName['status1']['widget'].configure(text=resp._baseURL,
                                                             fg='Blue', cursor='hand1')
            def openurl(event):
                webbrowser.open(resp._baseURL)
            descr.entryByName['status1']['widget'].bind(sequence="<Button-1>",
                                                        func=openurl)
            self.resp = self.appServicePort.getOutputs(getOutputsRequest(self.JobID))
            descr.entryByName['WS_ProgressBar']['widget'].grid(sticky='ew', row=2, column=0)
            self.opener = urllib.request.FancyURLopener(cert_file=self.proxy_gama,
                                                        key_file=self.proxy_gama)
            self.download_finished = False
            self.new_download = True
            self.file_counter = -1
            inputs = [x for x in self.resp._outputFile if x._name[-3:] != 'dlg']
            if len(inputs) != len(self.resp._outputFile):
                for input in inputs:
                    self.resp._outputFile.remove(input)
            self.download()
            return
        else:
            self.cmdForms['default'].descr.entryByName['status0']['widget'].\
                configure(text = "Status: " + resp._message)
            self.cmdForms['default'].descr.entryByName['status1']['widget'].\
                configure(text = "")
            self.vf.GUI.ROOT.after(5000, self.checkStatus)

    def download(self):
        if self.new_download:
            self.file_counter += 1
            if self.file_counter > len(self.resp._outputFile) - 1:
                self.cmdForms['default'].descr.entryByName['WS_ProgressBar']\
                    ['widget'].grid_forget()
                self.cmdForms['default'].descr.entryByName['down_label']\
                    ['widget'].configure(text = "")
                self.cmdForms['default'].descr.entryByName['Run_autogrid']\
                    ['widget'].configure(state='normal')
                return
            self.progressBar.configure(progressformat='percent',
                                       labeltext='Progress ... ', max=100)
            self.progressBar.set(0)
            remote_file = self.resp._outputFile[self.file_counter]
            self.cmdForms['default'].descr.entryByName['down_label']['widget'].\
                configure(text = "Downloading " + remote_file._name + " " +
                          str(self.file_counter + 1) + " of " + str(len(self.resp._outputFile)))
            self._url = self.opener.open(remote_file._url)
            try:
                # binary mode: the urlopener returns bytes under Python 3
                self._out = open(remote_file._name, "wb")
            except IOError:
                showerror("Download Failed!", "Permission denied: "
                          + os.path.join(os.getcwd(), remote_file._name),
                          parent=self.cmdForms['default'].root)
                return
            # Python 3: the response headers are a mapping; the old
            # headers.dict attribute no longer exists
            total_bytes = int(self._url.headers['content-length'])
            self._progress_counter = 0
            self._download_bytes = total_bytes // 100
            if self._download_bytes == 0:
                self._download_bytes = 1
            self.new_download = False
            self.vf.GUI.ROOT.after(1, self.download)
            return
        else:
            self._progress_counter += 1
            if self._progress_counter > 100:
                self._progress_counter = 100
            self.progressBar.set(self._progress_counter)
            tmp = self._url.read(self._download_bytes)
            if tmp:
                self._out.write(tmp)
            else:
                self._url.close()
                self._out.close()
                self.new_download = True
            self.vf.GUI.ROOT.after(50, self.download)

    def validate_login(self):
        self.login = False
        from mglutil.web.services.SecuritymyproxyloginImplService_services import \
             loginUserMyProxyRequestWrapper, \
             SecuritymyproxyloginImplServiceLocator
        gamaLoginLocator = SecuritymyproxyloginImplServiceLocator()
        gamaLoginService = gamaLoginLocator.getSecuritymyproxyloginImpl(
            ssl=1, transport=http.client.HTTPSConnection)
        req = loginUserMyProxyRequestWrapper()
        username = self.cmdForms['default'].descr.\
            entryByName['UserName_Entry']['widget'].get()
        passwd = self.cmdForms['default'].descr.\
            entryByName['Password_Entry']['widget'].get()
        if not username or not passwd:
            showerror("Username or Password is missing",
                      "Login failed. Please type your User Name and Password, "
                      "or click on New User?",
                      parent=self.cmdForms['default'].root)
            return "Failed"
        req._username = username
        req._passwd = passwd
        resp = gamaLoginService.loginUserMyProxy(req)
        f = open(self.proxy_gama, "w")
        f.write(resp._loginUserMyProxyReturn)
        f.close()
        if self.RememberLogin_var.get():
            rc_file = open(self.rc_ad, 'w')
            user = self.cmdForms['default'].descr.entryByName\
                ['UserName_Entry']['widget'].get()
            passwd = self.cmdForms['default'].descr.entryByName\
                ['Password_Entry']['widget'].get()
            rc_file.write("User:%s\nPassword:%s\n" % (user, passwd))
            rc_file.close()
        self.login = True

WebServicesGUI = CommandGUI()
WebServicesGUI.addMenuCommand('AutoToolsBar', menuText['StartMB'], "Web Services...")

commandList = [{'name':'ADweb_services', 'cmd':WebServices(), 'gui':WebServicesGUI}]

WebServices4GUI = CommandGUI()
WebServices4GUI.addMenuCommand('AutoTools4Bar', menuText['StartMB'], "Web Services...")

def initModule(viewer):
    if not hasattr(viewer, 'ADweb_services') and hasattr(viewer, 'GUI')\
       and hasattr(viewer.GUI, 'currentADTBar'):
        viewer.addCommand(WebServices(), 'ADweb_services', WebServices4GUI)
    #else:
    #    for _dict in commandList:
    #        viewer.addCommand(_dict['cmd'],_dict['name'],_dict['gui'])
})
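# A minimal, synchronous sketch of the same Opal launch/poll/fetch cycle that
# startAutogrid and checkStatus implement above with Tk callbacks. The
# run_opal_job name, service_url, and file paths are illustrative assumptions,
# not part of the original module; status code 8 (GramJob.STATUS_DONE) is the
# completion code tested in checkStatus.
import os
import time
from mglutil.web.services.AppService_client import AppServiceLocator, \
     launchJobRequest, queryStatusRequest, getOutputsRequest
from mglutil.web.services.AppService_types import ns0

def run_opal_job(service_url, param_file, extra_files=()):
    req = launchJobRequest()
    req._argList = '-p ' + os.path.basename(param_file)
    input_files = []
    for path in (param_file,) + tuple(extra_files):
        # Opal2-style attachment upload, as in uploadFile above
        ws_input = ns0.InputFileType_Def('inputFile')
        ws_input._name = os.path.basename(path)
        ws_input._attachment = open(path, 'r')
        input_files.append(ws_input)
    req._inputFile = input_files
    port = AppServiceLocator().getAppServicePort(service_url)
    job_id = port.launchJob(req)._jobID
    while True:
        # poll until the job reaches GramJob.STATUS_DONE
        status = port.queryStatus(queryStatusRequest(job_id))
        if status._code == 8:
            break
        time.sleep(5)
    return port.getOutputs(getOutputsRequest(job_id))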
virtualvolume_webhook.go
/*
 * Copyright (c) 2019.
 *
 * Metaprov.com
 */

package v1alpha1

import (
	"github.com/metaprov/modelaapi/pkg/apis/common"
	apierrors "k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/apimachinery/pkg/util/validation/field"
	"sigs.k8s.io/controller-runtime/pkg/webhook"
)

// defaulting
var _ webhook.Defaulter = &VirtualVolume{}

// validation
var _ webhook.Validator = &VirtualVolume{}

// ValidateCreate implements webhook.Validator so a webhook will be registered for the type
func (volume *VirtualVolume) ValidateCreate() error {
	return volume.validate()
}

// ValidateUpdate implements webhook.Validator so a webhook will be registered for the type
func (volume *VirtualVolume) ValidateUpdate(old runtime.Object) error {
	return volume.validate()
}

func (volume *VirtualVolume) validate() error {
	var allErrs field.ErrorList
	allErrs = append(allErrs, volume.validateMeta(field.NewPath("metadata"))...)
	allErrs = append(allErrs, volume.validateSpec(field.NewPath("spec"))...)
	if len(allErrs) == 0 {
		return nil
	}
	return apierrors.NewInvalid(
		schema.GroupKind{Group: "infra.modela.ai", Kind: "VirtualVolume"},
		volume.Name, allErrs)
}

func (volume *VirtualVolume) validateMeta(fldPath *field.Path) field.ErrorList {
	var allErrs field.ErrorList
	allErrs = append(allErrs, volume.validateName(fldPath.Child("name"))...)
	return allErrs
}

func (volume *VirtualVolume) validateName(fldPath *field.Path) field.ErrorList {
	var allErrs field.ErrorList
	err := common.ValidateResourceName(volume.Name)
	if err != nil {
		// fldPath already points at metadata.name
		allErrs = append(allErrs, field.Invalid(fldPath, volume.Name, err.Error()))
	}
	return allErrs
}

func (volume *VirtualVolume) validateSpec(fldPath *field.Path) field.ErrorList {
	var allErrs field.ErrorList
	return allErrs
}

func (volume *VirtualVolume) Default() {
	if !volume.HasFinalizer() {
		volume.AddFinalizer()
	}
	volume.Kind = "VirtualVolume"
}

// ValidateDelete implements webhook.Validator so a webhook will be registered for the type
func (volume *VirtualVolume) ValidateDelete() error {
	panic("implement me")
}
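// Sketch (not part of the original file): how a webhook.Defaulter/webhook.Validator
// pair like this is usually registered with a controller-runtime manager, following
// the common kubebuilder scaffold. SetupWebhookWithManager is the conventional name
// and mgr is assumed to be the manager created in main.go; in a real file the
// ctrl import belongs in the import block at the top.

import ctrl "sigs.k8s.io/controller-runtime"

func (volume *VirtualVolume) SetupWebhookWithManager(mgr ctrl.Manager) error {
	// wires Default/ValidateCreate/ValidateUpdate/ValidateDelete into the
	// mutating and validating admission webhooks for this type
	return ctrl.NewWebhookManagedBy(mgr).
		For(volume).
		Complete()
}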
handlers.rs
use actix_web::http::header::{self, ContentType}; use actix_web::http::StatusCode; use actix_web::{web, HttpRequest, HttpResponse}; use prometheus::{Encoder, TextEncoder}; use serde::Deserialize; use crate::broadcaster::Broadcaster; use crate::dto::PlanetDto; use crate::errors::CustomError; use crate::model::PlanetType; use crate::services::{PlanetService, RateLimitingService}; use std::sync::Mutex; #[derive(Debug, Deserialize)] pub struct GetPlanetsQueryParams { r#type: Option<PlanetType>, } pub async fn get_planets( req: HttpRequest, web::Query(query_params): web::Query<GetPlanetsQueryParams>, rate_limit_service: web::Data<RateLimitingService>, planet_service: web::Data<PlanetService>, ) -> Result<HttpResponse, CustomError> { // can be moved to actix middleware rate_limit_service .assert_rate_limit_not_exceeded(get_ip_addr(&req)?) .await?; let planets = planet_service.get_planets(query_params.r#type).await?; Ok(HttpResponse::Ok().json(planets.into_iter().map(PlanetDto::from).collect::<Vec<_>>())) } pub async fn create_planet( planet_dto: web::Json<PlanetDto>, planet_service: web::Data<PlanetService>, ) -> Result<HttpResponse, CustomError> { let planet = planet_service .create_planet(planet_dto.into_inner().into()) .await?; Ok(HttpResponse::Ok().json(PlanetDto::from(planet))) } pub async fn get_planet( planet_id: web::Path<String>, planet_service: web::Data<PlanetService>, ) -> Result<HttpResponse, CustomError> { let planet = planet_service.get_planet(&planet_id.into_inner()).await?; Ok(HttpResponse::Ok().json(PlanetDto::from(planet))) } pub async fn update_planet( planet_id: web::Path<String>, planet_dto: web::Json<PlanetDto>, planet_service: web::Data<PlanetService>, ) -> Result<HttpResponse, CustomError> { let planet = planet_service .update_planet(&planet_id.into_inner(), planet_dto.into_inner().into()) .await?; Ok(HttpResponse::Ok().json(PlanetDto::from(planet))) } pub async fn delete_planet( planet_id: web::Path<String>, planet_service: web::Data<PlanetService>, ) -> Result<HttpResponse, CustomError> { planet_service .delete_planet(&planet_id.into_inner()) .await?; Ok(HttpResponse::Ok().finish()) } pub async fn get_image_of_planet( planet_id: web::Path<String>, planet_service: web::Data<PlanetService>, ) -> Result<HttpResponse, CustomError> { let image = planet_service .get_image_of_planet(&planet_id.into_inner()) .await?; Ok(HttpResponse::Ok() .content_type(ContentType::png()) .body(image)) } pub async fn sse(broadcaster: web::Data<Mutex<Broadcaster>>) -> Result<HttpResponse, CustomError> { let rx = broadcaster .lock() .expect("Can't lock broadcaster") .new_client() .await; let response_stream = tokio_stream::wrappers::ReceiverStream::new(rx); Ok(HttpResponse::build(StatusCode::OK) .insert_header(header::ContentType(mime::TEXT_EVENT_STREAM)) .streaming(response_stream)) } pub async fn index() -> Result<HttpResponse, CustomError> { let content = include_str!("index.html"); Ok(HttpResponse::Ok() .insert_header(header::ContentType(mime::TEXT_HTML)) .body(content)) } pub async fn metrics() -> Result<HttpResponse, CustomError> { let encoder = TextEncoder::new(); let mut buffer = vec![]; encoder .encode(&prometheus::gather(), &mut buffer) .expect("Failed to encode metrics"); let response = String::from_utf8(buffer.clone()).expect("Failed to convert bytes to string"); buffer.clear(); Ok(HttpResponse::Ok() .insert_header(header::ContentType(mime::TEXT_PLAIN)) .body(response)) } fn get_ip_addr(req: &HttpRequest) -> Result<String, CustomError> { Ok(req
}
.peer_addr() .ok_or(CustomError::InternalError)? .ip() .to_string())
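// Sketch (not part of the original file): one way these handlers could be
// mounted when building the actix-web App; the route paths are illustrative
// assumptions, and the Data/Mutex state is registered elsewhere with app_data.
use actix_web::web;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.route("/planets", web::get().to(get_planets))
        .route("/planets", web::post().to(create_planet))
        .route("/planets/{planet_id}", web::get().to(get_planet))
        .route("/planets/{planet_id}", web::put().to(update_planet))
        .route("/planets/{planet_id}", web::delete().to(delete_planet))
        .route("/planets/{planet_id}/image", web::get().to(get_image_of_planet))
        .route("/events", web::get().to(sse))
        .route("/metrics", web::get().to(metrics))
        .route("/", web::get().to(index));
}

// Typical usage: App::new().configure(configure), so the same routing table can
// be reused in tests and in the main server builder.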
mod.rs
//! A double-ended queue implemented with a growable ring buffer. //! //! This queue has *O*(1) amortized inserts and removals from both ends of the //! container. It also has *O*(1) indexing like a vector. The contained elements //! are not required to be copyable, and the queue will be sendable if the //! contained type is sendable. #![stable(feature = "rust1", since = "1.0.0")] use core::cmp::{self, Ordering}; use core::fmt; use core::hash::{Hash, Hasher}; use core::iter::{repeat_with, FromIterator}; use core::marker::PhantomData; use core::mem::{self, ManuallyDrop}; use core::ops::{Index, IndexMut, Range, RangeBounds}; use core::ptr::{self, NonNull}; use core::slice; use crate::collections::TryReserveError; use crate::raw_vec::RawVec; use crate::vec::Vec; #[macro_use] mod macros; #[stable(feature = "drain", since = "1.6.0")] pub use self::drain::Drain; mod drain; #[stable(feature = "rust1", since = "1.0.0")] pub use self::iter_mut::IterMut; mod iter_mut; #[stable(feature = "rust1", since = "1.0.0")] pub use self::into_iter::IntoIter; mod into_iter; #[stable(feature = "rust1", since = "1.0.0")] pub use self::iter::Iter; mod iter; use self::pair_slices::PairSlices; mod pair_slices; use self::ring_slices::RingSlices; mod ring_slices; #[cfg(test)] mod tests; const INITIAL_CAPACITY: usize = 7; // 2^3 - 1 const MINIMUM_CAPACITY: usize = 1; // 2 - 1 const MAXIMUM_ZST_CAPACITY: usize = 1 << (core::mem::size_of::<usize>() * 8 - 1); // Largest possible power of two /// A double-ended queue implemented with a growable ring buffer. /// /// The "default" usage of this type as a queue is to use [`push_back`] to add to /// the queue, and [`pop_front`] to remove from the queue. [`extend`] and [`append`] /// push onto the back in this manner, and iterating over `VecDeque` goes front /// to back. /// /// Since `VecDeque` is a ring buffer, its elements are not necessarily contiguous /// in memory. If you want to access the elements as a single slice, such as for /// efficient sorting, you can use [`make_contiguous`]. It rotates the `VecDeque` /// so that its elements do not wrap, and returns a mutable slice to the /// now-contiguous element sequence. /// /// [`push_back`]: VecDeque::push_back /// [`pop_front`]: VecDeque::pop_front /// [`extend`]: VecDeque::extend /// [`append`]: VecDeque::append /// [`make_contiguous`]: VecDeque::make_contiguous #[cfg_attr(not(test), rustc_diagnostic_item = "vecdeque_type")] #[stable(feature = "rust1", since = "1.0.0")] pub struct VecDeque<T> { // tail and head are pointers into the buffer. Tail always points // to the first element that could be read, Head always points // to where data should be written. // If tail == head the buffer is empty. The length of the ringbuffer // is defined as the distance between the two. tail: usize, head: usize, buf: RawVec<T>, } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Clone> Clone for VecDeque<T> { fn clone(&self) -> VecDeque<T> { self.iter().cloned().collect() } fn clone_from(&mut self, other: &Self) { self.truncate(other.len()); let mut iter = PairSlices::from(self, other); while let Some((dst, src)) = iter.next() { dst.clone_from_slice(&src); } if iter.has_remainder() { for remainder in iter.remainder() { self.extend(remainder.iter().cloned()); } } } } #[stable(feature = "rust1", since = "1.0.0")] unsafe impl<#[may_dangle] T> Drop for VecDeque<T> { fn drop(&mut self) { /// Runs the destructor for all items in the slice when it gets dropped (normally or /// during unwinding). 
struct Dropper<'a, T>(&'a mut [T]); impl<'a, T> Drop for Dropper<'a, T> { fn drop(&mut self) { unsafe { ptr::drop_in_place(self.0); } } } let (front, back) = self.as_mut_slices(); unsafe { let _back_dropper = Dropper(back); // use drop for [T] ptr::drop_in_place(front); } // RawVec handles deallocation } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> Default for VecDeque<T> { /// Creates an empty `VecDeque<T>`. #[inline] fn default() -> VecDeque<T> { VecDeque::new() } } impl<T> VecDeque<T> { /// Marginally more convenient #[inline] fn ptr(&self) -> *mut T { self.buf.ptr() } /// Marginally more convenient #[inline] fn cap(&self) -> usize { if mem::size_of::<T>() == 0 { // For zero sized types, we are always at maximum capacity MAXIMUM_ZST_CAPACITY } else { self.buf.capacity() } } /// Turn ptr into a slice #[inline] unsafe fn buffer_as_slice(&self) -> &[T] { unsafe { slice::from_raw_parts(self.ptr(), self.cap()) } } /// Turn ptr into a mut slice #[inline] unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] { unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) } } /// Moves an element out of the buffer #[inline] unsafe fn buffer_read(&mut self, off: usize) -> T { unsafe { ptr::read(self.ptr().add(off)) } } /// Writes an element into the buffer, moving it. #[inline] unsafe fn buffer_write(&mut self, off: usize, value: T) { unsafe { ptr::write(self.ptr().add(off), value); } } /// Returns `true` if the buffer is at full capacity. #[inline] fn is_full(&self) -> bool { self.cap() - self.len() == 1 } /// Returns the index in the underlying buffer for a given logical element /// index. #[inline] fn wrap_index(&self, idx: usize) -> usize { wrap_index(idx, self.cap()) } /// Returns the index in the underlying buffer for a given logical element /// index + addend. #[inline] fn wrap_add(&self, idx: usize, addend: usize) -> usize { wrap_index(idx.wrapping_add(addend), self.cap()) } /// Returns the index in the underlying buffer for a given logical element /// index - subtrahend. #[inline] fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize { wrap_index(idx.wrapping_sub(subtrahend), self.cap()) } /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy(&self, dst: usize, src: usize, len: usize) { debug_assert!( dst + len <= self.cap(), "cpy dst={} src={} len={} cap={}", dst, src, len, self.cap() ); debug_assert!( src + len <= self.cap(), "cpy dst={} src={} len={} cap={}", dst, src, len, self.cap() ); unsafe { ptr::copy(self.ptr().add(src), self.ptr().add(dst), len); } } /// Copies a contiguous block of memory len long from src to dst #[inline] unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) { debug_assert!( dst + len <= self.cap(), "cno dst={} src={} len={} cap={}", dst, src, len, self.cap() ); debug_assert!( src + len <= self.cap(), "cno dst={} src={} len={} cap={}", dst, src, len, self.cap() ); unsafe { ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len); } } /// Copies a potentially wrapping block of memory len long from src to dest. /// (abs(dst - src) + len) must be no larger than cap() (There must be at /// most one continuous overlapping region between src and dest). 
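    // Worked example (illustrative, not from the original source): with
    // cap() = 8, src = 6, dst = 2, len = 4 we get src_pre_wrap_len = 2 (< len,
    // so src wraps) and dst_pre_wrap_len = 6 (>= len, so dst does not), while
    // wrap_sub(dst, src) = 4 is not < len, so dst is not "after" src. That
    // selects the (false, true, false) arm below: copy the 2 elements at 6..8
    // to 2..4, then the remaining 2 elements at 0..2 to 4..6.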
unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) { #[allow(dead_code)] fn diff(a: usize, b: usize) -> usize { if a <= b { b - a } else { a - b } } debug_assert!( cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(), "wrc dst={} src={} len={} cap={}", dst, src, len, self.cap() ); if src == dst || len == 0 { return; } let dst_after_src = self.wrap_sub(dst, src) < len; let src_pre_wrap_len = self.cap() - src; let dst_pre_wrap_len = self.cap() - dst; let src_wraps = src_pre_wrap_len < len; let dst_wraps = dst_pre_wrap_len < len; match (dst_after_src, src_wraps, dst_wraps) { (_, false, false) => { // src doesn't wrap, dst doesn't wrap // // S . . . // 1 [_ _ A A B B C C _] // 2 [_ _ A A A A B B _] // D . . . // unsafe { self.copy(dst, src, len); } } (false, false, true) => { // dst before src, src doesn't wrap, dst wraps // // S . . . // 1 [A A B B _ _ _ C C] // 2 [A A B B _ _ _ A A] // 3 [B B B B _ _ _ A A] // . . D . // unsafe { self.copy(dst, src, dst_pre_wrap_len); self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len); } } (true, false, true) => { // src before dst, src doesn't wrap, dst wraps // // S . . . // 1 [C C _ _ _ A A B B] // 2 [B B _ _ _ A A B B] // 3 [B B _ _ _ A A A A] // . . D . // unsafe { self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len); self.copy(dst, src, dst_pre_wrap_len); } } (false, true, false) => { // dst before src, src wraps, dst doesn't wrap // // . . S . // 1 [C C _ _ _ A A B B] // 2 [C C _ _ _ B B B B] // 3 [C C _ _ _ B B C C] // D . . . // unsafe { self.copy(dst, src, src_pre_wrap_len); self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len); } } (true, true, false) => { // src before dst, src wraps, dst doesn't wrap // // . . S . // 1 [A A B B _ _ _ C C] // 2 [A A A A _ _ _ C C] // 3 [C C A A _ _ _ C C] // D . . . // unsafe { self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len); self.copy(dst, src, src_pre_wrap_len); } } (false, true, true) => { // dst before src, src wraps, dst wraps // // . . . S . // 1 [A B C D _ E F G H] // 2 [A B C D _ E G H H] // 3 [A B C D _ E G H A] // 4 [B C C D _ E G H A] // . . D . . // debug_assert!(dst_pre_wrap_len > src_pre_wrap_len); let delta = dst_pre_wrap_len - src_pre_wrap_len; unsafe { self.copy(dst, src, src_pre_wrap_len); self.copy(dst + src_pre_wrap_len, 0, delta); self.copy(0, delta, len - dst_pre_wrap_len); } } (true, true, true) => { // src before dst, src wraps, dst wraps // // . . S . . // 1 [A B C D _ E F G H] // 2 [A A B D _ E F G H] // 3 [H A B D _ E F G H] // 4 [H A B D _ E F F G] // . . . D . // debug_assert!(src_pre_wrap_len > dst_pre_wrap_len); let delta = src_pre_wrap_len - dst_pre_wrap_len; unsafe { self.copy(delta, 0, len - src_pre_wrap_len); self.copy(0, self.cap() - delta, delta); self.copy(dst, src, dst_pre_wrap_len); } } } } /// Frobs the head and tail sections around to handle the fact that we /// just reallocated. Unsafe because it trusts old_capacity. #[inline] unsafe fn handle_capacity_increase(&mut self, old_capacity: usize) { let new_capacity = self.cap(); // Move the shortest contiguous section of the ring buffer // T H // [o o o o o o o . ] // T H // A [o o o o o o o . . . . . . . . . ] // H T // [o o . o o o o o ] // T H // B [. . . o o o o o o o . . . . . . ] // H T // [o o o o o . o o ] // H T // C [o o o o o . . . . . . . . . 
o o ] if self.tail <= self.head { // A // Nop } else if self.head < old_capacity - self.tail { // B unsafe { self.copy_nonoverlapping(old_capacity, 0, self.head); } self.head += old_capacity; debug_assert!(self.head > self.tail); } else { // C let new_tail = new_capacity - (old_capacity - self.tail); unsafe { self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail); } self.tail = new_tail; debug_assert!(self.head < self.tail); } debug_assert!(self.head < self.cap()); debug_assert!(self.tail < self.cap()); debug_assert!(self.cap().count_ones() == 1); } } impl<T> VecDeque<T> { /// Creates an empty `VecDeque`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let vector: VecDeque<u32> = VecDeque::new(); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> VecDeque<T> { VecDeque::with_capacity(INITIAL_CAPACITY) } /// Creates an empty `VecDeque` with space for at least `capacity` elements. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let vector: VecDeque<u32> = VecDeque::with_capacity(10); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn with_capacity(capacity: usize) -> VecDeque<T> { // +1 since the ringbuffer always leaves one space empty let cap = cmp::max(capacity + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); assert!(cap > capacity, "capacity overflow"); VecDeque { tail: 0, head: 0, buf: RawVec::with_capacity(cap) } } /// Provides a reference to the element at the given index. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// assert_eq!(buf.get(1), Some(&4)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get(&self, index: usize) -> Option<&T> { if index < self.len() { let idx = self.wrap_add(self.tail, index); unsafe { Some(&*self.ptr().add(idx)) } } else { None } } /// Provides a mutable reference to the element at the given index. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// if let Some(elem) = buf.get_mut(1) { /// *elem = 7; /// } /// /// assert_eq!(buf[1], 7); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { if index < self.len() { let idx = self.wrap_add(self.tail, index); unsafe { Some(&mut *self.ptr().add(idx)) } } else { None } } /// Swaps elements at indices `i` and `j`. /// /// `i` and `j` may be equal. /// /// Element at index 0 is the front of the queue. /// /// # Panics /// /// Panics if either index is out of bounds. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(3); /// buf.push_back(4); /// buf.push_back(5); /// assert_eq!(buf, [3, 4, 5]); /// buf.swap(0, 2); /// assert_eq!(buf, [5, 4, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn swap(&mut self, i: usize, j: usize) { assert!(i < self.len()); assert!(j < self.len()); let ri = self.wrap_add(self.tail, i); let rj = self.wrap_add(self.tail, j); unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) } } /// Returns the number of elements the `VecDeque` can hold without /// reallocating. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let buf: VecDeque<i32> = VecDeque::with_capacity(10); /// assert!(buf.capacity() >= 10); /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn capacity(&self) -> usize { self.cap() - 1 } /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the /// given `VecDeque`. Does nothing if the capacity is already sufficient. /// /// Note that the allocator may give the collection more space than it requests. Therefore /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future /// insertions are expected. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect(); /// buf.reserve_exact(10); /// assert!(buf.capacity() >= 11); /// ``` /// /// [`reserve`]: VecDeque::reserve #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve_exact(&mut self, additional: usize) { self.reserve(additional); } /// Reserves capacity for at least `additional` more elements to be inserted in the given /// `VecDeque`. The collection may reserve more space to avoid frequent reallocations. /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect(); /// buf.reserve(10); /// assert!(buf.capacity() >= 11); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { let old_cap = self.cap(); let used_cap = self.len() + 1; let new_cap = used_cap .checked_add(additional) .and_then(|needed_cap| needed_cap.checked_next_power_of_two()) .expect("capacity overflow"); if new_cap > old_cap { self.buf.reserve_exact(used_cap, new_cap - used_cap); unsafe { self.handle_capacity_increase(old_cap); } } } /// Tries to reserve the minimum capacity for exactly `additional` more elements to /// be inserted in the given `VecDeque<T>`. After calling `try_reserve_exact`, /// capacity will be greater than or equal to `self.len() + additional`. /// Does nothing if the capacity is already sufficient. /// /// Note that the allocator may give the collection more space than it /// requests. Therefore, capacity can not be relied upon to be precisely /// minimal. Prefer `reserve` if future insertions are expected. /// /// # Errors /// /// If the capacity overflows `usize`, or the allocator reports a failure, then an error /// is returned. 
/// /// # Examples /// /// ``` /// #![feature(try_reserve)] /// use std::collections::TryReserveError; /// use std::collections::VecDeque; /// /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> { /// let mut output = VecDeque::new(); /// /// // Pre-reserve the memory, exiting if we can't /// output.try_reserve_exact(data.len())?; /// /// // Now we know this can't OOM(Out-Of-Memory) in the middle of our complex work /// output.extend(data.iter().map(|&val| { /// val * 2 + 5 // very complicated /// })); /// /// Ok(output) /// } /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> { self.try_reserve(additional) } /// Tries to reserve capacity for at least `additional` more elements to be inserted /// in the given `VecDeque<T>`. The collection may reserve more space to avoid /// frequent reallocations. After calling `try_reserve`, capacity will be /// greater than or equal to `self.len() + additional`. Does nothing if /// capacity is already sufficient. /// /// # Errors /// /// If the capacity overflows `usize`, or the allocator reports a failure, then an error /// is returned. /// /// # Examples /// /// ``` /// #![feature(try_reserve)] /// use std::collections::TryReserveError; /// use std::collections::VecDeque; /// /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> { /// let mut output = VecDeque::new(); /// /// // Pre-reserve the memory, exiting if we can't /// output.try_reserve(data.len())?; /// /// // Now we know this can't OOM in the middle of our complex work /// output.extend(data.iter().map(|&val| { /// val * 2 + 5 // very complicated /// })); /// /// Ok(output) /// } /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")] pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { let old_cap = self.cap(); let used_cap = self.len() + 1; let new_cap = used_cap .checked_add(additional) .and_then(|needed_cap| needed_cap.checked_next_power_of_two()) .ok_or(TryReserveError::CapacityOverflow)?; if new_cap > old_cap { self.buf.try_reserve_exact(used_cap, new_cap - used_cap)?; unsafe { self.handle_capacity_increase(old_cap); } } Ok(()) } /// Shrinks the capacity of the `VecDeque` as much as possible. /// /// It will drop down as close as possible to the length but the allocator may still inform the /// `VecDeque` that there is space for a few more elements. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::with_capacity(15); /// buf.extend(0..4); /// assert_eq!(buf.capacity(), 15); /// buf.shrink_to_fit(); /// assert!(buf.capacity() >= 4); /// ``` #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn shrink_to_fit(&mut self) { self.shrink_to(0); } /// Shrinks the capacity of the `VecDeque` with a lower bound. /// /// The capacity will remain at least as large as both the length /// and the supplied value. /// /// If the current capacity is less than the lower limit, this is a no-op. 
/// /// # Examples /// /// ``` /// #![feature(shrink_to)] /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::with_capacity(15); /// buf.extend(0..4); /// assert_eq!(buf.capacity(), 15); /// buf.shrink_to(6); /// assert!(buf.capacity() >= 6); /// buf.shrink_to(0); /// assert!(buf.capacity() >= 4); /// ``` #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")] pub fn shrink_to(&mut self, min_capacity: usize) { let min_capacity = cmp::min(min_capacity, self.capacity()); // We don't have to worry about an overflow as neither `self.len()` nor `self.capacity()` // can ever be `usize::MAX`. +1 as the ringbuffer always leaves one space empty. let target_cap = cmp::max(cmp::max(min_capacity, self.len()) + 1, MINIMUM_CAPACITY + 1) .next_power_of_two(); if target_cap < self.cap() { // There are three cases of interest: // All elements are out of desired bounds // Elements are contiguous, and head is out of desired bounds // Elements are discontiguous, and tail is out of desired bounds // // At all other times, element positions are unaffected. // // Indicates that elements at the head should be moved. let head_outside = self.head == 0 || self.head >= target_cap; // Move elements from out of desired bounds (positions after target_cap) if self.tail >= target_cap && head_outside { // T H // [. . . . . . . . o o o o o o o . ] // T H // [o o o o o o o . ] unsafe { self.copy_nonoverlapping(0, self.tail, self.len()); } self.head = self.len(); self.tail = 0; } else if self.tail != 0 && self.tail < target_cap && head_outside { // T H // [. . . o o o o o o o . . . . . . ] // H T // [o o . o o o o o ] let len = self.wrap_sub(self.head, target_cap); unsafe { self.copy_nonoverlapping(0, target_cap, len); } self.head = len; debug_assert!(self.head < self.tail); } else if self.tail >= target_cap { // H T // [o o o o o . . . . . . . . . o o ] // H T // [o o o o o . o o ] debug_assert!(self.wrap_sub(self.head, 1) < target_cap); let len = self.cap() - self.tail; let new_tail = target_cap - len; unsafe { self.copy_nonoverlapping(new_tail, self.tail, len); } self.tail = new_tail; debug_assert!(self.head < self.tail); } self.buf.shrink_to_fit(target_cap); debug_assert!(self.head < self.cap()); debug_assert!(self.tail < self.cap()); debug_assert!(self.cap().count_ones() == 1); } } /// Shortens the `VecDeque`, keeping the first `len` elements and dropping /// the rest. /// /// If `len` is greater than the `VecDeque`'s current length, this has no /// effect. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(10); /// buf.push_back(15); /// assert_eq!(buf, [5, 10, 15]); /// buf.truncate(1); /// assert_eq!(buf, [5]); /// ``` #[stable(feature = "deque_extras", since = "1.16.0")] pub fn truncate(&mut self, len: usize) { /// Runs the destructor for all items in the slice when it gets dropped (normally or /// during unwinding). 
struct Dropper<'a, T>(&'a mut [T]); impl<'a, T> Drop for Dropper<'a, T> { fn drop(&mut self) { unsafe { ptr::drop_in_place(self.0); } } } // Safe because: // // * Any slice passed to `drop_in_place` is valid; the second case has // `len <= front.len()` and returning on `len > self.len()` ensures // `begin <= back.len()` in the first case // * The head of the VecDeque is moved before calling `drop_in_place`, // so no value is dropped twice if `drop_in_place` panics unsafe { if len > self.len() { return; } let num_dropped = self.len() - len; let (front, back) = self.as_mut_slices(); if len > front.len() { let begin = len - front.len(); let drop_back = back.get_unchecked_mut(begin..) as *mut _; self.head = self.wrap_sub(self.head, num_dropped); ptr::drop_in_place(drop_back); } else { let drop_back = back as *mut _; let drop_front = front.get_unchecked_mut(len..) as *mut _; self.head = self.wrap_sub(self.head, num_dropped); // Make sure the second half is dropped even when a destructor // in the first one panics. let _back_dropper = Dropper(&mut *drop_back); ptr::drop_in_place(drop_front); } } } /// Returns a front-to-back iterator. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(3); /// buf.push_back(4); /// let b: &[_] = &[&5, &3, &4]; /// let c: Vec<&i32> = buf.iter().collect(); /// assert_eq!(&c[..], b); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter(&self) -> Iter<'_, T> { Iter { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_slice() } } } /// Returns a front-to-back iterator that returns mutable references. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(3); /// buf.push_back(4); /// for num in buf.iter_mut() { /// *num = *num - 2; /// } /// let b: &[_] = &[&mut 3, &mut 1, &mut 2]; /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter_mut(&mut self) -> IterMut<'_, T> { // SAFETY: The internal `IterMut` safety invariant is established because the // `ring` we create is a dereferencable slice for lifetime '_. IterMut { tail: self.tail, head: self.head, ring: ptr::slice_from_raw_parts_mut(self.ptr(), self.cap()), phantom: PhantomData, } } /// Returns a pair of slices which contain, in order, the contents of the /// `VecDeque`. /// /// If [`make_contiguous`] was previously called, all elements of the /// `VecDeque` will be in the first slice and the second slice will be empty. /// /// [`make_contiguous`]: VecDeque::make_contiguous /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut vector = VecDeque::new(); /// /// vector.push_back(0); /// vector.push_back(1); /// vector.push_back(2); /// /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..])); /// /// vector.push_front(10); /// vector.push_front(9); /// /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..])); /// ``` #[inline] #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn as_slices(&self) -> (&[T], &[T]) { unsafe { let buf = self.buffer_as_slice(); RingSlices::ring_slices(buf, self.head, self.tail) } } /// Returns a pair of slices which contain, in order, the contents of the /// `VecDeque`. /// /// If [`make_contiguous`] was previously called, all elements of the /// `VecDeque` will be in the first slice and the second slice will be empty. 
/// /// [`make_contiguous`]: VecDeque::make_contiguous /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut vector = VecDeque::new(); /// /// vector.push_back(0); /// vector.push_back(1); /// /// vector.push_front(10); /// vector.push_front(9); /// /// vector.as_mut_slices().0[0] = 42; /// vector.as_mut_slices().1[0] = 24; /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..])); /// ``` #[inline] #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) { unsafe { let head = self.head; let tail = self.tail; let buf = self.buffer_as_mut_slice(); RingSlices::ring_slices(buf, head, tail) } } /// Returns the number of elements in the `VecDeque`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// assert_eq!(v.len(), 0); /// v.push_back(1); /// assert_eq!(v.len(), 1); /// ``` #[doc(alias = "length")] #[stable(feature = "rust1", since = "1.0.0")] pub fn len(&self) -> usize { count(self.tail, self.head, self.cap()) } /// Returns `true` if the `VecDeque` is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// assert!(v.is_empty()); /// v.push_front(1); /// assert!(!v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn is_empty(&self) -> bool { self.tail == self.head } fn range_tail_head<R>(&self, range: R) -> (usize, usize) where R: RangeBounds<usize>, { let Range { start, end } = slice::range(range, ..self.len()); let tail = self.wrap_add(self.tail, start); let head = self.wrap_add(self.tail, end); (tail, head) } /// Creates an iterator that covers the specified range in the `VecDeque`. /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let v: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); /// let range = v.range(2..).copied().collect::<VecDeque<_>>(); /// assert_eq!(range, [3]); /// /// // A full range covers all contents /// let all = v.range(..); /// assert_eq!(all.len(), 3); /// ``` #[inline] #[stable(feature = "deque_range", since = "1.51.0")] pub fn range<R>(&self, range: R) -> Iter<'_, T> where R: RangeBounds<usize>, { let (tail, head) = self.range_tail_head(range); Iter { tail, head, // The shared reference we have in &self is maintained in the '_ of Iter. ring: unsafe { self.buffer_as_slice() }, } } /// Creates an iterator that covers the specified mutable range in the `VecDeque`. /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); /// for v in v.range_mut(2..) { /// *v *= 2; /// } /// assert_eq!(v, vec![1, 2, 6]); /// /// // A full range covers all contents /// for v in v.range_mut(..) { /// *v *= 2; /// } /// assert_eq!(v, vec![2, 4, 12]); /// ``` #[inline] #[stable(feature = "deque_range", since = "1.51.0")] pub fn range_mut<R>(&mut self, range: R) -> IterMut<'_, T> where R: RangeBounds<usize>, { let (tail, head) = self.range_tail_head(range); // SAFETY: The internal `IterMut` safety invariant is established because the // `ring` we create is a dereferencable slice for lifetime '_. 
IterMut { tail, head, ring: ptr::slice_from_raw_parts_mut(self.ptr(), self.cap()), phantom: PhantomData, } } /// Creates a draining iterator that removes the specified range in the /// `VecDeque` and yields the removed items. /// /// Note 1: The element range is removed even if the iterator is not /// consumed until the end. /// /// Note 2: It is unspecified how many elements are removed from the deque, /// if the `Drain` value is not dropped, but the borrow it holds expires /// (e.g., due to `mem::forget`). /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); /// let drained = v.drain(2..).collect::<VecDeque<_>>(); /// assert_eq!(drained, [3]); /// assert_eq!(v, [1, 2]); /// /// // A full range clears all contents /// v.drain(..); /// assert!(v.is_empty()); /// ``` #[inline] #[stable(feature = "drain", since = "1.6.0")] pub fn drain<R>(&mut self, range: R) -> Drain<'_, T> where R: RangeBounds<usize>, { // Memory safety // // When the Drain is first created, the source deque is shortened to // make sure no uninitialized or moved-from elements are accessible at // all if the Drain's destructor never gets to run. // // Drain will ptr::read out the values to remove. // When finished, the remaining data will be copied back to cover the hole, // and the head/tail values will be restored correctly. // let (drain_tail, drain_head) = self.range_tail_head(range); // The deque's elements are parted into three segments: // * self.tail -> drain_tail // * drain_tail -> drain_head // * drain_head -> self.head // // T = self.tail; H = self.head; t = drain_tail; h = drain_head // // We store drain_tail as self.head, and drain_head and self.head as // after_tail and after_head respectively on the Drain. This also // truncates the effective array such that if the Drain is leaked, we // have forgotten about the potentially moved values after the start of // the drain. // // T t h H // [. . . o o x x o o . . .] // let head = self.head; // "forget" about the values after the start of the drain until after // the drain is complete and the Drain destructor is run. self.head = drain_tail; Drain { deque: NonNull::from(&mut *self), after_tail: drain_head, after_head: head, iter: Iter { tail: drain_tail, head: drain_head, // Crucially, we only create shared references from `self` here and read from
}, } } /// Clears the `VecDeque`, removing all values. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut v = VecDeque::new(); /// v.push_back(1); /// v.clear(); /// assert!(v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn clear(&mut self) { self.truncate(0); } /// Returns `true` if the `VecDeque` contains an element equal to the /// given value. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut vector: VecDeque<u32> = VecDeque::new(); /// /// vector.push_back(0); /// vector.push_back(1); /// /// assert_eq!(vector.contains(&1), true); /// assert_eq!(vector.contains(&10), false); /// ``` #[stable(feature = "vec_deque_contains", since = "1.12.0")] pub fn contains(&self, x: &T) -> bool where T: PartialEq<T>, { let (a, b) = self.as_slices(); a.contains(x) || b.contains(x) } /// Provides a reference to the front element, or `None` if the `VecDeque` is /// empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.front(), None); /// /// d.push_back(1); /// d.push_back(2); /// assert_eq!(d.front(), Some(&1)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn front(&self) -> Option<&T> { self.get(0) } /// Provides a mutable reference to the front element, or `None` if the /// `VecDeque` is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.front_mut(), None); /// /// d.push_back(1); /// d.push_back(2); /// match d.front_mut() { /// Some(x) => *x = 9, /// None => (), /// } /// assert_eq!(d.front(), Some(&9)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn front_mut(&mut self) -> Option<&mut T> { self.get_mut(0) } /// Provides a reference to the back element, or `None` if the `VecDeque` is /// empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.back(), None); /// /// d.push_back(1); /// d.push_back(2); /// assert_eq!(d.back(), Some(&2)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn back(&self) -> Option<&T> { self.get(self.len().wrapping_sub(1)) } /// Provides a mutable reference to the back element, or `None` if the /// `VecDeque` is empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// assert_eq!(d.back(), None); /// /// d.push_back(1); /// d.push_back(2); /// match d.back_mut() { /// Some(x) => *x = 9, /// None => (), /// } /// assert_eq!(d.back(), Some(&9)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn back_mut(&mut self) -> Option<&mut T> { self.get_mut(self.len().wrapping_sub(1)) } /// Removes the first element and returns it, or `None` if the `VecDeque` is /// empty. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// d.push_back(1); /// d.push_back(2); /// /// assert_eq!(d.pop_front(), Some(1)); /// assert_eq!(d.pop_front(), Some(2)); /// assert_eq!(d.pop_front(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn pop_front(&mut self) -> Option<T> { if self.is_empty() { None } else { let tail = self.tail; self.tail = self.wrap_add(self.tail, 1); unsafe { Some(self.buffer_read(tail)) } } } /// Removes the last element from the `VecDeque` and returns it, or `None` if /// it is empty. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.pop_back(), None); /// buf.push_back(1); /// buf.push_back(3); /// assert_eq!(buf.pop_back(), Some(3)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn pop_back(&mut self) -> Option<T> { if self.is_empty() { None } else { self.head = self.wrap_sub(self.head, 1); let head = self.head; unsafe { Some(self.buffer_read(head)) } } } /// Prepends an element to the `VecDeque`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut d = VecDeque::new(); /// d.push_front(1); /// d.push_front(2); /// assert_eq!(d.front(), Some(&2)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn push_front(&mut self, value: T) { if self.is_full() { self.grow(); } self.tail = self.wrap_sub(self.tail, 1); let tail = self.tail; unsafe { self.buffer_write(tail, value); } } /// Appends an element to the back of the `VecDeque`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(1); /// buf.push_back(3); /// assert_eq!(3, *buf.back().unwrap()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn push_back(&mut self, value: T) { if self.is_full() { self.grow(); } let head = self.head; self.head = self.wrap_add(self.head, 1); unsafe { self.buffer_write(head, value) } } #[inline] fn is_contiguous(&self) -> bool { // FIXME: Should we consider `head == 0` to mean // that `self` is contiguous? self.tail <= self.head } /// Removes an element from anywhere in the `VecDeque` and returns it, /// replacing it with the first element. /// /// This does not preserve ordering, but is *O*(1). /// /// Returns `None` if `index` is out of bounds. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.swap_remove_front(0), None); /// buf.push_back(1); /// buf.push_back(2); /// buf.push_back(3); /// assert_eq!(buf, [1, 2, 3]); /// /// assert_eq!(buf.swap_remove_front(2), Some(3)); /// assert_eq!(buf, [2, 1]); /// ``` #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn swap_remove_front(&mut self, index: usize) -> Option<T> { let length = self.len(); if length > 0 && index < length && index != 0 { self.swap(index, 0); } else if index >= length { return None; } self.pop_front() } /// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the /// last element. /// /// This does not preserve ordering, but is *O*(1). /// /// Returns `None` if `index` is out of bounds. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// assert_eq!(buf.swap_remove_back(0), None); /// buf.push_back(1); /// buf.push_back(2); /// buf.push_back(3); /// assert_eq!(buf, [1, 2, 3]); /// /// assert_eq!(buf.swap_remove_back(0), Some(1)); /// assert_eq!(buf, [3, 2]); /// ``` #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn swap_remove_back(&mut self, index: usize) -> Option<T> { let length = self.len(); if length > 0 && index < length - 1 { self.swap(index, length - 1); } else if index >= length { return None; } self.pop_back() } /// Inserts an element at `index` within the `VecDeque`, shifting all elements with indices /// greater than or equal to `index` towards the back. /// /// Element at index 0 is the front of the queue. 
/// /// # Panics /// /// Panics if `index` is greater than `VecDeque`'s length /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut vec_deque = VecDeque::new(); /// vec_deque.push_back('a'); /// vec_deque.push_back('b'); /// vec_deque.push_back('c'); /// assert_eq!(vec_deque, &['a', 'b', 'c']); /// /// vec_deque.insert(1, 'd'); /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']); /// ``` #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn insert(&mut self, index: usize, value: T) { assert!(index <= self.len(), "index out of bounds"); if self.is_full() { self.grow(); } // Move the least number of elements in the ring buffer and insert // the given object // // At most len/2 - 1 elements will be moved. O(min(i, n - i)) // // There are three main cases: // Elements are contiguous // - special case when tail is 0 // Elements are discontiguous and the insert is in the tail section // Elements are discontiguous and the insert is in the head section // // For each of those there are two more cases: // Insert is closer to tail // Insert is closer to head // // Key: H - self.head // T - self.tail // o - Valid element // I - Insertion element // A - The element that should be after the insertion point // M - Indicates element was moved let idx = self.wrap_add(self.tail, index); let distance_to_tail = index; let distance_to_head = self.len() - index; let contiguous = self.is_contiguous(); match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) { (true, true, _) if index == 0 => { // push_front // // T // I H // [A o o o o o o . . . . . . . . .] // // H T // [A o o o o o o o . . . . . I] // self.tail = self.wrap_sub(self.tail, 1); } (true, true, _) => { unsafe { // contiguous, insert closer to tail: // // T I H // [. . . o o A o o o o . . . . . .] // // T H // [. . o o I A o o o o . . . . . .] // M M // // contiguous, insert closer to tail and tail is 0: // // // T I H // [o o A o o o o . . . . . . . . .] // // H T // [o I A o o o o o . . . . . . . o] // M M let new_tail = self.wrap_sub(self.tail, 1); self.copy(new_tail, self.tail, 1); // Already moved the tail, so we only copy `index - 1` elements. self.copy(self.tail, self.tail + 1, index - 1); self.tail = new_tail; } } (true, false, _) => { unsafe { // contiguous, insert closer to head: // // T I H // [. . . o o o o A o o . . . . . .] // // T H // [. . . o o o o I A o o . . . . .] // M M M self.copy(idx + 1, idx, self.head - idx); self.head = self.wrap_add(self.head, 1); } } (false, true, true) => { unsafe { // discontiguous, insert closer to tail, tail section: // // H T I // [o o o o o o . . . . . o o A o o] // // H T // [o o o o o o . . . . o o I A o o] // M M self.copy(self.tail - 1, self.tail, index); self.tail -= 1; } } (false, false, true) => { unsafe { // discontiguous, insert closer to head, tail section: // // H T I // [o o . . . . . . . o o o o o A o] // // H T // [o o o . . . . . . o o o o o I A] // M M M M // copy elements up to new head self.copy(1, 0, self.head); // copy last element into empty spot at bottom of buffer self.copy(0, self.cap() - 1, 1); // move elements from idx to end forward not including ^ element self.copy(idx + 1, idx, self.cap() - 1 - idx); self.head += 1; } } (false, true, false) if idx == 0 => { unsafe { // discontiguous, insert is closer to tail, head section, // and is at index zero in the internal buffer: // // I H T // [A o o o o o o o o o . . . o o o] // // H T // [A o o o o o o o o o . .
o o o I] // M M M // copy elements up to new tail self.copy(self.tail - 1, self.tail, self.cap() - self.tail); // copy last element into empty spot at bottom of buffer self.copy(self.cap() - 1, 0, 1); self.tail -= 1; } } (false, true, false) => { unsafe { // discontiguous, insert closer to tail, head section: // // I H T // [o o o A o o o o o o . . . o o o] // // H T // [o o I A o o o o o o . . o o o o] // M M M M M M // copy elements up to new tail self.copy(self.tail - 1, self.tail, self.cap() - self.tail); // copy last element into empty spot at bottom of buffer self.copy(self.cap() - 1, 0, 1); // move elements from idx-1 to end forward not including ^ element self.copy(0, 1, idx - 1); self.tail -= 1; } } (false, false, false) => { unsafe { // discontiguous, insert closer to head, head section: // // I H T // [o o o o A o o . . . . . . o o o] // // H T // [o o o o I A o o . . . . . o o o] // M M M self.copy(idx + 1, idx, self.head - idx); self.head += 1; } } } // tail might've been changed so we need to recalculate let new_idx = self.wrap_add(self.tail, index); unsafe { self.buffer_write(new_idx, value); } } /// Removes and returns the element at `index` from the `VecDeque`. /// Whichever end is closer to the removal point will be moved to make /// room, and all the affected elements will be moved to new positions. /// Returns `None` if `index` is out of bounds. /// /// Element at index 0 is the front of the queue. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(1); /// buf.push_back(2); /// buf.push_back(3); /// assert_eq!(buf, [1, 2, 3]); /// /// assert_eq!(buf.remove(1), Some(2)); /// assert_eq!(buf, [1, 3]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(&mut self, index: usize) -> Option<T> { if self.is_empty() || self.len() <= index { return None; } // There are three main cases: // Elements are contiguous // Elements are discontiguous and the removal is in the tail section // Elements are discontiguous and the removal is in the head section // - special case when elements are technically contiguous, // but self.head = 0 // // For each of those there are two more cases: // Removal is closer to tail // Removal is closer to head // // Key: H - self.head // T - self.tail // o - Valid element // x - Element marked for removal // R - Indicates element that is being removed // M - Indicates element was moved let idx = self.wrap_add(self.tail, index); let elem = unsafe { Some(self.buffer_read(idx)) }; let distance_to_tail = index; let distance_to_head = self.len() - index; let contiguous = self.is_contiguous(); match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) { (true, true, _) => { unsafe { // contiguous, remove closer to tail: // // T R H // [. . . o o x o o o o . . . . . .] // // T H // [. . . . o o o o o o . . . . . .] // M M self.copy(self.tail + 1, self.tail, index); self.tail += 1; } } (true, false, _) => { unsafe { // contiguous, remove closer to head: // // T R H // [. . . o o o o x o o . . . . . .] // // T H // [. . . o o o o o o . . . . . . .] // M M self.copy(idx, idx + 1, self.head - idx - 1); self.head -= 1; } } (false, true, true) => { unsafe { // discontiguous, remove closer to tail, tail section: // // H T R // [o o o o o o . . . . . o o x o o] // // H T // [o o o o o o . . . . . .
o o o o] // M M self.copy(self.tail + 1, self.tail, index); self.tail = self.wrap_add(self.tail, 1); } } (false, false, false) => { unsafe { // discontiguous, remove closer to head, head section: // // R H T // [o o o o x o o . . . . . . o o o] // // H T // [o o o o o o . . . . . . . o o o] // M M self.copy(idx, idx + 1, self.head - idx - 1); self.head -= 1; } } (false, false, true) => { unsafe { // discontiguous, remove closer to head, tail section: // // H T R // [o o o . . . . . . o o o o o x o] // // H T // [o o . . . . . . . o o o o o o o] // M M M M // // or quasi-discontiguous, remove next to head, tail section: // // H T R // [. . . . . . . . . o o o o o x o] // // T H // [. . . . . . . . . o o o o o o .] // M // draw in elements in the tail section self.copy(idx, idx + 1, self.cap() - idx - 1); // Prevents underflow. if self.head != 0 { // copy first element into empty spot self.copy(self.cap() - 1, 0, 1); // move elements in the head section backwards self.copy(0, 1, self.head - 1); } self.head = self.wrap_sub(self.head, 1); } } (false, true, false) => { unsafe { // discontiguous, remove closer to tail, head section: // // R H T // [o o x o o o o o o o . . . o o o] // // H T // [o o o o o o o o o o . . . . o o] // M M M M M // draw in elements up to idx self.copy(1, 0, idx); // copy last element into empty spot self.copy(0, self.cap() - 1, 1); // move elements from tail to end forward, excluding the last one self.copy(self.tail + 1, self.tail, self.cap() - self.tail - 1); self.tail = self.wrap_add(self.tail, 1); } } } elem } /// Splits the `VecDeque` into two at the given index. /// /// Returns a newly allocated `VecDeque`. `self` contains elements `[0, at)`, /// and the returned `VecDeque` contains elements `[at, len)`. /// /// Note that the capacity of `self` does not change. /// /// Element at index 0 is the front of the queue. /// /// # Panics /// /// Panics if `at > len`. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<_> = vec![1, 2, 3].into_iter().collect(); /// let buf2 = buf.split_off(1); /// assert_eq!(buf, [1]); /// assert_eq!(buf2, [2, 3]); /// ``` #[inline] #[must_use = "use `.truncate()` if you don't need the other half"] #[stable(feature = "split_off", since = "1.4.0")] pub fn split_off(&mut self, at: usize) -> Self { let len = self.len(); assert!(at <= len, "`at` out of bounds"); let other_len = len - at; let mut other = VecDeque::with_capacity(other_len); unsafe { let (first_half, second_half) = self.as_slices(); let first_len = first_half.len(); let second_len = second_half.len(); if at < first_len { // `at` lies in the first half. let amount_in_first = first_len - at; ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first); // just take all of the second half. ptr::copy_nonoverlapping( second_half.as_ptr(), other.ptr().add(amount_in_first), second_len, ); } else { // `at` lies in the second half, need to factor in the elements we skipped // in the first half. let offset = at - first_len; let amount_in_second = second_len - offset; ptr::copy_nonoverlapping( second_half.as_ptr().add(offset), other.ptr(), amount_in_second, ); } } // Cleanup where the ends of the buffers are self.head = self.wrap_sub(self.head, other_len); other.head = other.wrap_index(other_len); other } /// Moves all the elements of `other` into `self`, leaving `other` empty. /// /// # Panics /// /// Panics if the new number of elements in self overflows a `usize`. 
/// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<_> = vec![1, 2].into_iter().collect(); /// let mut buf2: VecDeque<_> = vec![3, 4].into_iter().collect(); /// buf.append(&mut buf2); /// assert_eq!(buf, [1, 2, 3, 4]); /// assert_eq!(buf2, []); /// ``` #[inline] #[stable(feature = "append", since = "1.4.0")] pub fn append(&mut self, other: &mut Self) { // naive impl self.extend(other.drain(..)); } /// Retains only the elements specified by the predicate. /// /// In other words, remove all elements `e` such that `f(&e)` returns false. /// This method operates in place, visiting each element exactly once in the /// original order, and preserves the order of the retained elements. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.extend(1..5); /// buf.retain(|&x| x % 2 == 0); /// assert_eq!(buf, [2, 4]); /// ``` /// /// The exact order may be useful for tracking external state, like an index. /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.extend(1..6); /// /// let keep = [false, true, true, false, true]; /// let mut i = 0; /// buf.retain(|_| (keep[i], i += 1).0); /// assert_eq!(buf, [2, 3, 5]); /// ``` #[stable(feature = "vec_deque_retain", since = "1.4.0")] pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool, { let len = self.len(); let mut del = 0; for i in 0..len { if !f(&self[i]) { del += 1; } else if del > 0 { self.swap(i - del, i); } } if del > 0 { self.truncate(len - del); } } // This may panic or abort #[inline(never)] fn grow(&mut self) { if self.is_full() { let old_cap = self.cap(); // Double the buffer size. self.buf.reserve_exact(old_cap, old_cap); assert!(self.cap() == old_cap * 2); unsafe { self.handle_capacity_increase(old_cap); } debug_assert!(!self.is_full()); } } /// Modifies the `VecDeque` in-place so that `len()` is equal to `new_len`, /// either by removing excess elements from the back or by appending /// elements generated by calling `generator` to the back. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(10); /// buf.push_back(15); /// assert_eq!(buf, [5, 10, 15]); /// /// buf.resize_with(5, Default::default); /// assert_eq!(buf, [5, 10, 15, 0, 0]); /// /// buf.resize_with(2, || unreachable!()); /// assert_eq!(buf, [5, 10]); /// /// let mut state = 100; /// buf.resize_with(5, || { state += 1; state }); /// assert_eq!(buf, [5, 10, 101, 102, 103]); /// ``` #[stable(feature = "vec_resize_with", since = "1.33.0")] pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) { let len = self.len(); if new_len > len { self.extend(repeat_with(generator).take(new_len - len)) } else { self.truncate(new_len); } } /// Rearranges the internal storage of this deque so it is one contiguous /// slice, which is then returned. /// /// This method does not allocate and does not change the order of the /// inserted elements. As it returns a mutable slice, this can be used to /// sort a deque. /// /// Once the internal storage is contiguous, the [`as_slices`] and /// [`as_mut_slices`] methods will return the entire contents of the /// `VecDeque` in a single slice. /// /// [`as_slices`]: VecDeque::as_slices /// [`as_mut_slices`]: VecDeque::as_mut_slices /// /// # Examples /// /// Sorting the content of a deque. 
/// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::with_capacity(15); /// /// buf.push_back(2); /// buf.push_back(1); /// buf.push_front(3); /// /// // sorting the deque /// buf.make_contiguous().sort(); /// assert_eq!(buf.as_slices(), (&[1, 2, 3] as &[_], &[] as &[_])); /// /// // sorting it in reverse order /// buf.make_contiguous().sort_by(|a, b| b.cmp(a)); /// assert_eq!(buf.as_slices(), (&[3, 2, 1] as &[_], &[] as &[_])); /// ``` /// /// Getting immutable access to the contiguous slice. /// /// ```rust /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// /// buf.push_back(2); /// buf.push_back(1); /// buf.push_front(3); /// /// buf.make_contiguous(); /// if let (slice, &[]) = buf.as_slices() { /// // we can now be sure that `slice` contains all elements of the deque, /// // while still having immutable access to `buf`. /// assert_eq!(buf.len(), slice.len()); /// assert_eq!(slice, &[3, 2, 1] as &[_]); /// } /// ``` #[stable(feature = "deque_make_contiguous", since = "1.48.0")] pub fn make_contiguous(&mut self) -> &mut [T] { if self.is_contiguous() { let tail = self.tail; let head = self.head; return unsafe { RingSlices::ring_slices(self.buffer_as_mut_slice(), head, tail).0 }; } let buf = self.buf.ptr(); let cap = self.cap(); let len = self.len(); let free = self.tail - self.head; let tail_len = cap - self.tail; if free >= tail_len { // there is enough free space to copy the tail in one go, // this means that we first shift the head backwards, and then // copy the tail to the correct position. // // from: DEFGH....ABC // to: ABCDEFGH.... unsafe { ptr::copy(buf, buf.add(tail_len), self.head); // ...DEFGH.ABC ptr::copy_nonoverlapping(buf.add(self.tail), buf, tail_len); // ABCDEFGH.... self.tail = 0; self.head = len; } } else if free > self.head { // FIXME: We currently do not consider ....ABCDEFGH // to be contiguous because `head` would be `0` in this // case. While we probably want to change this it // isn't trivial as a few places expect `is_contiguous` // to mean that we can just slice using `buf[tail..head]`. // there is enough free space to copy the head in one go, // this means that we first shift the tail forwards, and then // copy the head to the correct position. // // from: FGH....ABCDE // to: ...ABCDEFGH. unsafe { ptr::copy(buf.add(self.tail), buf.add(self.head), tail_len); // FGHABCDE.... ptr::copy_nonoverlapping(buf, buf.add(self.head + tail_len), self.head); // ...ABCDEFGH. self.tail = self.head; self.head = self.wrap_add(self.tail, len); } } else { // free is smaller than both head and tail, // this means we have to slowly "swap" the tail and the head. // // from: EFGHI...ABCD or HIJK.ABCDEFG // to: ABCDEFGHI... or ABCDEFGHIJK. let mut left_edge: usize = 0; let mut right_edge: usize = self.tail; unsafe { // The general problem looks like this // GHIJKLM...ABCDEF - before any swaps // ABCDEFM...GHIJKL - after 1 pass of swaps // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store // - then restart the algorithm with a new (smaller) store // Sometimes the temp store is reached when the right edge is at the end // of the buffer - this means we've hit the right order with fewer swaps! // E.g // EF..ABCD // ABCDEF.. 
- after only four swaps we've finished while left_edge < len && right_edge != cap { let mut right_offset = 0; for i in left_edge..right_edge { right_offset = (i - left_edge) % (cap - right_edge); let src: isize = (right_edge + right_offset) as isize; ptr::swap(buf.add(i), buf.offset(src)); } let n_ops = right_edge - left_edge; left_edge += n_ops; right_edge += right_offset + 1; } self.tail = 0; self.head = len; } } let tail = self.tail; let head = self.head; unsafe { RingSlices::ring_slices(self.buffer_as_mut_slice(), head, tail).0 } } /// Rotates the double-ended queue `mid` places to the left. /// /// Equivalently, /// - Rotates item `mid` into the first position. /// - Pops the first `mid` items and pushes them to the end. /// - Rotates `len() - mid` places to the right. /// /// # Panics /// /// If `mid` is greater than `len()`. Note that `mid == len()` /// does _not_ panic and is a no-op rotation. /// /// # Complexity /// /// Takes `*O*(min(mid, len() - mid))` time and no extra space. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<_> = (0..10).collect(); /// /// buf.rotate_left(3); /// assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]); /// /// for i in 1..10 { /// assert_eq!(i * 3 % 10, buf[0]); /// buf.rotate_left(3); /// } /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); /// ``` #[stable(feature = "vecdeque_rotate", since = "1.36.0")] pub fn rotate_left(&mut self, mid: usize) { assert!(mid <= self.len()); let k = self.len() - mid; if mid <= k { unsafe { self.rotate_left_inner(mid) } } else { unsafe { self.rotate_right_inner(k) } } } /// Rotates the double-ended queue `k` places to the right. /// /// Equivalently, /// - Rotates the first item into position `k`. /// - Pops the last `k` items and pushes them to the front. /// - Rotates `len() - k` places to the left. /// /// # Panics /// /// If `k` is greater than `len()`. Note that `k == len()` /// does _not_ panic and is a no-op rotation. /// /// # Complexity /// /// Takes `*O*(min(k, len() - k))` time and no extra space. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf: VecDeque<_> = (0..10).collect(); /// /// buf.rotate_right(3); /// assert_eq!(buf, [7, 8, 9, 0, 1, 2, 3, 4, 5, 6]); /// /// for i in 1..10 { /// assert_eq!(0, buf[i * 3 % 10]); /// buf.rotate_right(3); /// } /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); /// ``` #[stable(feature = "vecdeque_rotate", since = "1.36.0")] pub fn rotate_right(&mut self, k: usize) { assert!(k <= self.len()); let mid = self.len() - k; if k <= mid { unsafe { self.rotate_right_inner(k) } } else { unsafe { self.rotate_left_inner(mid) } } } // SAFETY: the following two methods require that the rotation amount // be less than half the length of the deque. // // `wrap_copy` requires that `min(x, cap() - x) + copy_len <= cap()`, // but then `min` is never more than half the capacity, regardless of x, // so it's sound to call here because we're calling with something // less than half the length, which is never above half the capacity.
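// Editor's aside (illustrative numbers, not from the original source): with
// len = 10 and cap = 16, the rotation amount passed to these methods is at
// most len / 2 = 5, while min(x, cap - x) is at most cap / 2 = 8, so
// min(x, cap() - x) + copy_len <= 8 + 5 = 13 <= 16 = cap() indeed holds.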
unsafe fn rotate_left_inner(&mut self, mid: usize) { debug_assert!(mid * 2 <= self.len()); unsafe { self.wrap_copy(self.head, self.tail, mid); } self.head = self.wrap_add(self.head, mid); self.tail = self.wrap_add(self.tail, mid); } unsafe fn rotate_right_inner(&mut self, k: usize) { debug_assert!(k * 2 <= self.len()); self.head = self.wrap_sub(self.head, k); self.tail = self.wrap_sub(self.tail, k); unsafe { self.wrap_copy(self.tail, self.head, k); } } /// Binary searches this sorted `VecDeque` for a given element. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// # Examples /// /// Looks up a series of four elements. The first is found, with a /// uniquely determined position; the second and third are not /// found; the fourth could match any position in `[1, 4]`. /// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); /// /// assert_eq!(deque.binary_search(&13), Ok(9)); /// assert_eq!(deque.binary_search(&4), Err(7)); /// assert_eq!(deque.binary_search(&100), Err(13)); /// let r = deque.binary_search(&1); /// assert!(matches!(r, Ok(1..=4))); /// ``` /// /// If you want to insert an item to a sorted `VecDeque`, while maintaining /// sort order: /// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let mut deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); /// let num = 42; /// let idx = deque.binary_search(&num).unwrap_or_else(|x| x); /// deque.insert(idx, num); /// assert_eq!(deque, &[0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]); /// ``` #[unstable(feature = "vecdeque_binary_search", issue = "78021")] #[inline] pub fn binary_search(&self, x: &T) -> Result<usize, usize> where T: Ord, { self.binary_search_by(|e| e.cmp(x)) } /// Binary searches this sorted `VecDeque` with a comparator function. /// /// The comparator function should implement an order consistent /// with the sort order of the underlying `VecDeque`, returning an /// order code that indicates whether its argument is `Less`, /// `Equal` or `Greater` than the desired target. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// # Examples /// /// Looks up a series of four elements. The first is found, with a /// uniquely determined position; the second and third are not /// found; the fourth could match any position in `[1, 4]`. 
/// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); /// /// assert_eq!(deque.binary_search_by(|x| x.cmp(&13)), Ok(9)); /// assert_eq!(deque.binary_search_by(|x| x.cmp(&4)), Err(7)); /// assert_eq!(deque.binary_search_by(|x| x.cmp(&100)), Err(13)); /// let r = deque.binary_search_by(|x| x.cmp(&1)); /// assert!(matches!(r, Ok(1..=4))); /// ``` #[unstable(feature = "vecdeque_binary_search", issue = "78021")] pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize> where F: FnMut(&'a T) -> Ordering, { let (front, back) = self.as_slices(); if let Some(Ordering::Less | Ordering::Equal) = back.first().map(|elem| f(elem)) { back.binary_search_by(f).map(|idx| idx + front.len()).map_err(|idx| idx + front.len()) } else { front.binary_search_by(f) } } /// Binary searches this sorted `VecDeque` with a key extraction function. /// /// Assumes that the `VecDeque` is sorted by the key, for instance with /// [`make_contiguous().sort_by_key()`](#method.make_contiguous) using the same /// key extraction function. /// /// If the value is found then [`Result::Ok`] is returned, containing the /// index of the matching element. If there are multiple matches, then any /// one of the matches could be returned. If the value is not found then /// [`Result::Err`] is returned, containing the index where a matching /// element could be inserted while maintaining sorted order. /// /// # Examples /// /// Looks up a series of four elements in a slice of pairs sorted by /// their second elements. The first is found, with a uniquely /// determined position; the second and third are not found; the /// fourth could match any position in `[1, 4]`. /// /// ``` /// #![feature(vecdeque_binary_search)] /// use std::collections::VecDeque; /// /// let deque: VecDeque<_> = vec![(0, 0), (2, 1), (4, 1), (5, 1), /// (3, 1), (1, 2), (2, 3), (4, 5), (5, 8), (3, 13), /// (1, 21), (2, 34), (4, 55)].into(); /// /// assert_eq!(deque.binary_search_by_key(&13, |&(a, b)| b), Ok(9)); /// assert_eq!(deque.binary_search_by_key(&4, |&(a, b)| b), Err(7)); /// assert_eq!(deque.binary_search_by_key(&100, |&(a, b)| b), Err(13)); /// let r = deque.binary_search_by_key(&1, |&(a, b)| b); /// assert!(matches!(r, Ok(1..=4))); /// ``` #[unstable(feature = "vecdeque_binary_search", issue = "78021")] #[inline] pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize> where F: FnMut(&'a T) -> B, B: Ord, { self.binary_search_by(|k| f(k).cmp(b)) } } impl<T: Clone> VecDeque<T> { /// Modifies the `VecDeque` in-place so that `len()` is equal to new_len, /// either by removing excess elements from the back or by appending clones of `value` /// to the back. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// let mut buf = VecDeque::new(); /// buf.push_back(5); /// buf.push_back(10); /// buf.push_back(15); /// assert_eq!(buf, [5, 10, 15]); /// /// buf.resize(2, 0); /// assert_eq!(buf, [5, 10]); /// /// buf.resize(5, 20); /// assert_eq!(buf, [5, 10, 20, 20, 20]); /// ``` #[stable(feature = "deque_extras", since = "1.16.0")] pub fn resize(&mut self, new_len: usize, value: T) { self.resize_with(new_len, || value.clone()); } } /// Returns the index in the underlying buffer for a given logical element index. 
#[inline] fn wrap_index(index: usize, size: usize) -> usize { // size is always a power of 2 debug_assert!(size.is_power_of_two()); index & (size - 1) } /// Calculate the number of elements left to be read in the buffer #[inline] fn count(tail: usize, head: usize, size: usize) -> usize { // size is always a power of 2 (head.wrapping_sub(tail)) & (size - 1) } #[stable(feature = "rust1", since = "1.0.0")] impl<A: PartialEq> PartialEq for VecDeque<A> { fn eq(&self, other: &VecDeque<A>) -> bool { if self.len() != other.len() { return false; } let (sa, sb) = self.as_slices(); let (oa, ob) = other.as_slices(); if sa.len() == oa.len() { sa == oa && sb == ob } else if sa.len() < oa.len() { // Always divisible in three sections, for example: // self: [a b c|d e f] // other: [0 1 2 3|4 5] // front = 3, mid = 1, // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5] let front = sa.len(); let mid = oa.len() - front; let (oa_front, oa_mid) = oa.split_at(front); let (sb_mid, sb_back) = sb.split_at(mid); debug_assert_eq!(sa.len(), oa_front.len()); debug_assert_eq!(sb_mid.len(), oa_mid.len()); debug_assert_eq!(sb_back.len(), ob.len()); sa == oa_front && sb_mid == oa_mid && sb_back == ob } else { let front = oa.len(); let mid = sa.len() - front; let (sa_front, sa_mid) = sa.split_at(front); let (ob_mid, ob_back) = ob.split_at(mid); debug_assert_eq!(sa_front.len(), oa.len()); debug_assert_eq!(sa_mid.len(), ob_mid.len()); debug_assert_eq!(sb.len(), ob_back.len()); sa_front == oa && sa_mid == ob_mid && sb == ob_back } } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Eq> Eq for VecDeque<A> {} __impl_slice_eq1! { [] VecDeque<A>, Vec<B>, } __impl_slice_eq1! { [] VecDeque<A>, &[B], } __impl_slice_eq1! { [] VecDeque<A>, &mut [B], } __impl_slice_eq1! { [const N: usize] VecDeque<A>, [B; N], } __impl_slice_eq1! { [const N: usize] VecDeque<A>, &[B; N], } __impl_slice_eq1! { [const N: usize] VecDeque<A>, &mut [B; N], } #[stable(feature = "rust1", since = "1.0.0")] impl<A: PartialOrd> PartialOrd for VecDeque<A> { fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> { self.iter().partial_cmp(other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Ord> Ord for VecDeque<A> { #[inline] fn cmp(&self, other: &VecDeque<A>) -> Ordering { self.iter().cmp(other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<A: Hash> Hash for VecDeque<A> { fn hash<H: Hasher>(&self, state: &mut H) { self.len().hash(state); // It's not possible to use Hash::hash_slice on slices // returned by as_slices method as their length can vary // in otherwise identical deques. // // Hasher only guarantees equivalence for the exact same // set of calls to its methods. 
self.iter().for_each(|elem| elem.hash(state)); } } #[stable(feature = "rust1", since = "1.0.0")] impl<A> Index<usize> for VecDeque<A> { type Output = A; #[inline] fn index(&self, index: usize) -> &A { self.get(index).expect("Out of bounds access") } } #[stable(feature = "rust1", since = "1.0.0")] impl<A> IndexMut<usize> for VecDeque<A> { #[inline] fn index_mut(&mut self, index: usize) -> &mut A { self.get_mut(index).expect("Out of bounds access") } } #[stable(feature = "rust1", since = "1.0.0")] impl<A> FromIterator<A> for VecDeque<A> { fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> VecDeque<A> { let iterator = iter.into_iter(); let (lower, _) = iterator.size_hint(); let mut deq = VecDeque::with_capacity(lower); deq.extend(iterator); deq } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> IntoIterator for VecDeque<T> { type Item = T; type IntoIter = IntoIter<T>; /// Consumes the `VecDeque` into a front-to-back iterator yielding elements by /// value. fn into_iter(self) -> IntoIter<T> { IntoIter { inner: self } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a VecDeque<T> { type Item = &'a T; type IntoIter = Iter<'a, T>; fn into_iter(self) -> Iter<'a, T> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a mut VecDeque<T> { type Item = &'a mut T; type IntoIter = IterMut<'a, T>; fn into_iter(self) -> IterMut<'a, T> { self.iter_mut() } } #[stable(feature = "rust1", since = "1.0.0")] impl<A> Extend<A> for VecDeque<A> { fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) { // This function should be the moral equivalent of: // // for item in iter.into_iter() { // self.push_back(item); // } let mut iter = iter.into_iter(); while let Some(element) = iter.next() { if self.len() == self.capacity() { let (lower, _) = iter.size_hint(); self.reserve(lower.saturating_add(1)); } let head = self.head; self.head = self.wrap_add(self.head, 1); unsafe { self.buffer_write(head, element); } } } #[inline] fn extend_one(&mut self, elem: A) { self.push_back(elem); } #[inline] fn extend_reserve(&mut self, additional: usize) { self.reserve(additional); } } #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque<T> { fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } #[inline] fn extend_one(&mut self, &elem: &T) { self.push_back(elem); } #[inline] fn extend_reserve(&mut self, additional: usize) { self.reserve(additional); } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: fmt::Debug> fmt::Debug for VecDeque<T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self).finish() } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] impl<T> From<Vec<T>> for VecDeque<T> { /// Turn a [`Vec<T>`] into a [`VecDeque<T>`]. /// /// [`Vec<T>`]: crate::vec::Vec /// [`VecDeque<T>`]: crate::collections::VecDeque /// /// This avoids reallocating where possible, but the conditions for that are /// strict, and subject to change, and so shouldn't be relied upon unless the /// `Vec<T>` came from `From<VecDeque<T>>` and hasn't been reallocated. fn from(other: Vec<T>) -> Self { unsafe { let mut other = ManuallyDrop::new(other); let other_buf = other.as_mut_ptr(); let mut buf = RawVec::from_raw_parts(other_buf, other.capacity()); let len = other.len(); // We need to extend the buf if it's not a power of two, too small // or doesn't have at least one free space. 
// We check if `T` is a ZST in the first condition, // because `usize::MAX` (the capacity returned by `capacity()` for ZST) // is not a power of two and thus it'll always try // to reserve more memory which will panic for ZST (rust-lang/rust#78532) if (!buf.capacity().is_power_of_two() && mem::size_of::<T>() != 0) || (buf.capacity() < (MINIMUM_CAPACITY + 1)) || (buf.capacity() == len) { let cap = cmp::max(buf.capacity() + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); buf.reserve_exact(len, cap - len); } VecDeque { tail: 0, head: len, buf } } } } #[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] impl<T> From<VecDeque<T>> for Vec<T> { /// Turn a [`VecDeque<T>`] into a [`Vec<T>`]. /// /// [`Vec<T>`]: crate::vec::Vec /// [`VecDeque<T>`]: crate::collections::VecDeque /// /// This never needs to re-allocate, but does need to do *O*(*n*) data movement if /// the circular buffer doesn't happen to be at the beginning of the allocation. /// /// # Examples /// /// ``` /// use std::collections::VecDeque; /// /// // This one is *O*(1). /// let deque: VecDeque<_> = (1..5).collect(); /// let ptr = deque.as_slices().0.as_ptr(); /// let vec = Vec::from(deque); /// assert_eq!(vec, [1, 2, 3, 4]); /// assert_eq!(vec.as_ptr(), ptr); /// /// // This one needs data rearranging. /// let mut deque: VecDeque<_> = (1..5).collect(); /// deque.push_front(9); /// deque.push_front(8); /// let ptr = deque.as_slices().1.as_ptr(); /// let vec = Vec::from(deque); /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]); /// assert_eq!(vec.as_ptr(), ptr); /// ``` fn from(mut other: VecDeque<T>) -> Self { other.make_contiguous(); unsafe { let other = ManuallyDrop::new(other); let buf = other.buf.ptr(); let len = other.len(); let cap = other.cap(); if other.tail != 0 { ptr::copy(buf.add(other.tail), buf, len); } Vec::from_raw_parts(buf, len, cap) } } }
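// Editor's sketch: a minimal, self-contained illustration of the
// power-of-two index wrapping that `wrap_index` above relies on, and the
// reason `From<Vec<T>>` grows the buffer to a power of two.
// `wrap_index_demo` is a hypothetical name, not part of this module.
fn wrap_index_demo(index: usize, size: usize) -> usize {
    debug_assert!(size.is_power_of_two());
    index & (size - 1) // e.g. wrap_index_demo(13, 8) == 5 == 13 % 8
}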
// it. We do not write to `self` nor reborrow to a mutable reference. // Hence the raw pointer we created above, for `deque`, remains valid. ring: unsafe { self.buffer_as_slice() },
main.rs
mod attestation_producer; mod block_producer; mod config; mod duties; pub mod error; mod service; mod signer; use crate::config::Config as ValidatorClientConfig; use crate::service::Service as ValidatorService; use clap::{App, Arg}; use eth2_config::{get_data_dir, read_from_file, write_to_file, Eth2Config}; use protos::services_grpc::ValidatorServiceClient; use slog::{crit, error, info, o, Drain}; use std::path::PathBuf; use types::{Keypair, MainnetEthSpec, MinimalEthSpec}; pub const DEFAULT_SPEC: &str = "minimal"; pub const DEFAULT_DATA_DIR: &str = ".lighthouse-validator"; pub const CLIENT_CONFIG_FILENAME: &str = "validator-client.toml"; pub const ETH2_CONFIG_FILENAME: &str = "eth2-spec.toml"; fn
() { // Logging let decorator = slog_term::TermDecorator::new().build(); let drain = slog_term::CompactFormat::new(decorator).build().fuse(); let drain = slog_async::Async::new(drain).build().fuse(); let log = slog::Logger::root(drain, o!()); // CLI let matches = App::new("Lighthouse Validator Client") .version("0.0.1") .author("Sigma Prime <[email protected]>") .about("Eth 2.0 Validator Client") .arg( Arg::with_name("datadir") .long("datadir") .value_name("DIR") .help("Data directory for keys and databases.") .takes_value(true), ) .arg( Arg::with_name("eth2-spec") .long("eth2-spec") .short("e") .value_name("TOML_FILE") .help("Path to Ethereum 2.0 specifications file.") .takes_value(true), ) .arg( Arg::with_name("server") .long("server") .value_name("server") .help("Address to connect to BeaconNode.") .takes_value(true), ) .arg( Arg::with_name("spec-constants") .long("spec-constants") .value_name("TITLE") .short("s") .help("The title of the spec constants for chain config.") .takes_value(true) .possible_values(&["mainnet", "minimal"]) .default_value("minimal"), ) .get_matches(); let data_dir = match get_data_dir(&matches, PathBuf::from(DEFAULT_DATA_DIR)) { Ok(dir) => dir, Err(e) => { crit!(log, "Failed to initialize data dir"; "error" => format!("{:?}", e)); return; } }; let client_config_path = data_dir.join(CLIENT_CONFIG_FILENAME); // Attempt to load the `ClientConfig` from disk. // // If the file doesn't exist, create a new, default one. let mut client_config = match read_from_file::<ValidatorClientConfig>( client_config_path.clone(), ) { Ok(Some(c)) => c, Ok(None) => { let default = ValidatorClientConfig::default(); if let Err(e) = write_to_file(client_config_path.clone(), &default) { crit!(log, "Failed to write default ClientConfig to file"; "error" => format!("{:?}", e)); return; } default } Err(e) => { crit!(log, "Failed to load a ChainConfig file"; "error" => format!("{:?}", e)); return; } }; // Ensure the `data_dir` in the config matches that supplied to the CLI. client_config.data_dir = data_dir.clone(); // Update the client config with any CLI args. match client_config.apply_cli_args(&matches) { Ok(()) => (), Err(s) => { crit!(log, "Failed to parse ClientConfig CLI arguments"; "error" => s); return; } }; let eth2_config_path: PathBuf = matches .value_of("eth2-spec") .and_then(|s| Some(PathBuf::from(s))) .unwrap_or_else(|| data_dir.join(ETH2_CONFIG_FILENAME)); // Attempt to load the `Eth2Config` from file. // // If the file doesn't exist, create a default one depending on the CLI flags. let mut eth2_config = match read_from_file::<Eth2Config>(eth2_config_path.clone()) { Ok(Some(c)) => c, Ok(None) => { let default = match matches.value_of("spec-constants") { Some("mainnet") => Eth2Config::mainnet(), Some("minimal") => Eth2Config::minimal(), _ => unreachable!(), // Guarded by clap's `possible_values`. }; if let Err(e) = write_to_file(eth2_config_path, &default) { crit!(log, "Failed to write default Eth2Config to file"; "error" => format!("{:?}", e)); return; } default } Err(e) => { crit!(log, "Failed to instantiate an Eth2Config"; "error" => format!("{:?}", e)); return; } }; // Update the eth2 config with any CLI flags.
match eth2_config.apply_cli_args(&matches) { Ok(()) => (), Err(s) => { crit!(log, "Failed to parse Eth2Config CLI arguments"; "error" => s); return; } }; info!( log, "Starting validator client"; "datadir" => client_config.data_dir.to_str(), "spec_constants" => &eth2_config.spec_constants, ); let result = match eth2_config.spec_constants.as_str() { "mainnet" => ValidatorService::<ValidatorServiceClient, Keypair>::start::<MainnetEthSpec>( client_config, eth2_config, log.clone(), ), "minimal" => ValidatorService::<ValidatorServiceClient, Keypair>::start::<MinimalEthSpec>( client_config, eth2_config, log.clone(), ), other => { crit!(log, "Unknown spec constants"; "title" => other); return; } }; // Start the validator service. The type parameters select the gRPC client // used to reach the beacon node and the signer type used by the duty manager. match result { Ok(_) => info!(log, "Validator client shut down successfully."), Err(e) => crit!(log, "Validator client exited with error"; "error" => e.to_string()), } }
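// Editor's sketch of the load-or-create-default config pattern used above,
// reduced to std-only code. `load_or_default` and its signature are
// illustrative assumptions, not part of Lighthouse.
use std::{fs, io, path::Path};

fn load_or_default(path: &Path, default: &str) -> io::Result<String> {
    match fs::read_to_string(path) {
        Ok(contents) => Ok(contents),
        // File missing: persist the default so the next run finds it.
        Err(e) if e.kind() == io::ErrorKind::NotFound => {
            fs::write(path, default)?;
            Ok(default.to_string())
        }
        Err(e) => Err(e),
    }
}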
main
test262.rs
#![feature(box_syntax)] #![feature(specialization)] #![feature(test)] extern crate test; use std::{ env, fs::{read_dir, File}, io::{self, Read, Write}, path::Path, sync::{Arc, RwLock}, }; use swc_common::comments::Comments; use swc_ecma_codegen::{self, Emitter}; use swc_ecma_parser::{lexer::Lexer, Parser, Session, SourceFileInput, Syntax}; use test::{ test_main, DynTestFn, Options, ShouldPanic::No, TestDesc, TestDescAndFn, TestName, TestType, }; use testing::NormalizedOutput; const IGNORED_PASS_TESTS: &[&str] = &[ // Temporarily ignored "431ecef8c85d4d24.js", "8386fbff927a9e0e.js", "5654d4106d7025c2.js", // Wrong tests (variable name or value is different) "0339fa95c78c11bd.js", "0426f15dac46e92d.js", "0b4d61559ccce0f9.js", "0f88c334715d2489.js", "1093d98f5fc0758d.js", "15d9592709b947a0.js", "2179895ec5cc6276.js", "247a3a57e8176ebd.js", "441a92357939904a.js", "47f974d6fc52e3e4.js", "4e1a0da46ca45afe.js", "5829d742ab805866.js", "589dc8ad3b9aa28f.js", "598a5cedba92154d.js", "72d79750e81ef03d.js", "7788d3c1e1247da9.js", "7b72d7b43bedc895.js", "7dab6e55461806c9.js", "82c827ccaecbe22b.js", "87a9b0d1d80812cc.js", "8c80f7ee04352eba.js", "96f5d93be9a54573.js", "988e362ed9ddcac5.js", "9bcae7c7f00b4e3c.js", "a8a03a88237c4e8f.js", "ad06370e34811a6a.js", "b0fdc038ee292aba.js", "b62c6dd890bef675.js", "cb211fadccb029c7.js", "ce968fcdf3a1987c.js", "db3c01738aaf0b92.js", "e1387fe892984e2b.js", "e71c1d5f0b6b833c.js", "e8ea384458526db0.js", // We don't implement Annex B fully. "1c1e2a43fe5515b6.js", "3dabeca76119d501.js", "52aeec7b8da212a2.js", "59ae0289778b80cd.js", "a4d62a651f69d815.js", "c06df922631aeabc.js", ]; fn add_test<F: FnOnce() + Send + 'static>( tests: &mut Vec<TestDescAndFn>, name: String, ignore: bool, f: F, ) { tests.push(TestDescAndFn { desc: TestDesc { test_type: TestType::UnitTest, name: TestName::DynTestName(name), ignore, should_panic: No, allow_fail: false, }, testfn: DynTestFn(box f), }); } struct MyHandlers; impl swc_ecma_codegen::Handlers for MyHandlers {} fn error_tests(tests: &mut Vec<TestDescAndFn>) -> Result<(), io::Error> { let ref_dir = Path::new(env!("CARGO_MANIFEST_DIR")) .join("tests") .join("references"); let dir = Path::new(env!("CARGO_MANIFEST_DIR")) .parent() .unwrap() .join("parser") .join("tests") .join("test262-parser") .join("pass"); eprintln!("Loading tests from {}", dir.display()); for entry in read_dir(&dir).expect("failed to read directory") { let entry = entry?; let file_name = entry .path() .strip_prefix(&dir) .expect("failed to strip prefix") .to_str() .expect("to_str() failed") .to_string(); let input = { let mut buf = String::new(); File::open(entry.path())?.read_to_string(&mut buf)?; buf }; let ignore = IGNORED_PASS_TESTS.contains(&&*file_name); let module = file_name.contains("module"); let ref_dir = ref_dir.clone(); let name = format!("test262::golden::{}", file_name); add_test(tests, name, ignore, move || { let msg = format!( "\n\n========== Running codegen test {}\nSource:\n{}\n", file_name, input ); let mut wr = Buf(Arc::new(RwLock::new(vec![]))); ::testing::run_test(false, |cm, handler| { let src = cm.load_file(&entry.path()).expect("failed to load file"); eprintln!( "{}\nPos: {:?} ~ {:?} (L{})", msg, src.start_pos, src.end_pos, src.count_lines() ); let comments = Comments::default(); let handlers = box MyHandlers; let mut parser: Parser<'_, Lexer<'_, SourceFileInput<'_>>> = Parser::new( Session { handler: &handler }, Syntax::default(), (&*src).into(), Some(&comments), ); { let mut emitter = Emitter { cfg: Default::default(), cm: cm.clone(), wr: box
swc_ecma_codegen::text_writer::JsWriter::new( cm, "\n", &mut wr, None, ), comments: Some(&comments), handlers, pos_of_leading_comments: Default::default(), }; // Parse source if module { emitter .emit_module(&parser.parse_module().map_err(|mut e| { e.emit(); })?) .unwrap(); } else { emitter .emit_script(&parser.parse_script().map_err(|mut e| { e.emit(); })?) .unwrap(); } } let ref_file = format!("{}", ref_dir.join(&file_name).display()); let code_output = wr.0.read().unwrap(); let with_srcmap = NormalizedOutput::from(String::from_utf8_lossy(&code_output).into_owned()); with_srcmap.compare_to_file(ref_file).unwrap(); Ok(()) }) .expect("failed to run test"); }); } Ok(()) } #[test] fn identity() { let args: Vec<_> = env::args().collect(); let mut tests = Vec::new(); error_tests(&mut tests).expect("failed to load tests"); test_main(&args, tests, Some(Options::new())); } #[derive(Debug, Clone)] struct Buf(Arc<RwLock<Vec<u8>>>); impl Write for Buf { fn write(&mut self, data: &[u8]) -> io::Result<usize> { self.0.write().unwrap().write(data) } fn flush(&mut self) -> io::Result<()>
}
{ self.0.write().unwrap().flush() }
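// Editor's aside: a minimal, self-contained version of the shared-buffer
// writer pattern that `Buf` implements above (std-only; `SharedBuf` is an
// illustrative name, not from the original crate). Cloning shares the same
// underlying buffer, which is what lets the test read back what was emitted.
use std::io::{self, Write};
use std::sync::{Arc, RwLock};

#[derive(Clone)]
struct SharedBuf(Arc<RwLock<Vec<u8>>>);

impl Write for SharedBuf {
    fn write(&mut self, data: &[u8]) -> io::Result<usize> {
        self.0.write().unwrap().write(data) // Vec<u8> implements Write
    }
    fn flush(&mut self) -> io::Result<()> {
        self.0.write().unwrap().flush()
    }
}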
trainer_dist_adapter.py
from torch.nn.parallel import DistributedDataParallel as DDP import torch.distributed as dist from .fedml_trainer import FedMLTrainer from .process_group_manager import ProcessGroupManager from .trainer.my_model_trainer_classification import MyModelTrainer as MyModelTrainerCLS from .trainer.my_model_trainer_nwp import MyModelTrainer as MyModelTrainerNWP from .trainer.my_model_trainer_tag_prediction import MyModelTrainer as MyModelTrainerTAG from ...utils.logging import logger # import torch # import time # from ...standalone.fedavg.my_model_trainer_classification import MyModelTrainer as MyModelTrainerCLS # from ...standalone.fedavg.my_model_trainer_nwp import MyModelTrainer as MyModelTrainerNWP # from ...standalone.fedavg.my_model_trainer_tag_prediction import MyModelTrainer as MyModelTrainerTAG # from .process_group_manager import ProcessGroupManager # from .utils import transform_list_to_tensor, post_complete_message_to_sweep_process # from .message_define import MyMessage # import logging # import os # import sys # sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), "../../../"))) # sys.path.insert(0, os.path.abspath( # os.path.join(os.getcwd(), "../../../../FedML"))) # try: # from fedml_core.distributed.client.client_manager import ClientManager # from fedml_core.distributed.communication.message import Message # from fedml_core.distributed.communication.utils import log_round_start, log_round_end # except ImportError: # from fedml_core.distributed.client.client_manager import ClientManager # from fedml_core.distributed.communication.message import Message # from fedml_core.distributed.communication.utils import log_round_start, log_round_end class
: def __init__( self, args, device, client_rank, model, train_data_num, train_data_local_num_dict, train_data_local_dict, test_data_local_dict, model_trainer=None, ): only_gpu = args.using_gpu self.process_group_manager = ProcessGroupManager( args.silo_proc_rank, args.silo_proc_num, args.pg_master_address, args.pg_master_port, only_gpu ) # if not args.is_mobile: model.to(device) model = DDP(model, device_ids=[device] if only_gpu else None) client_index = client_rank - 1 if model_trainer is None: model_trainer = self.get_model_trainer(model, args) model_trainer.set_id(client_index) logger.info("Initiating Trainer") trainer = self.get_trainer( client_index, train_data_local_dict, train_data_local_num_dict, test_data_local_dict, train_data_num, device, args, model_trainer, ) self.client_index = client_index self.client_rank = client_rank self.device = device self.trainer = trainer self.args = args def get_trainer( self, client_index, train_data_local_dict, train_data_local_num_dict, test_data_local_dict, train_data_num, device, args, model_trainer, ): return FedMLTrainer( client_index, train_data_local_dict, train_data_local_num_dict, test_data_local_dict, train_data_num, device, args, model_trainer, ) def get_model_trainer(self, model, args): if args.dataset == "stackoverflow_lr": model_trainer = MyModelTrainerTAG(model, args, args.enable_cuda_rpc) elif args.dataset in ["fed_shakespeare", "stackoverflow_nwp"]: model_trainer = MyModelTrainerNWP(model, args, args.enable_cuda_rpc) else: # default model trainer is for classification problem model_trainer = MyModelTrainerCLS(model, args, args.enable_cuda_rpc) return model_trainer def train(self, round_idx): # log_round_start(self.client_rank, round_idx) dist.barrier() weights, local_sample_num = self.trainer.train(round_idx) return weights, local_sample_num def update_model(self, model_params): self.trainer.update_model(model_params) def update_dataset(self, client_index=None): # `or` would wrongly fall back when client_index == 0, so test for None. _client_index = client_index if client_index is not None else self.client_index self.trainer.update_dataset(int(_client_index)) def cleanup_pg(self): logger.info( "Cleaning up process group for client %s in silo %s" % ( self.args.silo_proc_rank, self.args.client_rank) ) self.process_group_manager.cleanup()
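# Editor's sketch (hedged, torch-only) of the DDP wrapping step performed in
# __init__ above; `wrap_model` is an illustrative helper, not part of FedML.
# Note: torch.distributed.init_process_group must have been called before
# constructing DDP, which is what ProcessGroupManager arranges for this class.
from torch.nn.parallel import DistributedDataParallel as DDP

def wrap_model(model, device, only_gpu):
    model.to(device)
    # device_ids is only meaningful for single-GPU processes; for CPU
    # training it must be None.
    return DDP(model, device_ids=[device] if only_gpu else None)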
TrainerDistAdapter
iact_dnn_utils.py
import numpy as np import h5py import time import os # functions (to be moved to utils.py) def add_meta_keys(fn, pars_keys, image_keys=[]): with h5py.File(fn, 'r') as f: for key in f.keys(): if key not in pars_keys and key not in image_keys: pars_keys.append(key) return 0 def get_square_images_fn(cdict, file_number=None): # in the future: load the first n_events from a file with more images event_type = cdict['event_type'] n_events = cdict['n_events'] mode = cdict['mode'] Etrue_min = cdict['Etrue_min'] fn = '%s_%i_images_%s' % (event_type, n_events, mode) if Etrue_min is not None and Etrue_min != 'None': fn += '_Etrue_min%.1fTeV' % Etrue_min if cdict.get('tel') != None: fn += '_%s' % cdict['tel'] if file_number is not None: fn += '_file%i' % file_number fn += '.h5' return fn def get_images_fns(cdict, folder=None, exists=False, nfiles=200): ev_types = cdict['model_events'] #n_events = cdict['n_events'] #n_events_tot = cdict.get('n_events_tot', None) #if n_events_tot == None: # n_events_tot = n_events #nfiles = int(n_events_tot / n_events) out_dict = {} for k, event_type in enumerate(ev_types): cdict['event_type'] = event_type out_dict[event_type] = [get_square_images_fn(cdict, file_number=j+1) for j in range(nfiles)] if folder is not None and exists: out_dict[event_type] = [fn for fn in out_dict[event_type] if os.path.isfile(folder + fn)] return out_dict def get_zeta_fns(cdict, folder=None, exists=False): out_dict = get_images_fns(cdict) for key in out_dict.keys(): out_dict[key] = [fn.replace('.h5', '_zeta.h5') for fn in out_dict[key]] if folder is not None and exists: out_dict[key] = [fn for fn in out_dict[key] if os.path.isfile(folder + fn)] return out_dict def load_images(folder, cdict): ev_types = cdict['model_events'] n_events = cdict['n_events'] n_events_tot = cdict.get('n_events_tot', None) if n_events_tot == None: n_events_tot = n_events nfiles = int(n_events_tot / n_events)
print('load images') for k, event_type in enumerate(ev_types): print('load %s images' % event_type) cdict['event_type'] = event_type for j in range(nfiles): fn = folder + get_square_images_fn(cdict, file_number=j+1) with h5py.File(fn, 'r') as f: if k == 0 and j == 0: dims = f[data_key].shape out_dims = list(dims) out_dims[0] = n_events_tot * len(ev_types) images = np.zeros(out_dims, dtype=np.float32) ind_start = n_events_tot * k + dims[0] * j ind_end = n_events_tot * k + dims[0] * j + dims[0] fill_inds = list(range(ind_start, ind_end)) images[fill_inds] = f[data_key][:] return images def load_images_from_file(fn, key): with h5py.File(fn, 'r') as f: return f[key][:] def get_group_key(key, f): for gkey in f.keys(): if type(f[gkey]) != h5py._hl.dataset.Dataset and key in f[gkey].keys(): return gkey return None def load_meta_data(folder, cdict): ev_types = cdict['model_events'] n_events = cdict['n_events'] n_events_tot = cdict.get('n_events_tot', None) if n_events_tot == None: n_events_tot = n_events nfiles = int(n_events_tot / n_events) data_key = cdict['data_key'] pars_keys = cdict['pars_keys'] print('load meta data') for k, event_type in enumerate(ev_types): print(event_type) cdict['event_type'] = event_type for j in range(nfiles): fn = folder + get_square_images_fn(cdict, file_number=j+1) with h5py.File(fn, 'r') as f: if k == 0 and j == 0: pars_dict = {} for key in pars_keys: gkey = get_group_key(key, f) if gkey is None: dims = [n_events] out_dims = n_events_tot * len(ev_types) else: dims = f[gkey][key].shape out_dims = list(dims) out_dims[0] = n_events_tot * len(ev_types) pars_dict[key] = np.zeros(out_dims, dtype=np.float32) ind_start = n_events_tot * k + dims[0] * j ind_end = n_events_tot * k + dims[0] * j + dims[0] fill_inds = list(range(ind_start, ind_end)) for key in pars_dict.keys(): gkey = get_group_key(key, f) if key == 'CR_type': pars_dict[key][fill_inds] += int(event_type != 'proton') elif gkey is not None: pars_dict[key][fill_inds] = f[gkey][key][:] else: pass return pars_dict def load_metadata_from_file(fn, key, event_type=None): with h5py.File(fn, 'r') as f: gkey = get_group_key(key, f) if key == 'CR_type' and event_type is not None: return int(event_type != 'proton') elif gkey is not None: return f[gkey][key][:] else: return None # crop images def get_min_max_inds(center, half_size, nmax): imin = np.ceil(center - half_size) imax = np.ceil(center + half_size) shift = -imin * np.heaviside(-imin, 0.) - (imax - nmax) * np.heaviside(imax - nmax, 0.) imin = (imin + shift).astype(int) imax = (imax + shift).astype(int) return imin, imax, shift def crop_images(images, size, test=False, crop_fraction=0.03, boundary=5): t0 = time.time() di = 0.5 * size nn, nx, ny = images.shape res_arr = np.zeros((nn, size, size)) norm = np.sum(images, axis=(1,2)) + 1.e-15 t1 = time.time() # center of gravity along x ix = np.sum(np.sum(images, axis=2) * np.arange(nx), axis=1) / norm ix_min, ix_max, x_shift = get_min_max_inds(ix, di, nx) # center of gravity along y iy = np.sum(np.sum(images, axis=1) * np.arange(ny), axis=1) / norm iy_min, iy_max, y_shift = get_min_max_inds(iy, di, ny) t2 = time.time() # if True - the image is not cropped #crop_mask = np.abs(x_shift) + np.abs(y_shift) == 0. crop_mask = np.ones(nn, dtype=bool) t3 = time.time() for i in range(nn): res_arr[i] = images[i, ix_min[i]:ix_max[i], iy_min[i]:iy_max[i]] test_image = 1. * images[i] in_sum = np.sum(res_arr[i]) test_image[ix_min[i]:ix_max[i], iy_min[i]:iy_max[i]] = 0. out_sum = np.sum(test_image) if in_sum == 0. 
or out_sum / in_sum > crop_fraction: crop_mask[i] = False test_image = 1. * images[i] b = boundary in_sum = np.sum(test_image[b:-b, b:-b]) test_image[b:-b, b:-b] = 0. out_sum = np.sum(test_image) if in_sum == 0. or out_sum / in_sum > crop_fraction: crop_mask[i] = False t4 = time.time() if test: print('create arrays: %.3f s' % (t1 - t0)) print('Get indices: %.3f s' % (t2 - t1)) print('Crop mask: %.3f s' % (t3 - t2)) print('Create final array: %.3f s' % (t4 - t3)) return res_arr, crop_mask def flatten_tel_images(images): ''' flatten the number of telescopes dimension of the images ''' ntot, image_size, image_size, ntel = images.shape im_new = np.zeros((ntel*ntot, image_size, image_size), dtype=np.float32) for i in range(ntel): im_new[i::ntel] = images[:,:,:,i] return im_new.reshape((ntel*ntot, image_size, image_size, 1)) def deflatten_tel_images(images, ntel): ''' deflatten the number of telescopes dimension of the images ''' ntot, image_size, image_size = images.shape[:3] ntot = int(ntot / ntel) im_new = np.zeros((ntot, image_size, image_size, ntel), dtype=np.float32) for i in range(ntel): im_new[:,:,:,i] = images[i::ntel] return im_new def flatten_meta_data(data, ntel=4): if data.ndim == 1: data_loc = np.outer(data, np.ones(ntel)) else: data_loc = 1. * data # return the per-telescope copy; returning `data` here would silently # drop the np.outer expansion for 1-d input return data_loc.flatten()
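# Editor's illustration (self-contained, hypothetical data) of the
# centre-of-gravity index computation that crop_images uses above to centre
# the crop window on the brightest part of the image.
import numpy as np

images = np.zeros((1, 5, 5), dtype=np.float32)
images[0, 3, 1] = 2.0  # one bright pixel at (x=3, y=1)
norm = images.sum(axis=(1, 2)) + 1.e-15
ix = (images.sum(axis=2) * np.arange(5)).sum(axis=1) / norm  # -> [3.]
iy = (images.sum(axis=1) * np.arange(5)).sum(axis=1) / norm  # -> [1.]
# get_min_max_inds(ix, half_size, 5) then clips the window to the frame.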
data_key = cdict['data_key']
zap.go
package zaputils import ( "fmt" "gopkg.in/natefinch/lumberjack.v2" "go.uber.org/zap" "go.uber.org/zap/zapcore" ) var defaultDir string var setDefaultDir bool // SetDefaultDir sets the default directory for all zap writer loggers. func SetDefaultDir(dir string) { defaultDir = dir setDefaultDir = true } // DefaultDir returns defaultDir. func
() string { return defaultDir } // NewZapWriter constructs a new ZapWriter instance. func NewZapWriter(path string, level int, fs []Field) *ZapWriter { l := &ZapWriter{ level: level, } encoderConfig := zap.NewProductionConfig().EncoderConfig encoderConfig.LevelKey = "lv" encoderConfig.StacktraceKey = "stack" enc := zapcore.NewJSONEncoder(encoderConfig) filePath := path if setDefaultDir { filePath = fmt.Sprintf("%s/%s", defaultDir, path) } syncer := zapcore.AddSync(&lumberjack.Logger{ Filename: filePath, MaxSize: 500, // MB MaxAge: 1, // days MaxBackups: 100, LocalTime: true, Compress: true, }) core := zapcore.NewCore( enc, syncer, zap.DebugLevel, ) opts := []zap.Option{ zap.AddStacktrace(zap.ErrorLevel), zap.Fields(fs...), } l.logger = zap.New(core, opts...).Sugar() return l } // A ZapWriter wraps a zap.SugaredLogger. type ZapWriter struct { logger *zap.SugaredLogger options Options level int } // Sync flushes any buffered log entries. func (l *ZapWriter) Sync() error { return l.logger.Sync() } // Level returns the ZapWriter level. func (l *ZapWriter) Level() int { return l.level } // Print logs a message with structured-style key/value pairs. func (l *ZapWriter) Print(level int, msg string, keysAndValues ...interface{}) { switch level { case FatalLevel: l.logger.Fatalw(msg, keysAndValues...) case PanicLevel: l.logger.Panicw(msg, keysAndValues...) case ErrorLevel: l.logger.Errorw(msg, keysAndValues...) case WarnLevel: l.logger.Warnw(msg, keysAndValues...) case InfoLevel: l.logger.Infow(msg, keysAndValues...) case DebugLevel: l.logger.Debugw(msg, keysAndValues...) } } // Printf logs a message with printf-style formatting. func (l *ZapWriter) Printf(level int, format string, args ...interface{}) { switch level { case FatalLevel: l.logger.Fatalf(format, args...) case PanicLevel: l.logger.Panicf(format, args...) case ErrorLevel: l.logger.Errorf(format, args...) case WarnLevel: l.logger.Warnf(format, args...) case InfoLevel: l.logger.Infof(format, args...) case DebugLevel: l.logger.Debugf(format, args...) } } // CheckErr checks an error; the error is logged via logFunc if it is not nil. func (l *ZapWriter) CheckErr(err error, logFunc func(string, ...interface{})) (isErr bool) { return checkErr(err, logFunc) }
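// Editor's sketch of typical usage (hypothetical values; assumes the
// package-level level constants such as InfoLevel that Print switches on):
//
//	zaputils.SetDefaultDir("/var/log/myapp")
//	w := zaputils.NewZapWriter("app.log", zaputils.InfoLevel, nil)
//	defer w.Sync()
//	w.Print(zaputils.InfoLevel, "service started", "port", 8080)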
DefaultDir
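# For readers more at home in Python: a rough stdlib analogue of the rotation
# policy the lumberjack.Logger above configures (a sketch, not part of the Go
# package; "app.log" is a hypothetical path standing in for filePath, and the
# stdlib handler is size-based only, so MaxAge has no direct equivalent here).
import logging
from logging.handlers import RotatingFileHandler

handler = RotatingFileHandler(
    "app.log",
    maxBytes=500 * 1024 * 1024,  # MaxSize: 500 (MB)
    backupCount=100,             # MaxBackups: 100
)
logger = logging.getLogger("zaputils-demo")
logger.setLevel(logging.DEBUG)   # the zapcore.Core above is built at DebugLevel
logger.addHandler(handler)
logger.debug("rotating-file logging is wired up")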
users.py
import jwt
import datetime
from models.users import User
from bson.objectid import ObjectId
from utils.email_util import sent_email
from flask import jsonify, make_response
from special_variables import _secret_key
from utils.token_util import token_required
from flask_bcrypt import generate_password_hash, check_password_hash


def sign_up_controller(doc):
    try:
        user = User.objects(email=doc.get('email', None)).first()
        if user:
            return make_response(jsonify({'msg': 'user already exists'}), 400)
        if not (doc and doc.get('email', None)):
            return make_response(jsonify({'msg': 'email and password are required'}), 400)
        token = jwt.encode({'email': doc.get('email'),
                            'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)},
                           _secret_key, 'HS256')
        subject = 'Registration Token'
        return sent_email(subject, [doc.get('email')], token), 200
    except Exception as e:
        return make_response(jsonify({'msg': e.args}), 400)


@token_required
def registration_controller(data, doc):
    try:
        user = User.objects(email=data.get('email', None)).first()
        if user:
            return make_response(jsonify({'msg': 'user already exists'}), 400)
        if not (doc and doc.get('password', None)):
            return make_response(jsonify({'msg': 'password is required'}), 400)
        user = User(email=data.get('email'),
                    password=generate_password_hash(doc.get('password', None)),
                    created_at=datetime.datetime.now(),
                    updated_at=datetime.datetime.now())
        user.save()
        return make_response(jsonify({'msg': 'user created successfully'}), 201)
    except Exception as e:
        return make_response(jsonify({'msg': e.args}), 400)


def get_users_controller():
    try:
        users = [{'email': user.email} for user in User.objects]
        if not users:
            return make_response(jsonify({'msg': 'users list is empty'}), 404)
        return make_response(jsonify({'users': users}), 200)
    except Exception as e:
        return make_response(jsonify({'msg': e.args}), 400)


def get_user_controller(id):
    try:
        user = User.objects(id=ObjectId(id)).first()
        if not user:
            return make_response(jsonify({'msg': f'user not found, with id: {id}'}), 404)
        return make_response(jsonify({'email': user.email}), 200)
    except Exception as e:
        return make_response(jsonify({'msg': e.args}), 400)


@token_required
def delete_user_controller(data, id, doc):
    try:
        user = User.objects(id=ObjectId(id)).first()
        if not user:
            return make_response(jsonify({'msg': f'user not found, with id: {id}'}), 404)
        if not (doc and doc.get('email', None) and doc.get('password', None)):
            return make_response(jsonify({'msg': 'email and password are required'}), 400)
        if not (user.email == doc.get('email') and check_password_hash(user.password[2:-1], doc['password'])):
            return make_response(jsonify({'msg': 'wrong email or password'}), 400)
        user.delete()
        return make_response(jsonify({'msg': f'user deleted successfully, with id: {id}'}), 204)
    except Exception as e:
        return make_response(jsonify({'msg': e.args}), 400)


def
(doc):
    try:
        user = User.objects(email=doc.get('email', None)).first()
        if not (user and doc.get('password', None)):
            return make_response(jsonify({'msg': 'user does not exist or password is missing',
                                          'required fields': ['email', 'password']}), 404)
        # stored hashes may be saved as the string repr of a bytes object ("b'...'")
        if user.password[0] != '$':
            password = user.password.split("'")[1]
        else:
            password = user.password
        if not check_password_hash(password, doc['password']):
            return make_response(jsonify({'msg': 'password is incorrect'}), 400)
        token = jwt.encode({'email': user.email,
                            'exp': datetime.datetime.utcnow() + datetime.timedelta(hours=24)},
                           _secret_key, 'HS256')
        return make_response(jsonify({'msg': 'logged in successfully', 'token': token}), 200)
    except Exception as e:
        return make_response(jsonify({'msg': f'{e.args} or invalid data'}), 400)


def forget_password_controller(doc):
    try:
        email = doc.get('email', None)
        user = User.objects(email=email).first()
        if not user:
            return make_response(jsonify({'msg': f'user not found, with email {email}'}), 404)
        token = jwt.encode({'email': user.email,
                            'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=20)},
                           _secret_key, 'HS256')
        subject = 'Forget Password Token'
        return sent_email(subject, [email], token)
    except Exception as e:
        return make_response(jsonify({'msg': 'invalid data'}), 400)


@token_required
def reset_password_controller(data, doc):
    try:
        new_password = doc.get('new_password', None)
        if not new_password:
            return make_response(jsonify({'msg': 'new password is required'}), 400)
        user = User.objects(email=data['email']).first()
        if not user:
            return make_response(jsonify({'msg': f"user not found, with email: {data['email']}"}), 404)
        user.update(email=user['email'],
                    password=str(generate_password_hash(new_password)),
                    updated_at=datetime.datetime.now())
        subject = 'Password reset successful'
        body = f'your password has been reset successfully, your new password is: {new_password}'
        return sent_email(subject, [user.email], body)
    except Exception as e:
        return make_response(jsonify({'msg': e.args}), 500)
user_login_controller
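# Sketch of the JWT round trip the controllers above rely on (PyJWT; the secret
# is a stand-in for _secret_key, and the decode mirrors what token_required is
# presumed to do before handing `data` to a controller).
import datetime
import jwt

secret = "demo-secret"
token = jwt.encode(
    {"email": "user@example.com",
     "exp": datetime.datetime.utcnow() + datetime.timedelta(hours=24)},
    secret, "HS256")

# An expired or tampered token raises jwt.ExpiredSignatureError /
# jwt.InvalidTokenError here, which is what gates the protected controllers.
data = jwt.decode(token, secret, algorithms=["HS256"])
assert data["email"] == "user@example.com"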
blinking_led.rs
//! A very basic example of a program blinking an LED using the native library API.
//!
//! This example assumes that physical pin #7 is connected to the diode's anode (+).
//! Make sure to put a resistor in series to limit the current flowing through the diode.

use c2_mmap_gpio::{Device, PinId, Value};
use std::error::Error;
use std::thread::sleep;
use std::time::Duration;

fn main() -> Result<(), Box<dyn Error>> {
    let mut odroid = Device::new()?;
    let mut led_pin = odroid.output_pin(PinId::Phy7)?;
    let blink_interval = Duration::from_millis(500);

        sleep(blink_interval);
    }
}
loop {
        led_pin.set_value(Value::High);
        sleep(blink_interval);
        led_pin.set_value(Value::Low);