file_name (string, 3–137 chars) | prefix (string, 0–918k chars) | suffix (string, 0–962k chars) | middle (string, 0–812k chars)
---|---|---|---
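Each row splits one source file into a prefix, a held-out middle, and a suffix, as used for fill-in-the-middle (FIM) training; the original file is recovered as prefix + middle + suffix. A minimal reassembly sketch, assuming a Hugging Face-style dataset with these column names (the dataset path below is hypothetical):

```python
from datasets import load_dataset

# Hypothetical dataset path; the column names match the table header above.
ds = load_dataset("example/fim-code-corpus", split="train")

def reassemble(row):
    # The original file is the prefix, then the held-out middle, then the suffix.
    return row["prefix"] + row["middle"] + row["suffix"]

print(ds[0]["file_name"])
print(reassemble(ds[0])[:200])  # first 200 characters of the reconstructed file
```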
main_Hand_Tracking.py | import cv2
import mediapipe as mp
class handDetector():
"""
Initiating: | trackCon --> minimum tracking confidence; higher values may add latency
"""
def __init__(self, mode=False, maxHands=1, detectionCon=0.5, trackCon=0.7):
self.mode = mode
self.maxHands = maxHands
self.detectionCon = detectionCon
self.trackCon = trackCon
self.mpHands = mp.solutions.hands
self.hands = self.mpHands.Hands(static_image_mode=self.mode, max_num_hands=self.maxHands, min_detection_confidence=self.detectionCon, min_tracking_confidence=self.trackCon)
self.mpDraw = mp.solutions.drawing_utils
"""
Finds the hand and draws its landmarks (21 points total)
"""
def findHands(self,img, draw=True):
imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
self.results = self.hands.process(imgRGB)
if self.results.multi_hand_landmarks:
handLms = self.results.multi_hand_landmarks[0]
if draw:
self.mpDraw.draw_landmarks(img, handLms,self.mpHands.HAND_CONNECTIONS)
return img
"""
Used to find positions of the hand's landmark points
"""
def findpositions(self, img):
self.lmList = []
if self.results.multi_hand_landmarks:
myHand = self.results.multi_hand_landmarks[0]
"""
id represents as shown in hand_landmarks.png
"""
for id, lm in enumerate(myHand.landmark):
h, w, c = img.shape
x, y = int(lm.x * w), int(lm.y * h)
self.lmList.append([id, x, y])
return self.lmList
detector = handDetector()
def cv2_handdetector(img):
original = img.copy()
#
img = detector.findHands(img)
lmk_list = detector.findpositions(img)
points_to_get = [0,4,8,12,16,20]
if lmk_list:
new_list = []
for i in lmk_list:
if i[0] in points_to_get:
new_list.append(i)
return new_list,original
else:
return None, original | mode = False --> use dynamic (video stream) mode rather than static images
maxHands --> how many hands to detect
detectionCon --> minimum detection confidence |
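Reassembled, this row wraps MediaPipe Hands in a `handDetector` class and exposes `cv2_handdetector`, which returns the wrist and fingertip landmarks. A minimal usage sketch, assuming a webcam at index 0 and the `opencv-python`/`mediapipe` packages:

```python
import cv2

cap = cv2.VideoCapture(0)  # webcam index is an assumption
while True:
    ok, frame = cap.read()
    if not ok:
        break
    points, original = cv2_handdetector(frame)  # [id, x, y] triples, or None
    if points:
        for _, x, y in points:
            cv2.circle(original, (x, y), 5, (0, 255, 0), cv2.FILLED)
    cv2.imshow("hand", original)
    if cv2.waitKey(1) & 0xFF == ord("q"):
        break
cap.release()
cv2.destroyAllWindows()
```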
components.py | class FSM:
def __init__(self,states,alphabet,transitionmatrix,currstate):
self.S=states
self.A=alphabet
self.TM=transitionmatrix
self.currstate=currstate
def accept(self,sym):
if sym not in self.A:
return
symi=self.A.index(sym)
if self.TM[self.currstate][symi] is not None:
self.currstate = self.TM[self.currstate][symi]
else:
raise Exception('undefined transition delta(%s,%s)'%(self.currstate,sym))
class FSMView:
def __init__(self,x,y,w,h,bg,fg,fsm):
self.x=x | self.fg=fg
self.fsm=fsm
def render(self):
fill(self.bg)
stroke(self.fg)
rect(self.x,self.y,self.w,self.h)
def transition(self, evt):
pass | self.y=y
self.w=w
self.h=h
self.bg=bg |
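A minimal sketch of driving the FSM above, assuming states are row indices into the transition matrix (the values below are illustrative); `fill`, `stroke`, and `rect` in FSMView are assumed to come from a Processing-style drawing environment:

```python
# TM[state][symbol_index] gives the next state; None marks an undefined transition.
fsm = FSM(states=[0, 1], alphabet=['a', 'b'],
          transitionmatrix=[[1, None], [None, 0]], currstate=0)
fsm.accept('a')  # 0 -> 1
fsm.accept('b')  # 1 -> 0
fsm.accept('x')  # not in the alphabet, silently ignored
print(fsm.currstate)  # 0
```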
forms.py | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from .models import Profile
class UserOurRegistration(UserCreationForm):
email = forms.EmailField(required=True)
class | :
model = User
fields = ['email', 'username', 'password1', 'password2']
class UserUpdateForm(forms.ModelForm):
email = forms.EmailField()
class Meta:
model = User
fields = ['username', 'email']
class ProfileImage(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(ProfileImage, self).__init__(*args, **kwargs)
self.fields['img'].label = "Profile image"
class Meta:
model = Profile
fields = ['img']
| Meta |
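A minimal sketch of using `UserOurRegistration` in a registration view, assuming standard Django patterns (the view and template names are hypothetical):

```python
from django.shortcuts import redirect, render

def register(request):
    # Bind POST data on submit; otherwise render an empty form.
    form = UserOurRegistration(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()  # UserCreationForm handles password hashing
        return redirect('login')
    return render(request, 'users/register.html', {'form': form})
```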
algorithm.py | from operator import itemgetter  # needed by _fitness below
class GeneticAlgorithm:
def __init__(self, popSize=10, maxGens=100000, crossoverRate=0.7, mutateRate=0.05):
self.pop = []
self.popSize = popSize
self.maxGens = maxGens
self.crossoverRate = crossoverRate
self.mutateRate = mutateRate
# Replace funcs
def searialize(self, creature):
|
def desearialize(self, chromosome):
raise NotImplementedError('Function desearialize should be replaced')
def fitness(self, creature):
raise NotImplementedError('Function fitness should be replaced')
def terminate(self):
raise NotImplementedError('Function terminate should be replaced')
# Specific funcs
# General funcs
def mutate(self, chromosome):
return self.bitFlip(chromosome)
def crossover(self, chromosome1, chromosome2):
return self.onePointCrossover(chromosome1, chromosome2)
def select(self, fitness):
return self.selectRoulette(fitness)
def _fitness(self, pop):
scored = [(creature, self.fitness(creature)) for creature in pop]
return sorted(scored, key=itemgetter(1))
def _mate(self):
fittest = self._fitness(self.pop)
a, b = self.select(fittest)
return self.desearialize(self.mutate(self.crossover(self.searialize(a), self.searialize(b))))
def reproduce(self):
return self._mate()  # produce a single offspring
# Interface funcs
def initPop(self, seed):
for i in seed:
if len(self.pop) < self.popSize:
self.pop.append(i)
while len(self.pop) < self.popSize:
self.pop.append(self.initCreature())
def evolve(self):
if len(self.pop) != self.popSize:
self.initPop()
if
| raise NotImplementedError('Function searialize should be replaced') |
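A minimal sketch of specializing the skeleton above for a one-max problem over bit lists; everything beyond the base class (the subclass name, the identity codecs, the fitness function) is hypothetical, and the misspelled method names follow the base class:

```python
class OneMaxGA(GeneticAlgorithm):
    # Creatures are already bit lists, so (de)serialization is the identity.
    def searialize(self, creature):
        return creature

    def desearialize(self, chromosome):
        return chromosome

    def fitness(self, creature):
        return sum(creature)  # maximize the number of 1 bits

    def terminate(self):
        # Stop once any creature is all ones.
        return any(sum(c) == len(c) for c in self.pop)
```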
key_reg.rs | use algonaut::algod::AlgodBuilder;
use algonaut_core::{VotePk, VrfPk};
use algonaut_transaction::RegisterKey;
use algonaut_transaction::{account::Account, TxnBuilder};
use dotenv::dotenv;
use std::env;
use std::error::Error;
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
// load variables in .env
dotenv().ok();
let algod = AlgodBuilder::new()
.bind(env::var("ALGOD_URL")?.as_ref())
.auth(env::var("ALGOD_TOKEN")?.as_ref())
.build_v2()?;
let account = Account::from_mnemonic("fire enlist diesel stamp nuclear chunk student stumble call snow flock brush example slab guide choice option recall south kangaroo hundred matrix school above zero")?;
let vote_pk_str = "KgL5qW1jtHAQb1lQNIKuqHBqDWXRmb7GTmBN92a/sOQ=";
let selection_pk_str = "A3s+2bgKlbG9qIaA4wJsrrJl8mVKGzTp/h6gGEyZmAg="; | params.clone(),
RegisterKey::online(
account.address(),
VotePk::from_base64_str(vote_pk_str)?,
VrfPk::from_base64_str(selection_pk_str)?,
params.first_valid,
params.first_valid + 3_000_000,
10_000,
)
.build(),
)
.build();
let sign_response = account.sign_transaction(&t)?;
// Broadcast the transaction to the network
// Note this transaction will get rejected because the accounts do not have any tokens
let send_response = algod.broadcast_signed_transaction(&sign_response).await;
println!("{:#?}", send_response);
Ok(())
} |
let params = algod.suggested_transaction_params().await?;
let t = TxnBuilder::with( |
domain_record_list.go | package cmd
import (
"os"
"strconv"
"github.com/civo/cli/config"
"github.com/civo/cli/utility"
"github.com/spf13/cobra"
)
var domainRecordListCmd = &cobra.Command{
Use: "ls",
Aliases: []string{"list", "all"},
Example: "civo domain record ls DOMAIN/DOMAIN_ID",
Args: cobra.MinimumNArgs(1),
Short: "List all domain records",
Long: `List all current domain records.
If you wish to use a custom format, the available fields are:
* ID
* Name
* Value
* Type
* TTL
* Priority`,
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
if len(args) == 0 {
return getAllDomainList(), cobra.ShellCompDirectiveNoFileComp
}
return getDomainList(toComplete), cobra.ShellCompDirectiveNoFileComp
},
Run: func(cmd *cobra.Command, args []string) {
client, err := config.CivoAPIClient()
if err != nil {
utility.Error("Creating the connection to Civo's API failed with %s", err)
os.Exit(1)
}
domain, err := client.FindDNSDomain(args[0])
if err != nil {
utility.Error("%s", err)
os.Exit(1)
}
records, err := client.ListDNSRecords(domain.ID)
if err != nil {
utility.Error("%s", err)
os.Exit(1)
}
ow := utility.NewOutputWriter()
for _, record := range records {
ow.StartLine()
ow.AppendData("ID", record.ID)
ow.AppendData("Name", record.Name)
ow.AppendData("Value", record.Value)
ow.AppendData("Type", string(record.Type))
ow.AppendData("TTL", strconv.Itoa(record.TTL))
ow.AppendData("Priority", strconv.Itoa(record.Priority))
}
switch outputFormat {
case "json":
ow.WriteMultipleObjectsJSON()
case "custom":
ow.WriteCustomOutput(outputFields)
default:
ow.WriteTable()
}
},
}
func getDomainList(value string) []string |
func getAllDomainList() []string {
client, err := config.CivoAPIClient()
if err != nil {
utility.Error("Creating the connection to Civo's API failed with %s", err)
os.Exit(1)
}
domain, err := client.ListDNSDomains()
if err != nil {
utility.Error("Unable to list domains %s", err)
os.Exit(1)
}
var domainList []string
for _, v := range domain {
domainList = append(domainList, v.Name)
}
return domainList
}
| {
client, err := config.CivoAPIClient()
if err != nil {
utility.Error("Creating the connection to Civo's API failed with %s", err)
os.Exit(1)
}
domain, err := client.FindDNSDomain(value)
if err != nil {
utility.Error("Unable to list domains %s", err)
os.Exit(1)
}
var domainList []string
domainList = append(domainList, domain.Name)
return domainList
} |
IconCenterFocusStrong.js | /* @flow */
import React from 'react';
import Icon from 'mineral-ui/Icon';
type Props = {
size?: string | 'small' | 'medium' | 'large',
color?: string,
rtl?: boolean,
title?: string
};
/* eslint-disable prettier/prettier */
export default function | (props: Props) {
const iconProps = {
rtl: false,
...props
};
return (
<Icon {...iconProps}>
<g>
<path d="M12 8c-2.21 0-4 1.79-4 4s1.79 4 4 4 4-1.79 4-4-1.79-4-4-4zm-7 7H3v4c0 1.1.9 2 2 2h4v-2H5v-4zM5 5h4V3H5c-1.1 0-2 .9-2 2v4h2V5zm14-2h-4v2h4v4h2V5c0-1.1-.9-2-2-2zm0 16h-4v2h4c1.1 0 2-.9 2-2v-4h-2v4z"/>
</g>
</Icon>
);
}
IconCenterFocusStrong.displayName = 'IconCenterFocusStrong';
IconCenterFocusStrong.category = 'image';
| IconCenterFocusStrong |
tccr_e.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u8,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u8,
}
impl super::TCCR_E {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct TLOCKR {
bits: bool,
}
impl TLOCKR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct ENHCR {
bits: bool,
}
impl ENHCR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct OCOE5R {
bits: bool,
}
impl OCOE5R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct OCOE4R {
bits: bool,
}
impl OCOE4R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct OCOE3R {
bits: bool,
}
impl OCOE3R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct OCOE2R {
bits: bool,
}
impl OCOE2R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct OCOE1R {
bits: bool,
}
impl OCOE1R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct OCOE0R {
bits: bool,
}
impl OCOE0R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _OCOE5W<'a> {
w: &'a mut W,
}
impl<'a> _OCOE5W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _OCOE4W<'a> {
w: &'a mut W,
}
impl<'a> _OCOE4W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn | (self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _OCOE3W<'a> {
w: &'a mut W,
}
impl<'a> _OCOE3W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _OCOE2W<'a> {
w: &'a mut W,
}
impl<'a> _OCOE2W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _OCOE1W<'a> {
w: &'a mut W,
}
impl<'a> _OCOE1W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _OCOE0W<'a> {
w: &'a mut W,
}
impl<'a> _OCOE0W<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u8) << OFFSET);
self.w.bits |= ((value & MASK) as u8) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
#[doc = "Bit 7 - Register Update Lock"]
#[inline]
pub fn tlock(&self) -> TLOCKR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u8) != 0
};
TLOCKR { bits }
}
#[doc = "Bit 6 - Enhanced Compare/PWM Mode"]
#[inline]
pub fn enhc(&self) -> ENHCR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u8) != 0
};
ENHCR { bits }
}
#[doc = "Bit 5 - Output Compare Override Enable (PD7)"]
#[inline]
pub fn ocoe5(&self) -> OCOE5R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u8) != 0
};
OCOE5R { bits }
}
#[doc = "Bit 4 - Output Compare Override Enable (PD6)"]
#[inline]
pub fn ocoe4(&self) -> OCOE4R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u8) != 0
};
OCOE4R { bits }
}
#[doc = "Bit 3 - Output Compare Override Enable (PB6)"]
#[inline]
pub fn ocoe3(&self) -> OCOE3R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u8) != 0
};
OCOE3R { bits }
}
#[doc = "Bit 2 - Output Compare Override Enable (PB5)"]
#[inline]
pub fn ocoe2(&self) -> OCOE2R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u8) != 0
};
OCOE2R { bits }
}
#[doc = "Bit 1 - Output Compare Override Enable (PC7)"]
#[inline]
pub fn ocoe1(&self) -> OCOE1R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u8) != 0
};
OCOE1R { bits }
}
#[doc = "Bit 0 - Output Compare Override Enable (PC6)"]
#[inline]
pub fn ocoe0(&self) -> OCOE0R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u8) != 0
};
OCOE0R { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u8) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 5 - Output Compare Override Enable (PD7)"]
#[inline]
pub fn ocoe5(&mut self) -> _OCOE5W {
_OCOE5W { w: self }
}
#[doc = "Bit 4 - Output Compare Override Enable (PD6)"]
#[inline]
pub fn ocoe4(&mut self) -> _OCOE4W {
_OCOE4W { w: self }
}
#[doc = "Bit 3 - Output Compare Override Enable (PB6)"]
#[inline]
pub fn ocoe3(&mut self) -> _OCOE3W {
_OCOE3W { w: self }
}
#[doc = "Bit 2 - Output Compare Override Enable (PB5)"]
#[inline]
pub fn ocoe2(&mut self) -> _OCOE2W {
_OCOE2W { w: self }
}
#[doc = "Bit 1 - Output Compare Override Enable (PC7)"]
#[inline]
pub fn ocoe1(&mut self) -> _OCOE1W {
_OCOE1W { w: self }
}
#[doc = "Bit 0 - Output Compare Override Enable (PC6)"]
#[inline]
pub fn ocoe0(&mut self) -> _OCOE0W {
_OCOE0W { w: self }
}
}
| bit |
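Every write proxy above uses the same read-modify-write bit pattern: clear the field with a shifted mask, then OR in the new value. A Python sketch of the identical arithmetic (names are illustrative), using OCOE5 at offset 5:

```python
def write_bit(reg: int, offset: int, value: bool) -> int:
    # Clear the target bit, then OR in the new value, exactly as the Rust proxies do.
    mask = 1
    reg &= ~(mask << offset) & 0xFF  # registers here are 8-bit (u8)
    reg |= (int(value) & mask) << offset
    return reg

assert write_bit(0b0000_0000, 5, True) == 0b0010_0000   # set OCOE5
assert write_bit(0b1111_1111, 5, False) == 0b1101_1111  # clear OCOE5
```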
tell.py | #!/usr/bin/env python2
# -*- coding: UTF-8 -*-
# Open a file
fo = open("foo.txt", "r+")
str = fo.read(10)
print "String read : ", str
# Find the current position
position = fo.tell()
print "Current file position : ", position
# Reposition the pointer at the beginning of the file
position = fo.seek(0, 0)
str = fo.read(10)
print "String re-read : ", str
| fo.close() | # Close the opened file
|
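A Python 3 sketch of the same tell/seek walkthrough, using a context manager so the file is closed automatically (assumes foo.txt exists):

```python
with open("foo.txt", "r+") as fo:
    data = fo.read(10)
    print("String read :", data)
    print("Current file position :", fo.tell())  # offset after reading 10 chars
    fo.seek(0, 0)  # back to the start of the file
    print("String re-read :", fo.read(10))
```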
cinchy-dropdown-options.ts | export interface IDropdownOption { | }
export class DropdownOption implements IDropdownOption {
public group: string;
constructor(public id: string, public label: string, public displayOnlyLabel?: any) {
}
} | id: string;
label: string;
group: string;
displayOnlyLabel?: any |
Quaternion.js | /**
* @author mikael emtinger / http://gomo.se/
* @author alteredq / http://alteredqualia.com/
* @author WestLangley / http://github.com/WestLangley
* @author bhouston / http://clara.io
*/
THREE.Quaternion = function ( x, y, z, w ) {
this._x = x || 0;
this._y = y || 0;
this._z = z || 0;
this._w = ( w !== undefined ) ? w : 1;
};
THREE.Quaternion.prototype = {
constructor: THREE.Quaternion,
get x () {
return this._x;
},
set x ( value ) {
this._x = value;
this.onChangeCallback();
},
get y () {
return this._y;
},
set y ( value ) {
this._y = value;
this.onChangeCallback();
},
get z () {
return this._z;
},
set z ( value ) {
this._z = value;
this.onChangeCallback();
},
get w () {
return this._w;
},
set w ( value ) {
this._w = value;
this.onChangeCallback();
},
set: function ( x, y, z, w ) {
this._x = x;
this._y = y;
this._z = z;
this._w = w;
this.onChangeCallback();
return this;
},
clone: function () {
return new this.constructor( this._x, this._y, this._z, this._w );
},
copy: function ( quaternion ) {
this._x = quaternion.x;
this._y = quaternion.y;
this._z = quaternion.z;
this._w = quaternion.w;
this.onChangeCallback();
return this;
},
setFromEuler: function ( euler, update ) {
if ( euler instanceof THREE.Euler === false ) {
throw new Error( 'THREE.Quaternion: .setFromEuler() now expects a Euler rotation rather than a Vector3 and order.' );
}
// http://www.mathworks.com/matlabcentral/fileexchange/
// 20696-function-to-convert-between-dcm-euler-angles-quaternions-and-euler-vectors/
// content/SpinCalc.m
var c1 = Math.cos( euler._x / 2 );
var c2 = Math.cos( euler._y / 2 );
var c3 = Math.cos( euler._z / 2 );
var s1 = Math.sin( euler._x / 2 );
var s2 = Math.sin( euler._y / 2 );
var s3 = Math.sin( euler._z / 2 );
var order = euler.order;
if ( order === 'XYZ' ) {
this._x = s1 * c2 * c3 + c1 * s2 * s3;
this._y = c1 * s2 * c3 - s1 * c2 * s3;
this._z = c1 * c2 * s3 + s1 * s2 * c3;
this._w = c1 * c2 * c3 - s1 * s2 * s3;
} else if ( order === 'YXZ' ) {
this._x = s1 * c2 * c3 + c1 * s2 * s3;
this._y = c1 * s2 * c3 - s1 * c2 * s3;
this._z = c1 * c2 * s3 - s1 * s2 * c3;
this._w = c1 * c2 * c3 + s1 * s2 * s3;
} else if ( order === 'ZXY' ) {
this._x = s1 * c2 * c3 - c1 * s2 * s3;
this._y = c1 * s2 * c3 + s1 * c2 * s3;
this._z = c1 * c2 * s3 + s1 * s2 * c3;
this._w = c1 * c2 * c3 - s1 * s2 * s3;
} else if ( order === 'ZYX' ) {
this._x = s1 * c2 * c3 - c1 * s2 * s3;
this._y = c1 * s2 * c3 + s1 * c2 * s3;
this._z = c1 * c2 * s3 - s1 * s2 * c3;
this._w = c1 * c2 * c3 + s1 * s2 * s3;
} else if ( order === 'YZX' ) {
this._x = s1 * c2 * c3 + c1 * s2 * s3;
this._y = c1 * s2 * c3 + s1 * c2 * s3;
this._z = c1 * c2 * s3 - s1 * s2 * c3;
this._w = c1 * c2 * c3 - s1 * s2 * s3;
} else if ( order === 'XZY' ) {
this._x = s1 * c2 * c3 - c1 * s2 * s3;
this._y = c1 * s2 * c3 - s1 * c2 * s3;
this._z = c1 * c2 * s3 + s1 * s2 * c3;
this._w = c1 * c2 * c3 + s1 * s2 * s3;
}
if ( update !== false ) this.onChangeCallback();
return this;
},
setFromAxisAngle: function ( axis, angle ) {
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/angleToQuaternion/index.htm
// assumes axis is normalized
var halfAngle = angle / 2, s = Math.sin( halfAngle );
this._x = axis.x * s;
this._y = axis.y * s;
this._z = axis.z * s;
this._w = Math.cos( halfAngle );
this.onChangeCallback();
return this;
},
setFromRotationMatrix: function ( m ) {
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/index.htm
// assumes the upper 3x3 of m is a pure rotation matrix (i.e, unscaled)
var te = m.elements,
m11 = te[ 0 ], m12 = te[ 4 ], m13 = te[ 8 ],
m21 = te[ 1 ], m22 = te[ 5 ], m23 = te[ 9 ],
m31 = te[ 2 ], m32 = te[ 6 ], m33 = te[ 10 ],
trace = m11 + m22 + m33,
s;
if ( trace > 0 ) {
s = 0.5 / Math.sqrt( trace + 1.0 );
this._w = 0.25 / s;
this._x = ( m32 - m23 ) * s;
this._y = ( m13 - m31 ) * s;
this._z = ( m21 - m12 ) * s;
} else if ( m11 > m22 && m11 > m33 ) {
s = 2.0 * Math.sqrt( 1.0 + m11 - m22 - m33 );
this._w = ( m32 - m23 ) / s;
this._x = 0.25 * s;
this._y = ( m12 + m21 ) / s;
this._z = ( m13 + m31 ) / s;
} else if ( m22 > m33 ) {
s = 2.0 * Math.sqrt( 1.0 + m22 - m11 - m33 );
this._w = ( m13 - m31 ) / s;
this._x = ( m12 + m21 ) / s;
this._y = 0.25 * s;
this._z = ( m23 + m32 ) / s;
} else {
s = 2.0 * Math.sqrt( 1.0 + m33 - m11 - m22 );
this._w = ( m21 - m12 ) / s;
this._x = ( m13 + m31 ) / s;
this._y = ( m23 + m32 ) / s;
this._z = 0.25 * s;
}
this.onChangeCallback();
return this;
},
setFromUnitVectors: function () {
// http://lolengine.net/blog/2014/02/24/quaternion-from-two-vectors-final
// assumes direction vectors vFrom and vTo are normalized
var v1, r;
var EPS = 0.000001;
return function ( vFrom, vTo ) {
if ( v1 === undefined ) v1 = new THREE.Vector3();
r = vFrom.dot( vTo ) + 1;
if ( r < EPS ) {
r = 0;
if ( Math.abs( vFrom.x ) > Math.abs( vFrom.z ) ) {
v1.set( - vFrom.y, vFrom.x, 0 );
} else {
v1.set( 0, - vFrom.z, vFrom.y );
}
} else {
v1.crossVectors( vFrom, vTo );
}
this._x = v1.x;
this._y = v1.y;
this._z = v1.z;
this._w = r;
return this.normalize();
};
}(),
inverse: function () {
return this.conjugate().normalize();
},
conjugate: function () {
this._x *= - 1;
this._y *= - 1;
this._z *= - 1;
this.onChangeCallback();
return this;
},
dot: function ( v ) {
return this._x * v._x + this._y * v._y + this._z * v._z + this._w * v._w;
},
lengthSq: function () {
return this._x * this._x + this._y * this._y + this._z * this._z + this._w * this._w;
},
length: function () {
return Math.sqrt( this._x * this._x + this._y * this._y + this._z * this._z + this._w * this._w );
},
normalize: function () {
var l = this.length();
if ( l === 0 ) {
this._x = 0;
this._y = 0;
this._z = 0;
this._w = 1;
} else {
l = 1 / l;
this._x = this._x * l;
this._y = this._y * l;
this._z = this._z * l;
this._w = this._w * l;
}
this.onChangeCallback();
return this;
},
multiply: function ( q, p ) {
if ( p !== undefined ) {
console.warn( 'THREE.Quaternion: .multiply() now only accepts one argument. Use .multiplyQuaternions( a, b ) instead.' );
return this.multiplyQuaternions( q, p );
}
return this.multiplyQuaternions( this, q );
},
premultiply: function ( q ) {
return this.multiplyQuaternions( q, this );
},
multiplyQuaternions: function ( a, b ) {
// from http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/code/index.htm
var qax = a._x, qay = a._y, qaz = a._z, qaw = a._w;
var qbx = b._x, qby = b._y, qbz = b._z, qbw = b._w;
this._x = qax * qbw + qaw * qbx + qay * qbz - qaz * qby;
this._y = qay * qbw + qaw * qby + qaz * qbx - qax * qbz;
this._z = qaz * qbw + qaw * qbz + qax * qby - qay * qbx;
this._w = qaw * qbw - qax * qbx - qay * qby - qaz * qbz;
this.onChangeCallback();
return this;
},
slerp: function ( qb, t ) {
if ( t === 0 ) return this;
if ( t === 1 ) return this.copy( qb );
var x = this._x, y = this._y, z = this._z, w = this._w;
// http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/slerp/
var cosHalfTheta = w * qb._w + x * qb._x + y * qb._y + z * qb._z;
if ( cosHalfTheta < 0 ) {
this._w = - qb._w;
this._x = - qb._x;
this._y = - qb._y;
this._z = - qb._z;
cosHalfTheta = - cosHalfTheta;
} else {
this.copy( qb );
}
if ( cosHalfTheta >= 1.0 ) {
this._w = w;
this._x = x;
this._y = y;
this._z = z;
return this;
}
var sinHalfTheta = Math.sqrt( 1.0 - cosHalfTheta * cosHalfTheta );
if ( Math.abs( sinHalfTheta ) < 0.001 ) {
this._w = 0.5 * ( w + this._w );
this._x = 0.5 * ( x + this._x );
this._y = 0.5 * ( y + this._y );
this._z = 0.5 * ( z + this._z );
return this;
}
var halfTheta = Math.atan2( sinHalfTheta, cosHalfTheta );
var ratioA = Math.sin( ( 1 - t ) * halfTheta ) / sinHalfTheta,
ratioB = Math.sin( t * halfTheta ) / sinHalfTheta;
this._w = ( w * ratioA + this._w * ratioB );
this._x = ( x * ratioA + this._x * ratioB );
this._y = ( y * ratioA + this._y * ratioB );
this._z = ( z * ratioA + this._z * ratioB );
this.onChangeCallback();
return this;
},
equals: function ( quaternion ) {
return ( quaternion._x === this._x ) && ( quaternion._y === this._y ) && ( quaternion._z === this._z ) && ( quaternion._w === this._w );
},
fromArray: function ( array, offset ) {
if ( offset === undefined ) offset = 0;
this._x = array[ offset ];
this._y = array[ offset + 1 ];
this._z = array[ offset + 2 ];
this._w = array[ offset + 3 ];
this.onChangeCallback();
return this;
},
toArray: function ( array, offset ) {
if ( array === undefined ) array = [];
if ( offset === undefined ) offset = 0;
array[ offset ] = this._x;
array[ offset + 1 ] = this._y;
array[ offset + 2 ] = this._z;
array[ offset + 3 ] = this._w;
return array;
},
onChange: function ( callback ) {
this.onChangeCallback = callback;
return this;
},
onChangeCallback: function () {}
};
Object.assign( THREE.Quaternion, {
slerp: function( qa, qb, qm, t ) {
return qm.copy( qa ).slerp( qb, t );
},
slerpFlat: function(
dst, dstOffset, src0, srcOffset0, src1, srcOffset1, t ) {
// fuzz-free, array-based Quaternion SLERP operation
var x0 = src0[ srcOffset0 + 0 ],
y0 = src0[ srcOffset0 + 1 ],
z0 = src0[ srcOffset0 + 2 ],
w0 = src0[ srcOffset0 + 3 ],
x1 = src1[ srcOffset1 + 0 ],
y1 = src1[ srcOffset1 + 1 ],
z1 = src1[ srcOffset1 + 2 ],
w1 = src1[ srcOffset1 + 3 ];
if ( w0 !== w1 || x0 !== x1 || y0 !== y1 || z0 !== z1 ) {
var s = 1 - t,
cos = x0 * x1 + y0 * y1 + z0 * z1 + w0 * w1,
dir = ( cos >= 0 ? 1 : - 1 ),
sqrSin = 1 - cos * cos;
// Skip the Slerp for tiny steps to avoid numeric problems:
if ( sqrSin > Number.EPSILON ) {
var sin = Math.sqrt( sqrSin ),
len = Math.atan2( sin, cos * dir );
s = Math.sin( s * len ) / sin;
t = Math.sin( t * len ) / sin;
}
var tDir = t * dir;
x0 = x0 * s + x1 * tDir;
y0 = y0 * s + y1 * tDir;
z0 = z0 * s + z1 * tDir;
w0 = w0 * s + w1 * tDir;
// Normalize in case we just did a lerp:
if ( s === 1 - t ) {
var f = 1 / Math.sqrt( x0 * x0 + y0 * y0 + z0 * z0 + w0 * w0 );
x0 *= f;
y0 *= f;
z0 *= f;
w0 *= f;
| }
dst[ dstOffset ] = x0;
dst[ dstOffset + 1 ] = y0;
dst[ dstOffset + 2 ] = z0;
dst[ dstOffset + 3 ] = w0;
}
} ); | }
|
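The slerp above interpolates along the shortest great arc between two unit quaternions, falling back to lerp when they are nearly parallel. A small Python check of the same formula (NumPy assumed; x, y, z, w component order as in THREE.Quaternion):

```python
import numpy as np

def slerp(qa, qb, t):
    # Same steps as THREE.Quaternion.slerp: flip to the shorter path, then
    # weight both endpoints by sines of the interpolated half-angle.
    qa, qb = np.asarray(qa, float), np.asarray(qb, float)
    cos_half = float(np.dot(qa, qb))
    if cos_half < 0:          # take the shorter arc
        qb, cos_half = -qb, -cos_half
    if cos_half >= 1.0:       # identical orientations
        return qa.copy()
    sin_half = np.sqrt(1.0 - cos_half * cos_half)
    if abs(sin_half) < 1e-3:  # nearly parallel: average instead
        return (qa + qb) * 0.5
    half = np.arctan2(sin_half, cos_half)
    return (np.sin((1 - t) * half) * qa + np.sin(t * half) * qb) / sin_half

# Halfway between identity and a 90-degree rotation about Z is 45 degrees.
q0 = np.array([0.0, 0.0, 0.0, 1.0])
q1 = np.array([0.0, 0.0, np.sin(np.pi / 4), np.cos(np.pi / 4)])
print(slerp(q0, q1, 0.5))  # ~ [0, 0, sin(pi/8), cos(pi/8)]
```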
cors.go | // Copyright © 2020 The Things Network Foundation, The Things Industries B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package webmiddleware
import "github.com/gorilla/handlers"
// CORSConfig is the configuration for the CORS middleware.
type CORSConfig struct {
AllowedHeaders []string
AllowedMethods []string
AllowedOrigins []string
ExposedHeaders []string
MaxAge int
AllowCredentials bool
}
func (c CORSConfig) options() []handlers.CORSOption {
var options []handlers.CORSOption
if len(c.AllowedHeaders) > 0 {
options = append(options, handlers.AllowedHeaders(c.AllowedHeaders))
}
if len(c.AllowedMethods) > 0 { | if len(c.AllowedOrigins) > 0 {
options = append(options, handlers.AllowedOrigins(c.AllowedOrigins))
}
if len(c.ExposedHeaders) > 0 {
options = append(options, handlers.ExposedHeaders(c.ExposedHeaders))
}
if c.MaxAge > 0 {
options = append(options, handlers.MaxAge(c.MaxAge))
}
if c.AllowCredentials {
options = append(options, handlers.AllowCredentials())
}
return options
}
// CORS returns a middleware that handles Cross-Origin Resource Sharing.
func CORS(config CORSConfig) MiddlewareFunc {
return MiddlewareFunc(handlers.CORS(config.options()...))
}
|
options = append(options, handlers.AllowedMethods(c.AllowedMethods))
}
|
options.go | /*
Copyright AppsCode Inc. and Contributors
Licensed under the AppsCode Community License 1.0.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://github.com/appscode/licenses/raw/1.0.0/AppsCode-Community-1.0.0.md
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package auth
import (
"os"
"github.com/pkg/errors"
"github.com/spf13/pflag"
)
const (
K8sTokenReviewerJwtEnv = "K8S_TOKEN_REVIEWER_JWT"
)
type K8sAuthenticatorOptions struct {
// Host must be a host string, a host:port pair
// or a URL to the base of the Kubernetes API server.
Host string
// PEM encoded CA cert for use by the TLS client used
// to talk with the Kubernetes API
CA string
// A service account JWT used to access the TokenReview API
// to validate other JWTs during login. If not set the JWT
// used for login will be used to access the API.
Token string
}
func NewK8sAuthOptions() *K8sAuthenticatorOptions |
func (o *K8sAuthenticatorOptions) AddFlags(fs *pflag.FlagSet) {
fs.StringVar(&o.Host, "auth.k8s-host", o.Host, "Host must be a host string, a host:port pair, or a URL to the base of the Kubernetes API server")
fs.StringVar(&o.CA, "auth.k8s-ca-cert", o.CA, "PEM encoded CA cert for use by the TLS client used to talk with the Kubernetes API")
fs.StringVar(&o.Token, "auth.k8s-token-reviewer-jwt", o.Token, "A service account JWT used to access the TokenReview API to validate other JWTs during login. If this flag is not provided, then the value from K8S_TOKEN_REVIEWER_JWT environment variable will be used")
}
func (o *K8sAuthenticatorOptions) Validate() []error {
var errs []error
if o.Host == "" {
errs = append(errs, errors.New("auth.k8s-host must be non empty"))
}
if o.Token == "" {
errs = append(errs, errors.New("env K8S_TOKEN_REVIEWER_JWT must be non empty"))
}
return errs
}
func (o *K8sAuthenticatorOptions) Apply() error {
return nil
}
| {
return &K8sAuthenticatorOptions{
Token: os.Getenv(K8sTokenReviewerJwtEnv),
}
} |
user_query.go | // Code generated by entc, DO NOT EDIT.
package ent
import (
"context"
"errors"
"fmt"
"math"
"github.com/facebookincubator/ent/dialect/sql"
"github.com/shanbay/gobay/testdata/ent/predicate"
"github.com/shanbay/gobay/testdata/ent/user"
)
// UserQuery is the builder for querying User entities.
type UserQuery struct {
config
limit *int
offset *int
order []Order
unique []string
predicates []predicate.User
// intermediate query.
sql *sql.Selector
}
// Where adds a new predicate for the builder.
func (uq *UserQuery) Where(ps ...predicate.User) *UserQuery {
uq.predicates = append(uq.predicates, ps...)
return uq
}
// Limit adds a limit step to the query.
func (uq *UserQuery) Limit(limit int) *UserQuery {
uq.limit = &limit
return uq
}
// Offset adds an offset step to the query.
func (uq *UserQuery) Offset(offset int) *UserQuery {
uq.offset = &offset
return uq
}
// Order adds an order step to the query.
func (uq *UserQuery) Order(o ...Order) *UserQuery {
uq.order = append(uq.order, o...)
return uq
}
// First returns the first User entity in the query. Returns *ErrNotFound when no user was found.
func (uq *UserQuery) First(ctx context.Context) (*User, error) {
us, err := uq.Limit(1).All(ctx)
if err != nil {
return nil, err
}
if len(us) == 0 {
return nil, &ErrNotFound{user.Label}
}
return us[0], nil
}
// FirstX is like First, but panics if an error occurs.
func (uq *UserQuery) FirstX(ctx context.Context) *User {
u, err := uq.First(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return u
}
// FirstID returns the first User id in the query. Returns *ErrNotFound when no id was found.
func (uq *UserQuery) FirstID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = uq.Limit(1).IDs(ctx); err != nil {
return
}
if len(ids) == 0 {
err = &ErrNotFound{user.Label}
return
}
return ids[0], nil
}
// FirstXID is like FirstID, but panics if an error occurs.
func (uq *UserQuery) FirstXID(ctx context.Context) int {
id, err := uq.FirstID(ctx)
if err != nil && !IsNotFound(err) {
panic(err)
}
return id
}
// Only returns the only User entity in the query, returns an error if not exactly one entity was returned.
func (uq *UserQuery) Only(ctx context.Context) (*User, error) {
us, err := uq.Limit(2).All(ctx)
if err != nil {
return nil, err
}
switch len(us) {
case 1:
return us[0], nil
case 0:
return nil, &ErrNotFound{user.Label}
default:
return nil, &ErrNotSingular{user.Label}
}
}
// OnlyX is like Only, but panics if an error occurs.
func (uq *UserQuery) OnlyX(ctx context.Context) *User {
u, err := uq.Only(ctx)
if err != nil {
panic(err)
}
return u
}
// OnlyID returns the only User id in the query, returns an error if not exactly one id was returned.
func (uq *UserQuery) OnlyID(ctx context.Context) (id int, err error) {
var ids []int
if ids, err = uq.Limit(2).IDs(ctx); err != nil {
return
}
switch len(ids) {
case 1:
id = ids[0]
case 0:
err = &ErrNotFound{user.Label}
default:
err = &ErrNotSingular{user.Label}
}
return
}
// OnlyXID is like OnlyID, but panics if an error occurs.
func (uq *UserQuery) OnlyXID(ctx context.Context) int {
id, err := uq.OnlyID(ctx)
if err != nil {
panic(err)
}
return id
}
// All executes the query and returns a list of Users.
func (uq *UserQuery) All(ctx context.Context) ([]*User, error) {
return uq.sqlAll(ctx)
}
// AllX is like All, but panics if an error occurs.
func (uq *UserQuery) AllX(ctx context.Context) []*User {
us, err := uq.All(ctx)
if err != nil {
panic(err)
}
return us
}
// IDs executes the query and returns a list of User ids.
func (uq *UserQuery) IDs(ctx context.Context) ([]int, error) {
var ids []int
if err := uq.Select(user.FieldID).Scan(ctx, &ids); err != nil {
return nil, err
}
return ids, nil
}
// IDsX is like IDs, but panics if an error occurs.
func (uq *UserQuery) IDsX(ctx context.Context) []int {
ids, err := uq.IDs(ctx)
if err != nil {
panic(err)
}
return ids
}
// Count returns the count of the given query.
func (uq *UserQuery) Count(ctx context.Context) (int, error) {
return uq.sqlCount(ctx)
}
// CountX is like Count, but panics if an error occurs.
func (uq *UserQuery) CountX(ctx context.Context) int {
count, err := uq.Count(ctx)
if err != nil {
panic(err)
}
return count
}
// Exist returns true if the query has elements in the graph.
func (uq *UserQuery) Exist(ctx context.Context) (bool, error) {
return uq.sqlExist(ctx)
}
// ExistX is like Exist, but panics if an error occurs.
func (uq *UserQuery) ExistX(ctx context.Context) bool {
exist, err := uq.Exist(ctx)
if err != nil {
panic(err)
}
return exist
}
// Clone returns a duplicate of the query builder, including all associated steps. It can be
// used to prepare common query builders and use them differently after the clone is made.
func (uq *UserQuery) Clone() *UserQuery {
return &UserQuery{
config: uq.config,
limit: uq.limit,
offset: uq.offset,
order: append([]Order{}, uq.order...),
unique: append([]string{}, uq.unique...),
predicates: append([]predicate.User{}, uq.predicates...),
// clone intermediate query.
sql: uq.sql.Clone(),
}
}
// GroupBy used to group vertices by one or more fields/columns.
// It is often used with aggregate functions, like: count, max, mean, min, sum.
//
// Example:
//
// var v []struct {
// Nickname string `json:"nickname,omitempty"`
// Count int `json:"count,omitempty"`
// }
//
// client.User.Query().
// GroupBy(user.FieldNickname).
// Aggregate(ent.Count()).
// Scan(ctx, &v)
//
func (uq *UserQuery) GroupBy(field string, fields ...string) *UserGroupBy {
group := &UserGroupBy{config: uq.config}
group.fields = append([]string{field}, fields...)
group.sql = uq.sqlQuery()
return group
}
// Select one or more fields from the given query.
//
// Example:
//
// var v []struct {
// Nickname string `json:"nickname,omitempty"`
// }
//
// client.User.Query().
// Select(user.FieldNickname).
// Scan(ctx, &v)
//
func (uq *UserQuery) Select(field string, fields ...string) *UserSelect {
selector := &UserSelect{config: uq.config}
selector.fields = append([]string{field}, fields...)
selector.sql = uq.sqlQuery()
return selector
}
func (uq *UserQuery) sqlAll(ctx context.Context) ([]*User, error) {
rows := &sql.Rows{}
selector := uq.sqlQuery()
if unique := uq.unique; len(unique) == 0 {
selector.Distinct()
}
query, args := selector.Query()
if err := uq.driver.Query(ctx, query, args, rows); err != nil {
return nil, err
}
defer rows.Close()
var us Users
if err := us.FromRows(rows); err != nil {
return nil, err
} | }
func (uq *UserQuery) sqlCount(ctx context.Context) (int, error) {
rows := &sql.Rows{}
selector := uq.sqlQuery()
unique := []string{user.FieldID}
if len(uq.unique) > 0 {
unique = uq.unique
}
selector.Count(sql.Distinct(selector.Columns(unique...)...))
query, args := selector.Query()
if err := uq.driver.Query(ctx, query, args, rows); err != nil {
return 0, err
}
defer rows.Close()
if !rows.Next() {
return 0, errors.New("ent: no rows found")
}
var n int
if err := rows.Scan(&n); err != nil {
return 0, fmt.Errorf("ent: failed reading count: %v", err)
}
return n, nil
}
func (uq *UserQuery) sqlExist(ctx context.Context) (bool, error) {
n, err := uq.sqlCount(ctx)
if err != nil {
return false, fmt.Errorf("ent: check existence: %v", err)
}
return n > 0, nil
}
func (uq *UserQuery) sqlQuery() *sql.Selector {
builder := sql.Dialect(uq.driver.Dialect())
t1 := builder.Table(user.Table)
selector := builder.Select(t1.Columns(user.Columns...)...).From(t1)
if uq.sql != nil {
selector = uq.sql
selector.Select(selector.Columns(user.Columns...)...)
}
for _, p := range uq.predicates {
p(selector)
}
for _, p := range uq.order {
p(selector)
}
if offset := uq.offset; offset != nil {
// limit is mandatory for offset clause. We start
// with default value, and override it below if needed.
selector.Offset(*offset).Limit(math.MaxInt32)
}
if limit := uq.limit; limit != nil {
selector.Limit(*limit)
}
return selector
}
// UserGroupBy is the builder for group-by User entities.
type UserGroupBy struct {
config
fields []string
fns []Aggregate
// intermediate query.
sql *sql.Selector
}
// Aggregate adds the given aggregation functions to the group-by query.
func (ugb *UserGroupBy) Aggregate(fns ...Aggregate) *UserGroupBy {
ugb.fns = append(ugb.fns, fns...)
return ugb
}
// Scan applies the group-by query and scan the result into the given value.
func (ugb *UserGroupBy) Scan(ctx context.Context, v interface{}) error {
return ugb.sqlScan(ctx, v)
}
// ScanX is like Scan, but panics if an error occurs.
func (ugb *UserGroupBy) ScanX(ctx context.Context, v interface{}) {
if err := ugb.Scan(ctx, v); err != nil {
panic(err)
}
}
// Strings returns list of strings from group-by. It is only allowed when querying group-by with one field.
func (ugb *UserGroupBy) Strings(ctx context.Context) ([]string, error) {
if len(ugb.fields) > 1 {
return nil, errors.New("ent: UserGroupBy.Strings is not achievable when grouping more than 1 field")
}
var v []string
if err := ugb.Scan(ctx, &v); err != nil {
return nil, err
}
return v, nil
}
// StringsX is like Strings, but panics if an error occurs.
func (ugb *UserGroupBy) StringsX(ctx context.Context) []string {
v, err := ugb.Strings(ctx)
if err != nil {
panic(err)
}
return v
}
// Ints returns list of ints from group-by. It is only allowed when querying group-by with one field.
func (ugb *UserGroupBy) Ints(ctx context.Context) ([]int, error) {
if len(ugb.fields) > 1 {
return nil, errors.New("ent: UserGroupBy.Ints is not achievable when grouping more than 1 field")
}
var v []int
if err := ugb.Scan(ctx, &v); err != nil {
return nil, err
}
return v, nil
}
// IntsX is like Ints, but panics if an error occurs.
func (ugb *UserGroupBy) IntsX(ctx context.Context) []int {
v, err := ugb.Ints(ctx)
if err != nil {
panic(err)
}
return v
}
// Float64s returns list of float64s from group-by. It is only allowed when querying group-by with one field.
func (ugb *UserGroupBy) Float64s(ctx context.Context) ([]float64, error) {
if len(ugb.fields) > 1 {
return nil, errors.New("ent: UserGroupBy.Float64s is not achievable when grouping more than 1 field")
}
var v []float64
if err := ugb.Scan(ctx, &v); err != nil {
return nil, err
}
return v, nil
}
// Float64sX is like Float64s, but panics if an error occurs.
func (ugb *UserGroupBy) Float64sX(ctx context.Context) []float64 {
v, err := ugb.Float64s(ctx)
if err != nil {
panic(err)
}
return v
}
// Bools returns list of bools from group-by. It is only allowed when querying group-by with one field.
func (ugb *UserGroupBy) Bools(ctx context.Context) ([]bool, error) {
if len(ugb.fields) > 1 {
return nil, errors.New("ent: UserGroupBy.Bools is not achievable when grouping more than 1 field")
}
var v []bool
if err := ugb.Scan(ctx, &v); err != nil {
return nil, err
}
return v, nil
}
// BoolsX is like Bools, but panics if an error occurs.
func (ugb *UserGroupBy) BoolsX(ctx context.Context) []bool {
v, err := ugb.Bools(ctx)
if err != nil {
panic(err)
}
return v
}
func (ugb *UserGroupBy) sqlScan(ctx context.Context, v interface{}) error {
rows := &sql.Rows{}
query, args := ugb.sqlQuery().Query()
if err := ugb.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
func (ugb *UserGroupBy) sqlQuery() *sql.Selector {
selector := ugb.sql
columns := make([]string, 0, len(ugb.fields)+len(ugb.fns))
columns = append(columns, ugb.fields...)
for _, fn := range ugb.fns {
columns = append(columns, fn(selector))
}
return selector.Select(columns...).GroupBy(ugb.fields...)
}
// UserSelect is the builder for select fields of User entities.
type UserSelect struct {
config
fields []string
// intermediate queries.
sql *sql.Selector
}
// Scan applies the selector query and scan the result into the given value.
func (us *UserSelect) Scan(ctx context.Context, v interface{}) error {
return us.sqlScan(ctx, v)
}
// ScanX is like Scan, but panics if an error occurs.
func (us *UserSelect) ScanX(ctx context.Context, v interface{}) {
if err := us.Scan(ctx, v); err != nil {
panic(err)
}
}
// Strings returns list of strings from selector. It is only allowed when selecting one field.
func (us *UserSelect) Strings(ctx context.Context) ([]string, error) {
if len(us.fields) > 1 {
return nil, errors.New("ent: UserSelect.Strings is not achievable when selecting more than 1 field")
}
var v []string
if err := us.Scan(ctx, &v); err != nil {
return nil, err
}
return v, nil
}
// StringsX is like Strings, but panics if an error occurs.
func (us *UserSelect) StringsX(ctx context.Context) []string {
v, err := us.Strings(ctx)
if err != nil {
panic(err)
}
return v
}
// Ints returns list of ints from selector. It is only allowed when selecting one field.
func (us *UserSelect) Ints(ctx context.Context) ([]int, error) {
if len(us.fields) > 1 {
return nil, errors.New("ent: UserSelect.Ints is not achievable when selecting more than 1 field")
}
var v []int
if err := us.Scan(ctx, &v); err != nil {
return nil, err
}
return v, nil
}
// IntsX is like Ints, but panics if an error occurs.
func (us *UserSelect) IntsX(ctx context.Context) []int {
v, err := us.Ints(ctx)
if err != nil {
panic(err)
}
return v
}
// Float64s returns list of float64s from selector. It is only allowed when selecting one field.
func (us *UserSelect) Float64s(ctx context.Context) ([]float64, error) {
if len(us.fields) > 1 {
return nil, errors.New("ent: UserSelect.Float64s is not achievable when selecting more than 1 field")
}
var v []float64
if err := us.Scan(ctx, &v); err != nil {
return nil, err
}
return v, nil
}
// Float64sX is like Float64s, but panics if an error occurs.
func (us *UserSelect) Float64sX(ctx context.Context) []float64 {
v, err := us.Float64s(ctx)
if err != nil {
panic(err)
}
return v
}
// Bools returns list of bools from selector. It is only allowed when selecting one field.
func (us *UserSelect) Bools(ctx context.Context) ([]bool, error) {
if len(us.fields) > 1 {
return nil, errors.New("ent: UserSelect.Bools is not achievable when selecting more than 1 field")
}
var v []bool
if err := us.Scan(ctx, &v); err != nil {
return nil, err
}
return v, nil
}
// BoolsX is like Bools, but panics if an error occurs.
func (us *UserSelect) BoolsX(ctx context.Context) []bool {
v, err := us.Bools(ctx)
if err != nil {
panic(err)
}
return v
}
func (us *UserSelect) sqlScan(ctx context.Context, v interface{}) error {
rows := &sql.Rows{}
query, args := us.sqlQuery().Query()
if err := us.driver.Query(ctx, query, args, rows); err != nil {
return err
}
defer rows.Close()
return sql.ScanSlice(rows, v)
}
func (us *UserSelect) sqlQuery() sql.Querier {
view := "user_view"
return sql.Dialect(us.driver.Dialect()).
Select(us.fields...).From(us.sql.As(view))
} | us.config(uq.config)
return us, nil |
instantiate.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Instantiate a workflow template."""
import uuid
from googlecloudsdk.api_lib.dataproc import dataproc as dp
from googlecloudsdk.api_lib.dataproc import util
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.dataproc import flags
from googlecloudsdk.core import log
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class Instantiate(base.CreateCommand):
"""Instantiate a workflow template."""
@staticmethod
def Args(parser):
flags.AddTemplateFlag(parser, 'run')
flags.AddTimeoutFlag(parser, default='35m')
base.ASYNC_FLAG.AddToParser(parser)
def | (self, args):
dataproc = dp.Dataproc(self.ReleaseTrack())
msgs = dataproc.messages
template = util.ParseWorkflowTemplates(args.template, dataproc)
instantiate_request = dataproc.messages.InstantiateWorkflowTemplateRequest()
instantiate_request.instanceId = uuid.uuid4().hex # request UUID
request = msgs.DataprocProjectsRegionsWorkflowTemplatesInstantiateRequest(
instantiateWorkflowTemplateRequest=instantiate_request,
name=template.RelativeName())
operation = dataproc.client.projects_regions_workflowTemplates.Instantiate(
request)
if args.async:
log.status.Print('Instantiating [{0}] with operation [{1}].'.format(
template.Name(), operation.name))
return
operation = util.WaitForWorkflowTemplateOperation(
dataproc, operation, timeout_s=args.timeout)
return operation
| Run |
canvasModel.ts | // Copyright (C) 2019-2020 Intel Corporation
//
// SPDX-License-Identifier: MIT
import { MasterImpl } from './master';
export interface Size {
width: number;
height: number;
}
export interface Position {
x: number;
y: number;
}
export interface Geometry {
image: Size;
canvas: Size;
grid: Size;
top: number;
left: number;
scale: number;
offset: number;
angle: number;
}
export interface FocusData {
clientID: number;
padding: number;
}
export interface ActiveElement {
clientID: number | null;
attributeID: number | null;
}
export enum RectDrawingMethod {
CLASSIC = 'By 2 points',
EXTREME_POINTS = 'By 4 points'
}
export interface DrawData {
enabled: boolean;
shapeType?: string;
rectDrawingMethod?: RectDrawingMethod;
numberOfPoints?: number;
initialState?: any;
crosshair?: boolean;
}
export interface EditData {
enabled: boolean;
state: any;
pointID: number;
}
export interface GroupData {
enabled: boolean;
}
export interface MergeData {
enabled: boolean;
}
export interface SplitData {
enabled: boolean;
}
export enum FrameZoom {
MIN = 0.1,
MAX = 10,
}
export enum UpdateReasons {
IMAGE_CHANGED = 'image_changed',
IMAGE_ZOOMED = 'image_zoomed',
IMAGE_FITTED = 'image_fitted',
IMAGE_MOVED = 'image_moved',
GRID_UPDATED = 'grid_updated',
SET_Z_LAYER = 'set_z_layer',
OBJECTS_UPDATED = 'objects_updated',
SHAPE_ACTIVATED = 'shape_activated',
SHAPE_FOCUSED = 'shape_focused',
FITTED_CANVAS = 'fitted_canvas',
DRAW = 'draw',
MERGE = 'merge',
SPLIT = 'split',
GROUP = 'group',
SELECT = 'select',
CANCEL = 'cancel',
DRAG_CANVAS = 'drag_canvas',
ZOOM_CANVAS = 'ZOOM_CANVAS',
}
export enum Mode {
IDLE = 'idle',
DRAG = 'drag',
RESIZE = 'resize',
DRAW = 'draw',
EDIT = 'edit',
MERGE = 'merge',
SPLIT = 'split',
GROUP = 'group',
DRAG_CANVAS = 'drag_canvas',
ZOOM_CANVAS = 'zoom_canvas',
}
export interface CanvasModel {
readonly image: HTMLImageElement | null;
readonly objects: any[];
readonly zLayer: number | null;
readonly gridSize: Size;
readonly focusData: FocusData;
readonly activeElement: ActiveElement;
readonly drawData: DrawData;
readonly mergeData: MergeData;
readonly splitData: SplitData;
readonly groupData: GroupData;
readonly selected: any;
geometry: Geometry;
mode: Mode;
setZLayer(zLayer: number | null): void;
zoom(x: number, y: number, direction: number): void;
move(topOffset: number, leftOffset: number): void;
setup(frameData: any, objectStates: any[]): void;
activate(clientID: number | null, attributeID: number | null): void;
rotate(rotationAngle: number): void;
focus(clientID: number, padding: number): void;
fit(): void;
grid(stepX: number, stepY: number): void;
draw(drawData: DrawData): void;
group(groupData: GroupData): void;
split(splitData: SplitData): void;
merge(mergeData: MergeData): void;
select(objectState: any): void;
fitCanvas(width: number, height: number): void;
dragCanvas(enable: boolean): void;
zoomCanvas(enable: boolean): void;
cancel(): void;
}
export class | extends MasterImpl implements CanvasModel {
private data: {
activeElement: ActiveElement;
angle: number;
canvasSize: Size;
image: HTMLImageElement | null;
imageID: number | null;
imageOffset: number;
imageSize: Size;
focusData: FocusData;
gridSize: Size;
left: number;
objects: any[];
scale: number;
top: number;
zLayer: number | null;
drawData: DrawData;
mergeData: MergeData;
groupData: GroupData;
splitData: SplitData;
selected: any;
mode: Mode;
};
public constructor() {
super();
this.data = {
activeElement: {
clientID: null,
attributeID: null,
},
angle: 0,
canvasSize: {
height: 0,
width: 0,
},
image: null,
imageID: null,
imageOffset: 0,
imageSize: {
height: 0,
width: 0,
},
focusData: {
clientID: 0,
padding: 0,
},
gridSize: {
height: 100,
width: 100,
},
left: 0,
objects: [],
scale: 1,
top: 0,
zLayer: null,
drawData: {
enabled: false,
initialState: null,
},
mergeData: {
enabled: false,
},
groupData: {
enabled: false,
},
splitData: {
enabled: false,
},
selected: null,
mode: Mode.IDLE,
};
}
public setZLayer(zLayer: number | null): void {
this.data.zLayer = zLayer;
this.notify(UpdateReasons.SET_Z_LAYER);
}
public zoom(x: number, y: number, direction: number): void {
const oldScale: number = this.data.scale;
const newScale: number = direction > 0 ? oldScale * 6 / 5 : oldScale * 5 / 6;
this.data.scale = Math.min(Math.max(newScale, FrameZoom.MIN), FrameZoom.MAX);
const { angle } = this.data;
const multiplier = Math.sin(angle * Math.PI / 180) + Math.cos(angle * Math.PI / 180);
if ((angle / 90) % 2) {
// 90, 270, ..
this.data.top += multiplier * ((x - this.data.imageSize.width / 2)
* (oldScale / this.data.scale - 1)) * this.data.scale;
this.data.left -= multiplier * ((y - this.data.imageSize.height / 2)
* (oldScale / this.data.scale - 1)) * this.data.scale;
} else {
this.data.left += multiplier * ((x - this.data.imageSize.width / 2)
* (oldScale / this.data.scale - 1)) * this.data.scale;
this.data.top += multiplier * ((y - this.data.imageSize.height / 2)
* (oldScale / this.data.scale - 1)) * this.data.scale;
}
this.notify(UpdateReasons.IMAGE_ZOOMED);
}
public move(topOffset: number, leftOffset: number): void {
this.data.top += topOffset;
this.data.left += leftOffset;
this.notify(UpdateReasons.IMAGE_MOVED);
}
public fitCanvas(width: number, height: number): void {
this.data.canvasSize.height = height;
this.data.canvasSize.width = width;
this.data.imageOffset = Math.floor(Math.max(
this.data.canvasSize.height / FrameZoom.MIN,
this.data.canvasSize.width / FrameZoom.MIN,
));
this.notify(UpdateReasons.FITTED_CANVAS);
this.notify(UpdateReasons.OBJECTS_UPDATED);
}
public dragCanvas(enable: boolean): void {
if (enable && this.data.mode !== Mode.IDLE) {
throw Error(`Canvas is busy. Action: ${this.data.mode}`);
}
if (!enable && this.data.mode !== Mode.DRAG_CANVAS) {
throw Error(`Canvas is not in the drag mode. Action: ${this.data.mode}`);
}
this.data.mode = enable ? Mode.DRAG_CANVAS : Mode.IDLE;
this.notify(UpdateReasons.DRAG_CANVAS);
}
public zoomCanvas(enable: boolean): void {
if (enable && this.data.mode !== Mode.IDLE) {
throw Error(`Canvas is busy. Action: ${this.data.mode}`);
}
if (!enable && this.data.mode !== Mode.ZOOM_CANVAS) {
throw Error(`Canvas is not in the zoom mode. Action: ${this.data.mode}`);
}
this.data.mode = enable ? Mode.ZOOM_CANVAS : Mode.IDLE;
this.notify(UpdateReasons.ZOOM_CANVAS);
}
public setup(frameData: any, objectStates: any[]): void {
if (frameData.number === this.data.imageID) {
this.data.objects = objectStates;
this.notify(UpdateReasons.OBJECTS_UPDATED);
return;
}
this.data.imageID = frameData.number;
frameData.data(
(): void => {
this.data.image = null;
this.notify(UpdateReasons.IMAGE_CHANGED);
},
).then((data: HTMLImageElement): void => {
if (frameData.number !== this.data.imageID) {
// already another image
return;
}
this.data.imageSize = {
height: (frameData.height as number),
width: (frameData.width as number),
};
this.data.image = data;
this.notify(UpdateReasons.IMAGE_CHANGED);
this.data.objects = objectStates;
this.notify(UpdateReasons.OBJECTS_UPDATED);
}).catch((exception: any): void => {
throw exception;
});
}
public activate(clientID: number | null, attributeID: number | null): void {
if (this.data.mode !== Mode.IDLE && clientID !== null) {
// Exception or just return?
throw Error(`Canvas is busy. Action: ${this.data.mode}`);
}
this.data.activeElement = {
clientID,
attributeID,
};
this.notify(UpdateReasons.SHAPE_ACTIVATED);
}
public rotate(rotationAngle: number): void {
if (this.data.angle !== rotationAngle) {
this.data.angle = (360 + Math.floor((rotationAngle) / 90) * 90) % 360;
this.fit();
}
}
public focus(clientID: number, padding: number): void {
this.data.focusData = {
clientID,
padding,
};
this.notify(UpdateReasons.SHAPE_FOCUSED);
}
public fit(): void {
const { angle } = this.data;
if ((angle / 90) % 2) {
// 90, 270, ..
this.data.scale = Math.min(
this.data.canvasSize.width / this.data.imageSize.height,
this.data.canvasSize.height / this.data.imageSize.width,
);
} else {
this.data.scale = Math.min(
this.data.canvasSize.width / this.data.imageSize.width,
this.data.canvasSize.height / this.data.imageSize.height,
);
}
this.data.scale = Math.min(
Math.max(this.data.scale, FrameZoom.MIN),
FrameZoom.MAX,
);
this.data.top = (this.data.canvasSize.height / 2 - this.data.imageSize.height / 2);
this.data.left = (this.data.canvasSize.width / 2 - this.data.imageSize.width / 2);
this.notify(UpdateReasons.IMAGE_FITTED);
}
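    // Worked example (illustrative): a 2000x1000 image in a 1600x900 canvas
    // fits at min(1600 / 2000, 900 / 1000) = 0.8 when unrotated, but at
    // min(1600 / 1000, 900 / 2000) = 0.45 when rotated by 90 or 270 degrees,
    // because the image's width and height swap roles against the canvas.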
public grid(stepX: number, stepY: number): void {
this.data.gridSize = {
height: stepY,
width: stepX,
};
this.notify(UpdateReasons.GRID_UPDATED);
}
public draw(drawData: DrawData): void {
if (![Mode.IDLE, Mode.DRAW].includes(this.data.mode)) {
throw Error(`Canvas is busy. Action: ${this.data.mode}`);
}
if (drawData.enabled) {
if (this.data.drawData.enabled) {
throw new Error('Drawing has been already started');
} else if (!drawData.shapeType && !drawData.initialState) {
throw new Error('A shape type is not specified');
} else if (typeof (drawData.numberOfPoints) !== 'undefined') {
if (drawData.shapeType === 'polygon' && drawData.numberOfPoints < 3) {
throw new Error('A polygon consists of at least 3 points');
} else if (drawData.shapeType === 'polyline' && drawData.numberOfPoints < 2) {
throw new Error('A polyline consists of at least 2 points');
}
}
}
this.data.drawData = { ...drawData };
if (this.data.drawData.initialState) {
this.data.drawData.shapeType = this.data.drawData.initialState.shapeType;
}
this.notify(UpdateReasons.DRAW);
}
public split(splitData: SplitData): void {
if (![Mode.IDLE, Mode.SPLIT].includes(this.data.mode)) {
throw Error(`Canvas is busy. Action: ${this.data.mode}`);
}
if (this.data.splitData.enabled && splitData.enabled) {
return;
}
if (!this.data.splitData.enabled && !splitData.enabled) {
return;
}
this.data.splitData = { ...splitData };
this.notify(UpdateReasons.SPLIT);
}
public group(groupData: GroupData): void {
if (![Mode.IDLE, Mode.GROUP].includes(this.data.mode)) {
throw Error(`Canvas is busy. Action: ${this.data.mode}`);
}
if (this.data.groupData.enabled && groupData.enabled) {
return;
}
if (!this.data.groupData.enabled && !groupData.enabled) {
return;
}
this.data.groupData = { ...groupData };
this.notify(UpdateReasons.GROUP);
}
public merge(mergeData: MergeData): void {
if (![Mode.IDLE, Mode.MERGE].includes(this.data.mode)) {
throw Error(`Canvas is busy. Action: ${this.data.mode}`);
}
if (this.data.mergeData.enabled && mergeData.enabled) {
return;
}
if (!this.data.mergeData.enabled && !mergeData.enabled) {
return;
}
this.data.mergeData = { ...mergeData };
this.notify(UpdateReasons.MERGE);
}
public select(objectState: any): void {
this.data.selected = objectState;
this.notify(UpdateReasons.SELECT);
this.data.selected = null;
}
public cancel(): void {
this.notify(UpdateReasons.CANCEL);
}
public get geometry(): Geometry {
return {
angle: this.data.angle,
canvas: { ...this.data.canvasSize },
image: { ...this.data.imageSize },
grid: { ...this.data.gridSize },
left: this.data.left,
offset: this.data.imageOffset,
scale: this.data.scale,
top: this.data.top,
};
}
public set geometry(geometry: Geometry) {
this.data.angle = geometry.angle;
this.data.canvasSize = { ...geometry.canvas };
this.data.imageSize = { ...geometry.image };
this.data.gridSize = { ...geometry.grid };
this.data.left = geometry.left;
this.data.top = geometry.top;
this.data.imageOffset = geometry.offset;
this.data.scale = geometry.scale;
this.data.imageOffset = Math.floor(Math.max(
this.data.canvasSize.height / FrameZoom.MIN,
this.data.canvasSize.width / FrameZoom.MIN,
));
}
public get zLayer(): number | null {
return this.data.zLayer;
}
public get image(): HTMLImageElement | null {
return this.data.image;
}
public get objects(): any[] {
if (this.data.zLayer !== null) {
return this.data.objects
.filter((object: any): boolean => object.zOrder <= this.data.zLayer);
}
return this.data.objects;
}
public get gridSize(): Size {
return { ...this.data.gridSize };
}
public get focusData(): FocusData {
return { ...this.data.focusData };
}
public get activeElement(): ActiveElement {
return { ...this.data.activeElement };
}
public get drawData(): DrawData {
return { ...this.data.drawData };
}
public get mergeData(): MergeData {
return { ...this.data.mergeData };
}
public get splitData(): SplitData {
return { ...this.data.splitData };
}
public get groupData(): GroupData {
return { ...this.data.groupData };
}
public get selected(): any {
return this.data.selected;
}
public set mode(value: Mode) {
this.data.mode = value;
}
public get mode(): Mode {
return this.data.mode;
}
}
parser_php8_1_test.go | package php8_test
import (
"testing"
"github.com/VKCOM/php-parser/internal/tester"
)
func TestReadonlyModifier(t *testing.T) {
suite := tester.NewParserDumpTestSuite(t)
suite.UsePHP8()
suite.Code = `<?php
class Foo {
readonly string $a;
private readonly string $a;
private string $a;
private readonly $a = 100;
public function __construct(
readonly string $a,
private readonly string $a,
private string $a,
private readonly $a = 100,
) {}
}
`
suite.Expected = `&ast.Root{
Stmts: []ast.Vertex{
&ast.StmtClass{
Name: &ast.Identifier{
Val: []byte("Foo"),
},
Stmts: []ast.Vertex{
&ast.StmtPropertyList{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("readonly"),
},
},
Type: &ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("string"),
},
},
},
Props: []ast.Vertex{
&ast.StmtProperty{
Var: &ast.ExprVariable{
Name: &ast.Identifier{
Val: []byte("$a"),
},
},
},
},
},
&ast.StmtPropertyList{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("private"),
},
&ast.Identifier{
Val: []byte("readonly"),
},
},
Type: &ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("string"),
},
},
},
Props: []ast.Vertex{
&ast.StmtProperty{
Var: &ast.ExprVariable{
Name: &ast.Identifier{
Val: []byte("$a"), | },
&ast.StmtPropertyList{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("private"),
},
},
Type: &ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("string"),
},
},
},
Props: []ast.Vertex{
&ast.StmtProperty{
Var: &ast.ExprVariable{
Name: &ast.Identifier{
Val: []byte("$a"),
},
},
},
},
},
&ast.StmtPropertyList{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("private"),
},
&ast.Identifier{
Val: []byte("readonly"),
},
},
Props: []ast.Vertex{
&ast.StmtProperty{
Var: &ast.ExprVariable{
Name: &ast.Identifier{
Val: []byte("$a"),
},
},
Expr: &ast.ScalarLnumber{
Val: []byte("100"),
},
},
},
},
&ast.StmtClassMethod{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("public"),
},
},
Name: &ast.Identifier{
Val: []byte("__construct"),
},
Params: []ast.Vertex{
&ast.Parameter{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("readonly"),
},
},
Type: &ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("string"),
},
},
},
Var: &ast.ExprVariable{
Name: &ast.Identifier{
Val: []byte("$a"),
},
},
},
&ast.Parameter{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("private"),
},
&ast.Identifier{
Val: []byte("readonly"),
},
},
Type: &ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("string"),
},
},
},
Var: &ast.ExprVariable{
Name: &ast.Identifier{
Val: []byte("$a"),
},
},
},
&ast.Parameter{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("private"),
},
},
Type: &ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("string"),
},
},
},
Var: &ast.ExprVariable{
Name: &ast.Identifier{
Val: []byte("$a"),
},
},
},
&ast.Parameter{
Modifiers: []ast.Vertex{
&ast.Identifier{
Val: []byte("private"),
},
&ast.Identifier{
Val: []byte("readonly"),
},
},
Var: &ast.ExprVariable{
Name: &ast.Identifier{
Val: []byte("$a"),
},
},
DefaultValue: &ast.ScalarLnumber{
Val: []byte("100"),
},
},
},
Stmt: &ast.StmtStmtList{
Stmts: []ast.Vertex{},
},
},
},
},
},
},`
suite.Run()
}
func TestNeverType(t *testing.T) {
suite := tester.NewParserDumpTestSuite(t)
suite.UsePHP8()
suite.Code = `<?php
function f(): never {}
`
suite.Expected = `&ast.Root{
Stmts: []ast.Vertex{
&ast.StmtFunction{
Name: &ast.Identifier{
Val: []byte("f"),
},
ReturnType: &ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("never"),
},
},
},
Stmts: []ast.Vertex{},
},
},
},`
suite.Run()
}
func TestEnum(t *testing.T) {
suite := tester.NewParserDumpTestSuite(t)
suite.UsePHP8()
suite.Code = `<?php
enum A {}
enum B implements Bar, Baz {
}
enum C: int implements Bar {}
`
suite.Expected = `&ast.Root{
Stmts: []ast.Vertex{
&ast.StmtEnum{
Name: &ast.Identifier{
Val: []byte("A"),
},
Stmts: []ast.Vertex{},
},
&ast.StmtEnum{
Name: &ast.Identifier{
Val: []byte("B"),
},
Implements: []ast.Vertex{
&ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("Bar"),
},
},
},
&ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("Baz"),
},
},
},
},
Stmts: []ast.Vertex{},
},
&ast.StmtEnum{
Name: &ast.Identifier{
Val: []byte("C"),
},
Type: &ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("int"),
},
},
},
Implements: []ast.Vertex{
&ast.Name{
Parts: []ast.Vertex{
&ast.NamePart{
Val: []byte("Bar"),
},
},
},
},
Stmts: []ast.Vertex{},
},
},
},`
suite.Run()
}
iommu.go | /*
Copyright 2018-2021 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package iommu
import (
"fmt"
"io/ioutil"
"sigs.k8s.io/node-feature-discovery/source"
)
const Name = "iommu"
// iommuSource implements the LabelSource interface.
type iommuSource struct{}
func (s *iommuSource) Name() string { return Name }
// Singleton source instance
var (
src iommuSource
_ source.LabelSource = &src
)
// Priority method of the LabelSource interface
func (s *iommuSource) Priority() int { return 0 }
func (s *iommuSource) Discover() (source.FeatureLabels, error) {
features := source.FeatureLabels{}
// Check if any iommu devices are available
devices, err := ioutil.ReadDir(source.SysfsDir.Path("class/iommu/"))
if err != nil {
return nil, fmt.Errorf("failed to check for IOMMU support: %v", err)
}
if len(devices) > 0 {
features["enabled"] = true
}
return features, nil
}
func init() {
source.Register(&src)
}
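
// Illustrative usage sketch (not part of the original file): from within the
// package, the source would be exercised through the Discover method shown
// above.
//
//	labels, err := src.Discover()
//	if err != nil {
//	    // /sys/class/iommu could not be read (e.g. the directory is absent)
//	}
//	_, iommuEnabled := labels["enabled"] // set only if IOMMU devices were found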
|
nn.py | """Neural network operations."""
from __future__ import absolute_import as _abs
from ...expr import TupleWrapper
from . import _make
def conv2d(data,
weight,
strides=(1, 1),
padding=(0, 0),
dilation=(1, 1),
groups=1,
channels=None,
kernel_size=None,
data_layout="NCHW",
kernel_layout="OIHW",
out_layout="",
out_dtype=""):
r"""2D convolution.
This operator takes the weight as the convolution kernel
and convolves it with data to produce an output.
In the default case, where the data_layout is `NCHW`
and kernel_layout is `OIHW`, conv2d takes in
a data Tensor with shape `(batch_size, in_channels, height, width)`,
and a weight Tensor with shape `(channels, in_channels, kernel_size[0], kernel_size[1])`
to produce an output Tensor with the following rule:
.. math::
\mbox{out}[b, c, y, x] = \sum_{dy, dx, k}
\mbox{data}[b, k, \mbox{strides}[0] * y + dy, \mbox{strides}[1] * x + dx] *
\mbox{weight}[c, k, dy, dx]
Padding and dilation are applied to data and weight respectively before the computation.
This operator accepts data layout specification.
Semantically, the operator will convert the layout to the canonical layout
(`NCHW` for data and `OIHW` for weight), perform the computation,
then convert to the out_layout.
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
weight : tvm.relay.Expr
The weight expressions.
strides : tuple of int, optional
        The strides of convolution.
padding : tuple of int, optional
The padding of convolution on both sides of inputs before convolution.
dilation : tuple of int, optional
Specifies the dilation rate to be used for dilated convolution.
groups : int, optional
Number of groups for grouped convolution.
channels : int, optional
Number of output channels of this convolution.
kernel_size : tuple of int, optional
        The spatial dimensions of the convolution kernel.
data_layout : str, optional
Layout of the input.
kernel_layout : str, optional
Layout of the weight.
out_layout : str, optional
Layout of the output, by default, out_layout is the same as data_layout
out_dtype : str, optional
Specifies the output data type for mixed precision conv2d.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.conv2d(data, weight, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, out_dtype)
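
# Illustrative sketch (not part of the original module): building a conv2d
# expression; assumes the standard `tvm.relay` front-end helper `relay.var`
# is available, as in typical Relay graph construction.
def _example_conv2d():
    from tvm import relay  # local import keeps the sketch self-contained
    data = relay.var("data", shape=(1, 3, 224, 224))   # NCHW layout
    weight = relay.var("weight", shape=(64, 3, 3, 3))  # OIHW layout
    # A 3x3 kernel with stride 1 and padding 1 preserves the spatial size,
    # so the output shape is (1, 64, 224, 224).
    return conv2d(data, weight, strides=(1, 1), padding=(1, 1),
                  channels=64, kernel_size=(3, 3))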
def conv2d_transpose(data,
weight,
strides=(1, 1),
padding=(0, 0),
dilation=(1, 1),
groups=1,
channels=None,
kernel_size=None,
data_layout="NCHW",
kernel_layout="OIHW",
output_padding=(0, 0),
out_dtype=""):
"""Two dimensional transposed convolution operator.
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
weight : tvm.relay.Expr
The weight expressions.
strides : Tuple[int], optional
        The strides of convolution.
padding : Tuple[int], optional
The padding of convolution on both sides of inputs.
dilation : Tuple[int], optional
Specifies the dilation rate to be used for dilated convolution.
groups : int, optional
        Number of groups for grouped convolution.
    channels : int, optional
        Number of output channels of this convolution.
    kernel_size : tuple of int, optional
        The spatial dimensions of the convolution kernel.
data_layout : str, optional
Layout of the input.
kernel_layout : str, optional
Layout of the weight.
output_padding : Tuple[int], optional
Additional zero-padding to be added to one side of the output.
out_dtype : str, optional
Specifies the output data type for mixed precision conv2d.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.conv2d_transpose(data, weight, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
kernel_layout, output_padding, out_dtype)
def softmax(data, axis=-1):
r"""Computes softmax.
.. math:: \text{softmax}(x)_i = \frac{exp(x_i)}{\sum_j exp(x_j)}
.. note::
This operator can be optimized away for inference.
Parameters
----------
data: tvm.relay.Expr
The input data to the operator.
axis: int, optional
The axis to sum over when computing softmax
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.softmax(data, axis)
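
# Worked example (illustrative): for x = [1.0, 2.0, 3.0],
# exp(x) = [2.718, 7.389, 20.086], which sums to ~30.193, so
# softmax(x) ~= [0.090, 0.245, 0.665], adding up to 1.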
def log_softmax(data, axis=-1):
r"""Computes log softmax.
.. math::
\text{log_softmax}(x)_i = \log \frac{exp(x_i)}{\sum_j exp(x_j)}
.. note::
This operator can be optimized away for inference.
Parameters
----------
data: tvm.relay.Expr
The input data to the operator.
axis: int
The axis to sum over when computing softmax
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.log_softmax(data, axis)
def max_pool2d(data,
pool_size=(1, 1),
strides=(1, 1),
padding=(0, 0),
layout="NCHW",
ceil_mode=False):
r"""2D maximum pooling operator.
This operator takes data as input and does 2D max value calculation
with in pool_size sized window by striding defined by stride
    In the default case, where the data_layout is `NCHW`,
    it takes a data Tensor with shape `(batch_size, in_channels, height, width)`
    and produces an output Tensor according to the following rule:
with data of shape (b, c, h, w) and pool_size (kh, kw)
.. math::
\mbox{out}(b, c, y, x) = \max_{m=0, \ldots, kh-1} \max_{n=0, \ldots, kw-1}
\mbox{data}(b, c, \mbox{stride}[0] * y + m, \mbox{stride}[1] * x + n)
Padding is applied to data before the computation.
ceil_mode is used to take ceil or floor while computing out shape.
This operator accepts data layout specification.
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
    pool_size : tuple of int, optional
        The size of the pooling window.
    strides : tuple of int, optional
The strides of pooling.
padding : tuple of int, optional
The padding for pooling.
layout : str, optional
Layout of the input.
ceil_mode : bool, optional
To enable or disable ceil while pooling.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.max_pool2d(data, pool_size, strides, padding,
layout, ceil_mode)
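
# Shape sketch (illustrative): an NCHW input of (1, 16, 32, 32) with
# pool_size=(2, 2), strides=(2, 2) and no padding yields (1, 16, 16, 16);
# with ceil_mode=True a 33x33 spatial input would round up to 17x17
# instead of truncating to 16x16.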
def avg_pool2d(data,
pool_size=(1, 1),
strides=(1, 1),
padding=(0, 0),
layout="NCHW",
ceil_mode=False,
count_include_pad=False):
r"""2D average pooling operator.
This operator takes data as input and does 2D average value calculation
with in pool_size sized window by striding defined by stride
    In the default case, where the data_layout is `NCHW`,
    it takes a data Tensor with shape `(batch_size, in_channels, height, width)`
    and produces an output Tensor according to the following rule:
with data of shape (b, c, h, w), pool_size (kh, kw)
.. math::
\mbox{out}(b, c, y, x) = \frac{1}{kh * kw} \sum_{m=0}^{kh-1} \sum_{n=0}^{kw-1}
\mbox{data}(b, c, \mbox{stride}[0] * y + m, \mbox{stride}[1] * x + n)
Padding is applied to data before the computation.
ceil_mode is used to take ceil or floor while computing out shape.
count_include_pad indicates including or excluding padded input values in computation.
This operator accepts data layout specification.
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
    pool_size : tuple of int, optional
        The size of the pooling window.
    strides : tuple of int, optional
The strides of pooling.
padding : tuple of int, optional
The padding for pooling.
layout : str, optional
Layout of the input.
ceil_mode : bool, optional
To enable or disable ceil while pooling.
count_include_pad : bool, optional
To include padding to compute the average.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.avg_pool2d(data, pool_size, strides, padding,
layout, ceil_mode, count_include_pad)
def global_max_pool2d(data,
layout="NCHW"):
r"""2D global maximum pooling operator.
This operator takes data as input and does 2D max value calculation
across each window represented by WxH.
    In the default case, where the data_layout is `NCHW`,
    it takes a data Tensor with shape `(batch_size, in_channels, height, width)`
    and produces an output Tensor according to the following rule:
with data of shape (b, c, h, w)
.. math::
\mbox{out}(b, c, 1, 1) = \max_{m=0, \ldots, h} \max_{n=0, \ldots, w}
\mbox{data}(b, c, m, n)
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
layout : str, optional
Layout of the input.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.global_max_pool2d(data, layout)
def global_avg_pool2d(data,
layout="NCHW"):
r"""2D global average pooling operator.
This operator takes data as input and does 2D average value calculation
across each window represented by WxH.
    In the default case, where the data_layout is `NCHW`,
    it takes a data Tensor with shape `(batch_size, in_channels, height, width)`
    and produces an output Tensor according to the following rule:
with data of shape (b, c, h, w)
.. math::
\mbox{out}(b, c, 1, 1) = \frac{1}{h * w} \sum_{m=0}^{h-1} \sum_{n=0}^{w-1}
\mbox{data}(b, c, m, n)
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
layout : str, optional
Layout of the input.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.global_avg_pool2d(data, layout)
def upsampling(data,
scale=1,
layout="NCHW",
method="NEAREST_NEIGHBOR"):
"""Upsampling.
This operator takes data as input and does 2D scaling to the given scale factor.
In the default case, where the data_layout is `NCHW`
with data of shape (n, c, h, w)
out will have a shape (n, c, h*scale, w*scale)
    method indicates the algorithm to be used while calculating the output value
and method can be one of ("BILINEAR", "NEAREST_NEIGHBOR")
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
    scale : int, optional
The scale factor for upsampling.
layout : str, optional
Layout of the input.
method : str, optional
        Scale method to be used [NEAREST_NEIGHBOR, BILINEAR].
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.upsampling(data, scale, layout, method)
def batch_flatten(data):
"""BatchFlatten.
    This operator flattens all the dimensions except for the batch dimension,
    which results in a 2D output.
For data with shape ``(d1, d2, ..., dk)``
batch_flatten(data) returns reshaped output of shape ``(d1, d2*...*dk)``.
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
Returns
-------
result : tvm.relay.Expr
The Flattened result.
"""
return _make.batch_flatten(data)
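
# Shape sketch (illustrative): batch_flatten applied to a (2, 3, 4, 5)
# tensor yields shape (2, 60), since 3 * 4 * 5 = 60.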
def bias_add(data, bias, axis=1):
"""add_bias operator.
Add 1D bias to the axis of data.
This function is a special case of add which allows
inference of shape of the bias from data.
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
bias : tvm.relay.Expr
The bias to be added.
axis : int, optional
The axis to add the bias.
Returns
-------
result : tvm.relay.Expr
The final result.
"""
return _make.bias_add(data, bias, axis)
def dense(data, weight, units=None):
"""Dense operator.
Applies a linear transformation
.. math::
        `Y = X * W^T`
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
weight : tvm.relay.Expr
The weight expressions.
units : int, optional
Number of hidden units of the dense transformation.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.dense(data, weight, units)
def relu(data):
"""Rectified linear unit.
.. math::
out = max(x, 0)
Parameters
----------
data : tvm.relay.Expr
The input data
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.relu(data)
def leaky_relu(data, alpha):
"""This operator takes data as input and does Leaky version
of a Rectified Linear Unit.
.. math::
`y = x > 0 ? x : alpha * x`
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
alpha : float
Slope coefficient for the negative half axis.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.leaky_relu(data, alpha)
def prelu(data, alpha, axis=1):
"""This operator takes data as input and does Leaky version
of a Rectified Linear Unit.
.. math::
`y = x > 0 ? x : alpha * x`
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
alpha : tvm.relay.Expr
Slope coefficient for the negative half axis.
axis : int, optional
        Specify along which axis of the input the channel dimension is located.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.prelu(data, alpha, axis)
def pad(data,
pad_width,
pad_value=0.0):
r"""Padding
This operator takes in a tensor and pads each axis by the specified
widths using the specified value.
Parameters
----------
data: tvm.relay.Expr
The input data to the operator
pad_width: tuple of <tuple of <int>>, required
Number of values padded to the edges of each axis, in the format
of ((before_1, after_1), ..., (before_N, after_N))
pad_value: float, optional, default=0.0
The value used for padding
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.pad(data, pad_width, pad_value)
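
# Worked example (illustrative): for data of shape (1, 3, 28, 28),
# pad_width=((0, 0), (0, 0), (2, 2), (2, 2)) yields shape (1, 3, 32, 32),
# with the new border elements filled with pad_value.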
def lrn(data, size=5, axis=1, bias=2, alpha=.00001, beta=0.75):
"""This operator takes data as input and does local response normalization.
Normalize the input in a local region across or within feature maps.
    Each input value is divided by (bias + (alpha * sum_sqr_data / size))^beta,
    where size is the extent of each local region, and the sum is taken over
    the region centered at that value (zero padding is added where necessary).
    .. math::
        out = data / (bias + (alpha * sum_sqr_data / size))^beta
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
size : int, optional
The size of the local region to be considered for normalization.
axis : int, optional
Input data layout channel axis. Default value is 1 for NCHW format
bias : float, optional
The offset parameter to avoid dividing by 0.
alpha : float, optional
The scaling parameter.
beta : float, optional
The exponent parameter.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.lrn(data, size, axis, alpha, beta, bias)
def l2_normalize(data, eps, axis=None):
"""Perform L2 normalization on the input data
.. math::
y(i, j) = x(i, j) / sqrt(max(sum(x^2), eps))
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
eps : float
epsilon value
axis : list of int, optional
        axes over which the normalization is applied
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.l2_normalize(data, eps, axis)
def dropout(data, rate=0.5):
"""Applies the dropout operation to the input array.
During training, each element of the input is set to zero with
    probability ``rate``. The whole array is rescaled by ``1/(1-rate)``
to keep the expected sum of the input unchanged.
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
rate : float, optional (default=0.5)
The probability for an element to be reset to 0.
Returns
-------
result : tvm.relay.Expr
The result of dropout
"""
result = _make.dropout(data, rate)
return TupleWrapper(result, 2)[0]
def batch_norm(data,
gamma,
beta,
moving_mean,
moving_var,
axis=1,
epsilon=1e-5,
center=True,
scale=True):
r"""
Batch normalization layer (Ioffe and Szegedy, 2014).
Normalizes the input at each batch, i.e. applies a transformation
that maintains the mean activation close to 0 and the activation
standard deviation close to 1.
.. math::
data\_mean[i] = mean(data[:,i,:,...]) \\
data\_var[i] = var(data[:,i,:,...])
Then compute the normalized output, which has the same shape as input, as following:
.. math::
out[:,i,:,...] = \frac{data[:,i,:,...] - data\_mean[i]}{\sqrt{data\_var[i]+\epsilon}}
* gamma[i] + beta[i]
    Both *mean* and *var* return a scalar by treating the input as a vector.
Assume the input has size *k* on axis 1, then both ``gamma`` and ``beta``
have shape *(k,)*.
Besides the inputs and the outputs, this operator accepts two auxiliary
states, ``moving_mean`` and ``moving_var``, which are *k*-length
vectors. They are global statistics for the whole dataset, which are updated by::
moving_mean = moving_mean * momentum + data_mean * (1 - momentum)
moving_var = moving_var * momentum + data_var * (1 - momentum)
The parameter ``axis`` specifies which axis of the input shape denotes
the 'channel' (separately normalized groups). The default is 1.
Specifying -1 sets the channel axis to be the last item in the input shape.
.. note::
This operator can be optimized away for inference.
Parameters
----------
data : tvm.relay.Expr
Input to which batch_norm will be applied.
gamma : tvm.relay.Expr
The gamma scale factor.
beta : tvm.relay.Expr
The beta offset factor.
moving_mean : tvm.relay.Expr
        Running mean of the input.
moving_var : tvm.relay.Expr
Running variance of input.
axis : int, optional, default=1
Specify along which shape axis the channel is specified.
epsilon : double, optional, default=1e-5
Small float added to variance to avoid diving by zero.
center : boolean, optional, default=True
        If True, add the offset of beta to the normalized tensor. If False,
        beta is ignored.
    scale : boolean, optional, default=True
        If True, multiply by gamma. If False, gamma is not used.
        When the next layer is piecewise linear (e.g. nn.relu),
this can be disabled since the scaling will be done by the next layer.
Returns
-------
result : relay.Tuple([tvm.relay.Expr, tvm.relay.Expr, tvm.relay.Expr])
Tuple of normed data (same shape as input),
new running mean (k-length vector),
and new running variance (k-length vector)
"""
result = _make.batch_norm(data,
gamma,
beta,
moving_mean,
moving_var,
axis,
epsilon,
center,
scale)
return TupleWrapper(result, 3)
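
# Illustrative sketch (not part of the original module): wiring batch_norm
# into a small expression; assumes the standard `tvm.relay` front-end helper
# `relay.var` is available, as in typical Relay graph construction.
def _example_batch_norm():
    from tvm import relay  # local import keeps the sketch self-contained
    x = relay.var("x", shape=(8, 16, 32, 32))  # NCHW, channel axis = 1
    gamma = relay.var("gamma", shape=(16,))
    beta = relay.var("beta", shape=(16,))
    mean = relay.var("mean", shape=(16,))
    var = relay.var("var", shape=(16,))
    # batch_norm returns a 3-element tuple wrapper; index 0 is the normalized
    # tensor, indices 1 and 2 are the updated running statistics.
    return batch_norm(x, gamma, beta, mean, var, axis=1)[0]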
def contrib_conv2d_winograd_without_weight_transform(data,
weight,
tile_size,
strides=(1, 1),
padding=(0, 0),
dilation=(1, 1),
groups=1,
channels=None,
kernel_size=None,
data_layout="NCHW",
kernel_layout="OIHW",
out_layout="",
out_dtype=""):
r"""2D convolution with winograd algorithm.
The basic parameters are the same as the ones in vanilla conv2d.
It assumes the weight is pre-transformed by nn.contrib_conv2d_winograd_weight_transform
Parameters
----------
data : tvm.relay.Expr
The input data to the operator.
weight : tvm.relay.Expr
The weight expressions.
tile_size : int
The Tile size of winograd. E.g. 2 for F(2x2, 3x3) and 4 for F(4x4, 3x3)
strides : tuple of int, optional
        The strides of convolution.
padding : tuple of int, optional
The padding of convolution on both sides of inputs before convolution.
dilation : tuple of int, optional
Specifies the dilation rate to be used for dilated convolution.
groups : int, optional
Number of groups for grouped convolution.
channels : int, optional
Number of output channels of this convolution.
kernel_size : tuple of int, optional
        The spatial dimensions of the convolution kernel.
data_layout : str, optional
Layout of the input.
kernel_layout : str, optional
Layout of the weight.
out_layout : str, optional
Layout of the output, by default, out_layout is the same as data_layout
out_dtype : str, optional
Specifies the output data type for mixed precision conv2d.
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.contrib_conv2d_winograd_without_weight_transform(
data, weight, tile_size, strides, padding, dilation,
groups, channels, kernel_size, data_layout,
kernel_layout, out_layout, out_dtype)
def contrib_conv2d_winograd_weight_transform(weight,
tile_size):
r"""Weight Transformation part for 2D convolution with winograd algorithm.
We separate this as a single op to enable pre-compute for inference.
Use this together with nn.contrib_conv2d_winograd_without_weight_transform
Parameters
----------
weight : tvm.relay.Expr
The weight expressions.
tile_size : int
The Tile size of winograd. E.g. 2 for F(2x2, 3x3) and 4 for F(4x4, 3x3)
Returns
-------
result : tvm.relay.Expr
The computed result.
"""
return _make.contrib_conv2d_winograd_weight_transform(weight, tile_size)
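
# Pairing sketch (illustrative): the two contrib ops above are meant to be
# used together -- transform the weight once ahead of time, then reuse it at
# inference; the parameter values below are placeholders, and tile_size must
# match between the two calls.
#
#   w_t = contrib_conv2d_winograd_weight_transform(weight, tile_size=4)
#   out = contrib_conv2d_winograd_without_weight_transform(
#       data, w_t, tile_size=4, channels=64, kernel_size=(3, 3),
#       padding=(1, 1))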
reg_masternode_dlg.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: Bertrand256
# Created on: 2018-11
import base64
import json
import logging
import time
from collections import namedtuple
from enum import Enum
from functools import partial
from typing import List, Union, Callable
import ipaddress
from PyQt5 import QtWidgets, QtGui
from PyQt5.QtCore import pyqtSlot, Qt, QTimerEvent, QTimer
from PyQt5.QtGui import QPalette
from PyQt5.QtWidgets import QDialog, QApplication, QToolButton, QAction, QWidget
from bitcoinrpc.authproxy import EncodeDecimal, JSONRPCException
import app_cache
import app_defs
import hw_intf
from app_config import MasternodeConfig, AppConfig, InputKeyType
from app_defs import FEE_DUFF_PER_BYTE
from bip44_wallet import Bip44Wallet, BreakFetchTransactionsException, find_wallet_addresses
from common import CancelException
from crown_utils import generate_bls_privkey, generate_wif_privkey, validate_address, wif_privkey_to_address, \
validate_wif_privkey, bls_privkey_to_pubkey
from crownd_intf import CrowndInterface
from thread_fun_dlg import CtrlObject
from ui import ui_reg_masternode_dlg
from wallet_common import Bip44AccountType, Bip44AddressType
from wnd_utils import WndUtils
STEP_MN_DATA = 1
STEP_DASHD_TYPE = 2
STEP_AUTOMATIC_RPC_NODE = 3
STEP_MANUAL_OWN_NODE = 4
STEP_SUMMARY = 5
NODE_TYPE_PUBLIC_RPC = 1
NODE_TYPE_OWN = 2
CACHE_ITEM_SHOW_FIELD_HINTS = 'RegMasternodeDlg_ShowFieldHints'
log = logging.getLogger('cmt.reg_masternode')
class RegMasternodeDlg(QDialog, ui_reg_masternode_dlg.Ui_RegMasternodeDlg, WndUtils):
def __init__(self, main_dlg, config: AppConfig, crownd_intf: CrowndInterface, masternode: MasternodeConfig,
on_proregtx_success_callback: Callable):
QDialog.__init__(self, main_dlg)
ui_reg_masternode_dlg.Ui_RegMasternodeDlg.__init__(self)
WndUtils.__init__(self, main_dlg.app_config)
self.main_dlg = main_dlg
self.masternode = masternode
self.app_config = config
self.crownd_intf:CrowndInterface = crownd_intf
self.on_proregtx_success_callback = on_proregtx_success_callback
self.style = '<style>.info{color:darkblue} .warning{color:#ff6600} .error{background-color:red;color:white}</style>'
self.operator_reward_saved = None
self.owner_pkey_generated: str = None
self.operator_pkey_generated: str = None
self.voting_pkey_generated: str = None
self.current_step = STEP_MN_DATA
self.step_stack: List[int] = []
self.proregtx_prepare_thread_ref = None
self.deterministic_mns_spork_active = True
self.dmn_collateral_tx: str = None
self.dmn_collateral_tx_index: int = None
self.dmn_collateral_tx_address: str = None
self.dmn_collateral_tx_address_path: str = None
self.dmn_ip: str = None
self.dmn_tcp_port: int = None
self.dmn_owner_payout_addr: str = None
self.dmn_operator_reward: int = 0
self.dmn_owner_privkey: str = None
self.dmn_owner_address: str = None
self.dmn_operator_privkey: str = None
self.dmn_operator_pubkey: str = None
self.dmn_voting_privkey: str = None
self.dmn_voting_address: str = None
self.dmn_owner_key_type = InputKeyType.PRIVATE
self.dmn_operator_key_type = InputKeyType.PRIVATE
self.dmn_voting_key_type = InputKeyType.PRIVATE
self.collateral_validation_err_msg = ''
self.ip_port_validation_err_msg = ''
self.payout_address_validation_err_msg = ''
self.operator_reward_validation_err_msg = ''
self.owner_key_validation_err_msg = ''
self.operator_key_validation_err_msg = ''
self.voting_key_validation_err_msg = ''
self.dmn_reg_tx_hash = ''
self.manual_signed_message = False
self.last_manual_prepare_string: str = None
self.wait_for_confirmation_timer_id = None
self.show_field_hinds = True
self.summary_info = []
if self.masternode:
self.dmn_collateral_tx_address_path = self.masternode.collateralBip32Path
self.bip44_wallet = Bip44Wallet(self.app_config.hw_coin_name, self.main_dlg.hw_session,
self.app_config.db_intf, self.crownd_intf, self.app_config.crown_network)
self.finishing = False
self.setupUi()
def setupUi(self):
ui_reg_masternode_dlg.Ui_RegMasternodeDlg.setupUi(self, self)
self.closeEvent = self.closeEvent
self.restore_cache_settings()
self.edtCollateralTx.setText(self.masternode.collateralTx)
if self.masternode.collateralTx:
sz = self.edtCollateralTx.fontMetrics().size(0, self.masternode.collateralTx + '000')
self.edtCollateralTx.setMinimumWidth(sz.width())
self.edtCollateralIndex.setText(self.masternode.collateralTxIndex)
self.edtIP.setText(self.masternode.ip)
self.edtPort.setText(self.masternode.port)
self.edtPayoutAddress.setText(self.masternode.collateralAddress)
self.chbWholeMNReward.setChecked(True)
self.lblProtxSummary2.linkActivated.connect(self.save_summary_info)
        # sizePolicy() returns a copy, so the modified policy must be set back
        size_policy = self.lblCollateralTxMsg.sizePolicy()
        size_policy.setHeightForWidth(True)
        self.lblCollateralTxMsg.setSizePolicy(size_policy)
self.prepare_keys()
self.btnClose.hide()
self.setIcon(self.btnManualFundingAddressPaste, '[email protected]')
self.setIcon(self.btnManualProtxPrepareCopy, '[email protected]')
self.setIcon(self.btnManualProtxPrepareResultPaste, '[email protected]')
self.setIcon(self.btnManualProtxSubmitCopy, '[email protected]')
self.setIcon(self.btnManualTxHashPaste, '[email protected]')
self.setIcon(self.btnSummaryDMNOperatorKeyCopy, '[email protected]')
self.edtSummaryDMNOperatorKey.setStyleSheet("QLineEdit{background-color: white} "
"QLineEdit:read-only{background-color: white}")
doc_url = app_defs.get_doc_url('deterministic-mn-migration.md')
if doc_url:
self.lblDocumentation.setText(f'<a href="{doc_url}">Documentation</a>')
self.update_dynamic_labels()
self.update_ctrls_visibility()
self.update_ctrl_state()
self.update_step_tab_ui()
self.update_show_hints_label()
self.minimize_dialog_height()
def closeEvent(self, event):
self.finishing = True
if self.wait_for_confirmation_timer_id is not None:
self.killTimer(self.wait_for_confirmation_timer_id)
self.save_cache_settings()
def restore_cache_settings(self):
app_cache.restore_window_size(self)
self.show_field_hinds = app_cache.get_value(CACHE_ITEM_SHOW_FIELD_HINTS, True, bool)
def save_cache_settings(self):
app_cache.save_window_size(self)
app_cache.set_value(CACHE_ITEM_SHOW_FIELD_HINTS, self.show_field_hinds)
def minimize_dialog_height(self):
        def resize_dlg():
            self.adjustSize()
        self.tm_resize_dlg = QTimer(self)
        self.tm_resize_dlg.setSingleShot(True)
        # QTimer.singleShot is a static method, so this schedules a one-shot
        # 100 ms call of resize_dlg regardless of the instance set up above
        self.tm_resize_dlg.singleShot(100, resize_dlg)
def update_dynamic_labels(self):
def style_to_color(style: str) -> str:
if style == 'hl1':
color = 'color:#00802b'
else:
color = ''
return color
def get_label_text(prefix:str, key_type: str, tooltip_anchor: str, style: str):
lbl = prefix + ' ' + \
{'privkey': 'private key', 'pubkey': 'public key', 'address': 'Crown address'}.get(key_type, '???')
change_mode = f'(<a href="{tooltip_anchor}">use {tooltip_anchor}</a>)'
return f'<table style="float:right;{style_to_color(style)}"><tr><td><b>{lbl}</b></td><td>{change_mode}</td></tr></table>'
if self.masternode:
if self.dmn_owner_key_type == InputKeyType.PRIVATE:
key_type, tooltip_anchor, placeholder_text = ('privkey', 'address', 'Enter the owner private key')
style = ''
else:
key_type, tooltip_anchor, placeholder_text = ('address', 'privkey', 'Enter the owner Crown address')
style = 'hl1'
self.lblOwnerKey.setText(get_label_text('Owner', key_type, tooltip_anchor, style))
self.edtOwnerKey.setPlaceholderText(placeholder_text)
if self.dmn_operator_key_type == InputKeyType.PRIVATE:
key_type, tooltip_anchor, placeholder_text = ('privkey', 'pubkey', 'Enter the operator private key')
style = ''
else:
key_type, tooltip_anchor, placeholder_text = ('pubkey', 'privkey', 'Enter the operator public key')
style = 'hl1'
self.lblOperatorKey.setText(get_label_text('Operator', key_type, tooltip_anchor, style))
self.edtOperatorKey.setPlaceholderText(placeholder_text)
if self.dmn_voting_key_type == InputKeyType.PRIVATE:
key_type, tooltip_anchor, placeholder_text = ('privkey','address', 'Enter the voting private key')
style = ''
else:
key_type, tooltip_anchor, placeholder_text = ('address', 'privkey', 'Enter the voting Crown address')
style = 'hl1'
self.lblVotingKey.setText(get_label_text('Voting', key_type, tooltip_anchor, style))
self.edtVotingKey.setPlaceholderText(placeholder_text)
@pyqtSlot(str)
def on_lblOwnerKey_linkActivated(self, link):
if self.dmn_owner_key_type == InputKeyType.PRIVATE:
self.dmn_owner_key_type = InputKeyType.PUBLIC
self.dmn_owner_privkey = self.edtOwnerKey.text()
self.edtOwnerKey.setText(self.dmn_owner_address)
else:
self.dmn_owner_key_type = InputKeyType.PRIVATE
self.dmn_owner_address = self.edtOwnerKey.text()
self.edtOwnerKey.setText(self.dmn_owner_privkey)
self.update_dynamic_labels()
self.update_ctrls_visibility()
self.upd_owner_key_info(False)
@pyqtSlot(str)
def on_lblOperatorKey_linkActivated(self, link):
if self.dmn_operator_key_type == InputKeyType.PRIVATE:
self.dmn_operator_key_type = InputKeyType.PUBLIC
self.dmn_operator_privkey = self.edtOperatorKey.text()
self.edtOperatorKey.setText(self.dmn_operator_pubkey)
else:
self.dmn_operator_key_type = InputKeyType.PRIVATE
self.dmn_operator_pubkey = self.edtOperatorKey.text()
self.edtOperatorKey.setText(self.dmn_operator_privkey)
self.update_dynamic_labels()
self.update_ctrls_visibility()
self.upd_operator_key_info(False)
@pyqtSlot(str)
def on_lblVotingKey_linkActivated(self, link):
if self.dmn_voting_key_type == InputKeyType.PRIVATE:
self.dmn_voting_key_type = InputKeyType.PUBLIC
self.dmn_voting_privkey = self.edtVotingKey.text()
self.edtVotingKey.setText(self.dmn_voting_address)
else:
self.dmn_voting_key_type = InputKeyType.PRIVATE
self.dmn_voting_address = self.edtVotingKey.text()
self.edtVotingKey.setText(self.dmn_voting_privkey)
self.update_dynamic_labels()
self.update_ctrls_visibility()
self.upd_voting_key_info(False)
@pyqtSlot(str)
def on_lblOwnerKey_linkHovered(self, link):
if link == 'address':
tt = 'Change input type to Crown address'
else:
tt = 'Change input type to private key'
self.lblOwnerKey.setToolTip(tt)
@pyqtSlot(str)
def on_lblOperatorKey_linkHovered(self, link):
if link == 'pubkey':
tt = 'Change input type to public key'
else:
tt = 'Change input type to private key'
self.lblOperatorKey.setToolTip(tt)
@pyqtSlot(str)
def on_lblVotingKey_linkHovered(self, link):
if link == 'address':
tt = 'Change input type to Crown address'
else:
tt = 'Change input type to private key'
self.lblVotingKey.setToolTip(tt)
def prepare_keys(self):
gen_owner = False
gen_operator = False
gen_voting = False
        # if any of the owner/operator/voting keys used in the configuration is the same as the
        # corresponding key shown on the blockchain, replace that key with a new one
found_protx = False
protx_state = {}
try:
for protx in self.crownd_intf.protx('list', 'registered', True):
protx_state = protx.get('state')
if (protx_state and protx_state.get('service') == self.masternode.ip + ':' + self.masternode.port) or \
(protx.get('collateralHash') == self.masternode.collateralTx and
str(protx.get('collateralIndex')) == str(self.masternode.collateralTxIndex)):
found_protx = True
break
except Exception as e:
pass
if found_protx:
if self.masternode.get_dmn_owner_public_address(self.app_config.crown_network) == \
protx_state.get('ownerAddress'):
gen_owner = True
if self.masternode.get_dmn_operator_pubkey() == protx_state.get('pubKeyOperator'):
gen_operator = True
if self.masternode.get_dmn_voting_public_address(self.app_config.crown_network) == \
protx_state.get('votingAddress'):
gen_voting = True
if (self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE and
not self.masternode.dmn_owner_private_key) or \
(self.masternode.dmn_owner_key_type == InputKeyType.PUBLIC and
not self.masternode.dmn_owner_address):
gen_owner = True
if (self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE and
not self.masternode.dmn_operator_private_key) or \
(self.masternode.dmn_operator_key_type == InputKeyType.PUBLIC and
not self.masternode.dmn_operator_public_key):
gen_operator = True
if (self.masternode.dmn_voting_key_type == InputKeyType.PRIVATE and
not self.masternode.dmn_voting_private_key) or \
(self.masternode.dmn_voting_key_type == InputKeyType.PUBLIC and
not self.masternode.dmn_voting_address):
gen_voting = True
if gen_owner:
self.owner_pkey_generated = generate_wif_privkey(self.app_config.crown_network, compressed=True)
self.edtOwnerKey.setText(self.owner_pkey_generated)
else:
if self.masternode.dmn_owner_key_type == InputKeyType.PRIVATE:
self.edtOwnerKey.setText(self.masternode.dmn_owner_private_key)
else:
self.edtOwnerKey.setText(self.masternode.dmn_owner_address)
self.dmn_owner_key_type = self.masternode.dmn_owner_key_type
if gen_operator:
try:
self.operator_pkey_generated = generate_bls_privkey()
self.edtOperatorKey.setText(self.operator_pkey_generated)
except Exception as e:
self.errorMsg(str(e))
else:
if self.masternode.dmn_operator_key_type == InputKeyType.PRIVATE:
self.edtOperatorKey.setText(self.masternode.dmn_operator_private_key)
else:
self.edtOperatorKey.setText(self.masternode.dmn_operator_public_key)
self.dmn_operator_key_type = self.masternode.dmn_operator_key_type
if self.deterministic_mns_spork_active:
if gen_voting:
self.voting_pkey_generated = generate_wif_privkey(self.app_config.crown_network, compressed=True)
self.edtVotingKey.setText(self.voting_pkey_generated)
else:
if self.dmn_voting_key_type == InputKeyType.PRIVATE:
self.edtVotingKey.setText(self.masternode.dmn_voting_private_key)
else:
self.edtVotingKey.setText(self.masternode.dmn_voting_address)
@pyqtSlot(bool)
def on_btnCancel_clicked(self):
self.close()
@pyqtSlot(bool)
def on_btnClose_clicked(self):
self.close()
@pyqtSlot(bool)
def on_btnGenerateOwnerKey_clicked(self, active):
k = generate_wif_privkey(self.app_config.crown_network, compressed=True)
self.edtOwnerKey.setText(k)
self.edtOwnerKey.repaint()
@pyqtSlot(bool)
def on_btnGenerateOperatorKey_clicked(self, active):
self.edtOperatorKey.setText(generate_bls_privkey())
        self.edtOperatorKey.repaint()  # Qt 5.11.3 has an issue with automatic repainting after setText on macOS
@pyqtSlot(bool)
def on_btnGenerateVotingKey_clicked(self, active):
k = generate_wif_privkey(self.app_config.crown_network, compressed=True)
self.edtVotingKey.setText(k)
self.edtVotingKey.repaint()
def set_ctrl_message(self, control, message: str, style: str):
if message:
control.setText(f'{self.style}<span class="{style}">{message}</span>')
control.setVisible(True)
# control.repaint()
else:
control.setVisible(False)
def update_ctrls_visibility(self):
if not self.deterministic_mns_spork_active:
# hide controls related to the voting key - if spork 15 is not active, voting key has to be the same
# as the owner key
self.lblVotingMsg.hide()
self.lblVotingKey.hide()
self.edtVotingKey.hide()
self.btnGenerateVotingKey.hide()
else:
self.btnGenerateVotingKey.setVisible(self.dmn_voting_key_type == InputKeyType.PRIVATE)
self.btnGenerateOwnerKey.setVisible(self.dmn_owner_key_type == InputKeyType.PRIVATE)
self.btnGenerateOperatorKey.setVisible(self.dmn_operator_key_type == InputKeyType.PRIVATE)
def update_fields_info(self, show_invalid_data_msg: bool):
"""
        :param show_invalid_data_msg: if the argument is True and the data is invalid, an error message is shown
          below the control; the argument is set to True if, before moving to the next step, errors were
          found in the data provided by the user.
"""
self.upd_collateral_tx_info(show_invalid_data_msg)
self.upd_ip_info(show_invalid_data_msg)
self.upd_payout_addr_info(show_invalid_data_msg)
self.upd_oper_reward_info(show_invalid_data_msg)
self.upd_owner_key_info(show_invalid_data_msg)
self.upd_operator_key_info(show_invalid_data_msg)
self.upd_voting_key_info(show_invalid_data_msg)
def upd_collateral_tx_info(self, show_invalid_data_msg: bool):
"""
        :param show_invalid_data_msg: if the argument is True and the data is invalid, an error message is shown
          below the control; the argument is set to True if, before moving to the next step, errors were
          found in the data provided by the user.
"""
msg = ''
style = 'info'
if show_invalid_data_msg and self.collateral_validation_err_msg:
msg = self.collateral_validation_err_msg
style = 'error'
self.set_ctrl_message(self.lblCollateralTxMsg, msg, style)
def upd_ip_info(self, show_invalid_data_msg: bool):
"""
        :param show_invalid_data_msg: if the argument is True and the data is invalid, an error message is shown
          below the control; the argument is set to True if, before moving to the next step, errors were
          found in the data provided by the user.
"""
msg = ''
style = ''
if show_invalid_data_msg and self.ip_port_validation_err_msg:
msg = self.ip_port_validation_err_msg
style = 'error'
else:
if self.show_field_hinds:
if self.edtIP.text().strip():
msg = 'You can leave the IP address and port fields empty if you want to delegate the operator ' \
'role to an external entity and you don\'t know their values in advance.'
style = 'info'
else:
                    msg = 'If you don\'t set the IP address and port fields, the masternode operator will ' \
                          'have to issue a ProUpServTx transaction using the Crown wallet.'
style = 'warning'
self.set_ctrl_message(self.lblIPMsg, msg, style)
def upd_payout_addr_info(self, show_invalid_data_msg: bool):
msg = ''
style = ''
if show_invalid_data_msg and self.payout_address_validation_err_msg:
msg = self.payout_address_validation_err_msg
style = 'error'
else:
if self.show_field_hinds:
msg = 'The owner\'s payout address can be set to any valid Crown address - it no longer ' \
'has to be the same as the collateral address.'
style = 'info'
self.set_ctrl_message(self.lblPayoutMsg, msg, style)
def upd_oper_reward_info(self, show_invalid_data_msg: bool):
msg = ''
style = ''
if show_invalid_data_msg and self.operator_reward_validation_err_msg:
msg = self.operator_reward_validation_err_msg
style = 'error'
else:
if self.show_field_hinds:
if self.chbWholeMNReward.isChecked():
msg = 'Here you can specify how much of the masternode earnings will go to the ' \
'masternode operator.'
style = 'info'
else:
msg = 'The masternode operator will have to specify his reward payee address in a ProUpServTx ' \
'transaction, otherwise the full reward will go to the masternode owner.'
style = 'warning'
self.set_ctrl_message(self.lblOperatorRewardMsg, msg, style)
def upd_owner_key_info(self, show_invalid_data_msg: bool):
msg = ''
style = ''
if show_invalid_data_msg and self.owner_key_validation_err_msg:
msg = self.owner_key_validation_err_msg
style = 'error'
else:
if self.show_field_hinds:
if self.dmn_owner_key_type == InputKeyType.PRIVATE:
if self.edtOwnerKey.text().strip() == self.owner_pkey_generated:
msg = 'This is an automatically generated owner private key. You can enter your own or ' \
'generate a new one by pressing the button on the right.'
elif not self.edtOwnerKey.text().strip():
msg = 'Enter the owner private key or generate a new one by clicking the button on the right.'
style = 'info'
else:
                    msg = 'You can use a Crown address if the related private key is stored elsewhere, e.g. in ' \
                          'the Crown Core wallet.<br><span class="warning">Note that if you provide an address ' \
                          'instead of a private key, you will not be able to publish the ProRegTx ' \
                          'transaction through public RPC nodes in the next steps.</span>'
style = 'info'
self.set_ctrl_message(self.lblOwnerMsg, msg, style)
def upd_operator_key_info(self, show_invalid_data_msg: bool):
msg = ''
style = ''
if show_invalid_data_msg and self.operator_key_validation_err_msg:
msg = self.operator_key_validation_err_msg
style = 'error'
else:
if self.show_field_hinds:
if self.dmn_operator_key_type == InputKeyType.PRIVATE:
if self.edtOperatorKey.text().strip() == self.operator_pkey_generated:
msg = 'This is an automatically generated operator BLS private key. You can enter your ' \
'own or generate a new one by pressing the button on the right.'
elif not self.edtOperatorKey.text().strip():
msg = 'Enter the operator private key or generate a new one by clicking the button on ' \
'the right.'
style = 'info'
else:
                    msg = 'You can use a public key if your masternode is managed by a separate entity (operator) ' \
'that controls the related private key or if you prefer to keep the private key outside ' \
'the program. If necessary, you can revoke this key by sending a new ProRegTx ' \
'transaction with a new operator key.'
style = 'info'
self.set_ctrl_message(self.lblOperatorMsg, msg, style)
def upd_voting_key_info(self, show_invalid_data_msg: bool):
msg = ''
style = ''
if self.deterministic_mns_spork_active:
if show_invalid_data_msg and self.voting_key_validation_err_msg:
msg = self.voting_key_validation_err_msg
style = 'error'
else:
if self.show_field_hinds:
if self.dmn_voting_key_type == InputKeyType.PRIVATE:
if self.edtVotingKey.text().strip() == self.voting_pkey_generated:
msg = 'This is an automatically generated private key for voting. You can enter your own or ' \
'generate a new one by pressing the button on the right.'
elif not self.edtVotingKey.text().strip():
msg = 'Enter the private key for voting or generate a new one by clicking the button on ' \
'the right.'
style = 'info'
else:
                        msg = 'You can use a Crown address if the related private key is stored elsewhere, e.g. in ' \
                              'the Crown Core wallet.<br><span class="warning">Note that providing an address instead of ' \
'a private key will prevent you from voting on proposals in this program.</span>'
style = 'info'
self.set_ctrl_message(self.lblVotingMsg, msg, style)
def get_crown_node_type(self):
if self.rbCMTCrownNodeType.isChecked():
return NODE_TYPE_PUBLIC_RPC
elif self.rbOwnCrownNodeType.isChecked():
return NODE_TYPE_OWN
else:
return None
def upd_node_type_info(self):
nt = self.get_crown_node_type()
msg = ''
if nt is None:
            msg = 'DIP-3 masternode registration involves sending a special transaction via a v0.13 Crown node ' \
                  '(e.g. Crown-Qt). <b>Note that this requires incurring a certain transaction fee, as with any ' \
                  'other ("normal") transaction.</b>'
elif nt == NODE_TYPE_PUBLIC_RPC:
msg = 'The ProRegTx transaction will be processed via the remote RPC node stored in the app configuration.' \
'<br><br>' \
'<b>Note 1:</b> this operation will involve signing transaction data with your <span style="color:red">owner key on the remote node</span>, ' \
'so use this method only if you trust the operator of that node (nodes <i>alice(luna, suzy).crown-masternode-tool.org</i> are maintained by the author of this application).<br><br>' \
'<b>Note 2:</b> if the operation fails (e.g. due to a lack of funds), choose the manual method ' \
'using your own Crown wallet.'
elif nt == NODE_TYPE_OWN:
msg = 'A Crown Core wallet (v0.13) with sufficient funds to cover transaction fees is required to ' \
'complete the next steps.'
self.lblCrownNodeTypeMessage.setText(msg)
def update_ctrl_state(self):
self.edtOperatorReward.setDisabled(self.chbWholeMNReward.isChecked())
@pyqtSlot(str)
def on_edtCollateralTx_textChanged(self, text):
self.upd_collateral_tx_info(False)
@pyqtSlot(str)
def on_edtCollateralIndex_textChanged(self, text):
self.upd_collateral_tx_info(False)
@pyqtSlot(str)
def on_edtIP_textChanged(self, text):
self.upd_ip_info(False)
@pyqtSlot(str)
def on_edtPayoutAddress_textChanged(self, text):
self.upd_payout_addr_info(False)
@pyqtSlot(bool)
def on_chbWholeMNReward_toggled(self, checked):
if checked:
self.operator_reward_saved = self.edtOperatorReward.value()
self.edtOperatorReward.setValue(0.0)
else:
            if self.operator_reward_saved is not None:
self.edtOperatorReward.setValue(self.operator_reward_saved)
self.update_ctrl_state()
self.upd_oper_reward_info(False)
@pyqtSlot(str)
def on_edtOwnerKey_textChanged(self, text):
self.upd_owner_key_info(False)
@pyqtSlot(str)
def on_edtOperatorKey_textChanged(self, text):
self.upd_operator_key_info(False)
@pyqtSlot(str)
def on_edtVotingKey_textChanged(self, text):
self.upd_voting_key_info(False)
@pyqtSlot(str)
def save_summary_info(self, link: str):
file_name = WndUtils.save_file_query(self.main_dlg, self.app_config,
'Enter the file name',
filter="TXT files (*.txt);;All Files (*)")
if file_name:
with open(file_name, 'wt') as fptr:
for l in self.summary_info:
lbl, val = l.split('\t')
fptr.write(f'{lbl}:\t{val}\n')
def update_step_tab_ui(self):
def show_hide_tabs(tab_idx_to_show: int):
self.edtManualProtxPrepare.setVisible(tab_idx_to_show == 3)
self.edtManualProtxPrepareResult.setVisible(tab_idx_to_show == 3)
self.edtManualProtxSubmit.setVisible(tab_idx_to_show == 3)
pass
self.btnContinue.setEnabled(False)
if self.current_step == STEP_MN_DATA:
self.stackedWidget.setCurrentIndex(0)
self.update_fields_info(False)
self.btnContinue.show()
self.btnContinue.setEnabled(True)
self.btnCancel.setEnabled(True)
elif self.current_step == STEP_DASHD_TYPE:
self.stackedWidget.setCurrentIndex(1)
self.upd_node_type_info()
self.btnContinue.setEnabled(True)
self.btnContinue.show()
self.btnCancel.setEnabled(True)
elif self.current_step == STEP_AUTOMATIC_RPC_NODE:
self.stackedWidget.setCurrentIndex(2)
self.upd_node_type_info()
elif self.current_step == STEP_MANUAL_OWN_NODE:
self.stackedWidget.setCurrentIndex(3)
self.upd_node_type_info()
self.btnContinue.setEnabled(True)
elif self.current_step == STEP_SUMMARY:
self.stackedWidget.setCurrentIndex(4)
if self.dmn_owner_key_type == InputKeyType.PRIVATE:
owner_privkey = self.dmn_owner_privkey
else:
owner_privkey = '<not available>'
if self.dmn_operator_key_type == InputKeyType.PRIVATE:
operator_privkey = self.dmn_operator_privkey
else:
operator_privkey = '<not available>'
if self.dmn_voting_key_type == InputKeyType.PRIVATE:
voting_privkey = self.dmn_voting_privkey
else:
voting_privkey = '<not available>'
self.summary_info = \
[f'Network address\t{self.dmn_ip}:{self.dmn_tcp_port}',
f'Payout address\t{self.dmn_owner_payout_addr}',
f'Owner private key\t{owner_privkey}',
f'Owner public address\t{self.dmn_owner_address}',
f'Operator private key\t{operator_privkey}',
f'Operator public key\t{self.dmn_operator_pubkey}',
f'Voting private key\t{voting_privkey}',
f'Voting public address\t{self.dmn_voting_address}',
f'Protx hash\t{self.dmn_reg_tx_hash}']
text = '<table>'
for l in self.summary_info:
lbl, val = l.split('\t')
text += f'<tr><td style="white-space: nowrap"><b>{lbl}:</b> </td><td>{val}</td></tr>'
text += '</table>'
self.edtProtxSummary.setText(text)
self.edtProtxSummary.show()
self.lblProtxSummary2.show()
if self.dmn_operator_key_type == InputKeyType.PRIVATE:
operator_message = '<b><span style="color:red">One more thing... </span></b>copy the following ' \
'line to the <code>crown.conf</code> file on your masternode server ' \
'(and restart <i>crownd</i>) or pass it to the masternode operator:'
else:
operator_message = '<b><span style="color:red">One more thing... </span></b>copy the following ' \
'line to the <code>crown.conf</code> file on your masternode server, replacing ' \
'"<your-operator-bls-private-key>" with the appropriate value or ask the operator ' \
'for it:'
self.lblProtxSummary3.setText(operator_message)
if self.dmn_operator_key_type == InputKeyType.PRIVATE:
operator_privkey = self.dmn_operator_privkey
else:
operator_privkey = '<your-operator-bls-private-key>'
self.edtSummaryDMNOperatorKey.setText(f'masternodeblsprivkey={operator_privkey}')
self.btnCancel.hide()
self.btnBack.hide()
self.btnContinue.hide()
self.btnClose.show()
self.btnClose.setEnabled(True)
self.btnClose.repaint()
else:
raise Exception('Invalid step')
show_hide_tabs(self.stackedWidget.currentIndex())
self.lblFieldHints.setVisible(self.stackedWidget.currentIndex() == 0)
self.btnBack.setEnabled(len(self.step_stack) > 0)
self.btnContinue.repaint()
self.btnCancel.repaint()
self.btnBack.repaint()
def validate_data(self):
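"""Validate all masternode registration inputs (collateral transaction, IP/port, payout
address, operator reward and the owner/operator/voting keys). Per-field error messages
are collected and shown; an Exception is raised if anything is invalid. Finally the
collateral transaction itself is verified in a background thread."""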
self.dmn_collateral_tx = self.edtCollateralTx.text().strip()
self.collateral_validation_err_msg = ''
error_count = 0
try:
if not self.dmn_collateral_tx:
self.collateral_validation_err_msg = 'Collateral transaction ID is required.'
self.edtCollateralTx.setFocus()
else:
self.dmn_collateral_tx_index = int(self.edtCollateralIndex.text())
if self.dmn_collateral_tx_index < 0:
self.collateral_validation_err_msg = 'Invalid collateral transaction index.'
except Exception:
self.edtCollateralIndex.setFocus()
self.collateral_validation_err_msg = 'Invalid collateral transaction index: should be an integer ' \
'value greater than or equal to 0.'
if self.collateral_validation_err_msg:
self.upd_collateral_tx_info(True)
error_count += 1
self.ip_port_validation_err_msg = ''
try:
self.dmn_ip = self.edtIP.text().strip()
if self.dmn_ip:
ipaddress.ip_address(self.dmn_ip)
except Exception as e:
self.edtIP.setFocus()
self.ip_port_validation_err_msg = 'Invalid masternode IP address: %s.' % str(e)
self.upd_ip_info(True)
error_count += 1
try:
if self.dmn_ip:
self.dmn_tcp_port = int(self.edtPort.text())
else:
self.dmn_tcp_port = None
except Exception:
self.edtPort.setFocus()
self.ip_port_validation_err_msg = 'Invalid TCP port: should be integer.'
self.upd_ip_info(True)
error_count += 1
self.payout_address_validation_err_msg = ''
addr = self.edtPayoutAddress.text().strip()
if not addr:
self.payout_address_validation_err_msg = 'Owner payout address is required.'
else:
self.dmn_owner_payout_addr = addr
if not validate_address(self.dmn_owner_payout_addr, self.app_config.crown_network):
self.payout_address_validation_err_msg = 'Invalid owner payout address.'
if self.payout_address_validation_err_msg:
self.edtPayoutAddress.setFocus()
self.upd_payout_addr_info(True)
error_count += 1
self.operator_reward_validation_err_msg = ''
if self.chbWholeMNReward.isChecked():
self.dmn_operator_reward = 0
else:
self.dmn_operator_reward = self.edtOperatorReward.value()
if self.dmn_operator_reward > 100 or self.dmn_operator_reward < 0:
self.edtOperatorReward.setFocus()
self.operator_reward_validation_err_msg = 'Invalid operator reward value: should be a value ' \
'between 0 and 100.'
if self.operator_reward_validation_err_msg:
self.upd_oper_reward_info(True)
error_count += 1
self.owner_key_validation_err_msg = ''
key = self.edtOwnerKey.text().strip()
if not key:
self.owner_key_validation_err_msg = 'Owner key/address is required.'
else:
if self.dmn_owner_key_type == InputKeyType.PRIVATE:
self.dmn_owner_privkey = key
if not validate_wif_privkey(self.dmn_owner_privkey, self.app_config.crown_network):
self.edtOwnerKey.setFocus()
self.owner_key_validation_err_msg = 'Invalid owner private key.'
else:
self.dmn_owner_address = wif_privkey_to_address(self.dmn_owner_privkey, self.app_config.crown_network)
else:
self.dmn_owner_address = key
self.dmn_owner_privkey = ''
if not validate_address(self.dmn_owner_address, self.app_config.crown_network):
self.edtOwnerKey.setFocus()
self.owner_key_validation_err_msg = 'Invalid owner Crown address.'
if self.owner_key_validation_err_msg:
self.upd_owner_key_info(True)
error_count += 1
self.operator_key_validation_err_msg = ''
key = self.edtOperatorKey.text().strip()
if not key:
self.operator_key_validation_err_msg = 'Operator key is required.'
else:
if self.dmn_operator_key_type == InputKeyType.PRIVATE:
try:
self.dmn_operator_privkey = key
try:
b = bytes.fromhex(self.dmn_operator_privkey)
if len(b) != 32:
raise Exception('invalid length (' + str(len(b)) + ')')
except Exception as e:
self.edtOperatorKey.setFocus()
self.operator_key_validation_err_msg = 'Invalid operator private key: ' + str(e)
self.dmn_operator_pubkey = bls_privkey_to_pubkey(self.dmn_operator_privkey)
except Exception as e:
self.edtOperatorKey.setFocus()
self.operator_key_validation_err_msg = 'Invalid operator private key: ' + str(e)
else:
self.dmn_operator_pubkey = key
self.dmn_operator_privkey = ''
try:
b = bytes.fromhex(self.dmn_operator_pubkey)
if len(b) != 48:
raise Exception('invalid length (' + str(len(b)) + ')')
except Exception as e:
self.edtOperatorKey.setFocus()
self.operator_key_validation_err_msg = 'Invalid operator public key: ' + str(e)
if self.operator_key_validation_err_msg:
self.upd_operator_key_info(True)
error_count += 1
self.voting_key_validation_err_msg = ''
if self.deterministic_mns_spork_active:
key = self.edtVotingKey.text().strip()
if not key:
self.voting_key_validation_err_msg = 'Voting key/address is required.'
else:
if self.dmn_voting_key_type == InputKeyType.PRIVATE:
self.dmn_voting_privkey = key
if not validate_wif_privkey(self.dmn_voting_privkey, self.app_config.crown_network):
self.edtVotingKey.setFocus()
self.voting_key_validation_err_msg = 'Invalid voting private key.'
else:
self.dmn_voting_address = wif_privkey_to_address(self.dmn_voting_privkey, self.app_config.crown_network)
else:
self.dmn_voting_address = key
self.dmn_voting_privkey = ''
if not validate_address(self.dmn_voting_address, self.app_config.crown_network):
self.edtVotingKey.setFocus()
self.voting_key_validation_err_msg = 'Invalid voting Crown address.'
else:
# spork 15 not active - use the owner private key for voting
self.dmn_voting_address = self.dmn_owner_address
self.dmn_voting_privkey = self.dmn_owner_privkey
self.dmn_voting_key_type = self.dmn_owner_key_type
if self.voting_key_validation_err_msg:
self.upd_voting_key_info(True)
error_count += 1
if error_count > 1:
raise Exception('Errors were encountered in the input data. You must correct them before you can continue.')
elif error_count == 1:
raise Exception(max((self.collateral_validation_err_msg, self.ip_port_validation_err_msg,
self.payout_address_validation_err_msg, self.operator_reward_validation_err_msg,
self.owner_key_validation_err_msg, self.operator_key_validation_err_msg,
self.voting_key_validation_err_msg)))
break_scanning = False
def check_break_scanning():
nonlocal break_scanning
return break_scanning
def do_break_scanning():
nonlocal break_scanning
break_scanning = True
return False
self.btnContinue.setEnabled(False)
try:
ret = WndUtils.run_thread_dialog(self.get_collateral_tx_address_thread, (check_break_scanning,), True,
force_close_dlg_callback=do_break_scanning)
except Exception as e:
log.exception(str(e))
raise Exception(str(e))
self.btnContinue.setEnabled(True)
return ret
def get_collateral_tx_address_thread(self, ctrl: CtrlObject, check_break_scanning_ext: Callable[[], bool]):
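"""Thread routine: fetch the collateral transaction, verify its output (the assigned
address and the 10000 Crown value) and locate the BIP32 path of the collateral address
on the connected hardware wallet, scanning the wallet if the stored path is missing or wrong."""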
txes_cnt = 0
msg = ''
break_scanning = False
ctrl.dlg_config_fun(dlg_title="Validating collateral transaction.", show_progress_bar=False)
ctrl.display_msg_fun('Verifying collateral transaction...')
def check_break_scanning():
nonlocal break_scanning
if self.finishing or break_scanning:
# stop the scanning process if the dialog finishes or the address/bip32path has been found
raise BreakFetchTransactionsException()
if check_break_scanning_ext is not None and check_break_scanning_ext():
raise BreakFetchTransactionsException()
def fetch_txes_feedback(tx_cnt: int):
nonlocal msg, txes_cnt
txes_cnt += tx_cnt
ctrl.display_msg_fun(msg + '<br><br>' + 'Number of transactions fetched so far: ' + str(txes_cnt))
def on_msg_link_activated(link: str):
nonlocal break_scanning
if link == 'break':
break_scanning = True
try:
tx = self.crownd_intf.getrawtransaction(self.dmn_collateral_tx, 1, skip_cache=True)
except Exception as e:
raise Exception('Cannot get the collateral transaction due to the following error: ' + str(e))
vouts = tx.get('vout')
if vouts:
if self.dmn_collateral_tx_index < len(vouts):
vout = vouts[self.dmn_collateral_tx_index]
spk = vout.get('scriptPubKey')
if not spk:
raise Exception(f'The collateral transaction ({self.dmn_collateral_tx}) output '
f'({self.dmn_collateral_tx_index}) doesn\'t have a value in the scriptPubKey '
f'field.')
ads = spk.get('addresses')
if not ads or len(ads) == 0:
raise Exception('The collateral transaction output doesn\'t have the Crown address assigned.')
if vout.get('valueSat') != 10000e8:
raise Exception('The value of the collateral transaction output is not equal to 10000 Crown.')
self.dmn_collateral_tx_address = ads[0]
else:
raise Exception(f'Transaction {self.dmn_collateral_tx} doesn\'t have output with index: '
f'{self.dmn_collateral_tx_index}')
else:
raise Exception('Invalid collateral transaction')
ctrl.display_msg_fun('Verifying the collateral transaction address on your hardware wallet.')
if not self.main_dlg.connect_hardware_wallet():
return False
if self.dmn_collateral_tx_address_path:
try:
addr = hw_intf.get_address(self.main_dlg.hw_session, self.dmn_collateral_tx_address_path)
except CancelException:
return False
msg = ''
if addr != self.dmn_collateral_tx_address:
log.warning(
f'The address returned by the hardware wallet ({addr}) for the BIP32 path '
f'{self.dmn_collateral_tx_address_path} differs from the address stored in the mn configuration '
f'({self.dmn_collateral_tx_address}). Need to scan the wallet for the correct BIP32 path.')
msg = '<span style="color:red">The BIP32 path of the collateral address from your mn config is incorrect.<br></span>' \
f'Trying to find the BIP32 path of the address {self.dmn_collateral_tx_address} in your wallet.' \
f'<br>This may take a while (<a href="break">break</a>)...'
self.dmn_collateral_tx_address_path = ''
else:
msg = 'Looking for a BIP32 path of the Crown address related to the masternode collateral.<br>' \
'This may take a while (<a href="break">break</a>)...'
if not self.dmn_collateral_tx_address_path and not self.finishing:
lbl = ctrl.get_msg_label_control()
if lbl:
def set():
lbl.setOpenExternalLinks(False)
lbl.setTextInteractionFlags(lbl.textInteractionFlags() & ~Qt.TextSelectableByMouse)
lbl.linkActivated.connect(on_msg_link_activated)
lbl.repaint()
WndUtils.call_in_main_thread(set)
ctrl.display_msg_fun(msg)
# fetch the transactions that involved the addresses stored in the wallet - during this
# all the used addresses are revealed
addr = self.bip44_wallet.scan_wallet_for_address(self.dmn_collateral_tx_address, check_break_scanning,
fetch_txes_feedback)
if not addr:
if not break_scanning:
WndUtils.errorMsg(f'Couldn\'t find a BIP32 path of the collateral address ({self.dmn_collateral_tx_address}).')
return False
else:
self.dmn_collateral_tx_address_path = addr.bip32_path
return True
def next_step(self):
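"""Advance the wizard to the next step based on the current one, validating the input
data or the manually entered ProTx transaction hash where required, and start the
automatic or manual registration process for the newly activated step."""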
cs = None
if self.current_step == STEP_MN_DATA:
if self.validate_data():
cs = STEP_DASHD_TYPE
else:
return
self.step_stack.append(self.current_step)
elif self.current_step == STEP_DASHD_TYPE:
if self.get_crown_node_type() == NODE_TYPE_PUBLIC_RPC:
cs = STEP_AUTOMATIC_RPC_NODE
elif self.get_crown_node_type() == NODE_TYPE_OWN:
cs = STEP_MANUAL_OWN_NODE
else:
self.errorMsg('You have to choose one of the two options.')
return
self.step_stack.append(self.current_step)
elif self.current_step == STEP_AUTOMATIC_RPC_NODE:
cs = STEP_SUMMARY
# in this case don't allow to start the automatic process again when the user clicks <Back>
elif self.current_step == STEP_MANUAL_OWN_NODE:
# check if the user passed the protx transaction hash
if not self.manual_signed_message:
self.errorMsg('It looks like you have not signed a "protx register_prepare" result.')
return
self.dmn_reg_tx_hash = self.edtManualTxHash.text().strip()
if not self.dmn_reg_tx_hash:
self.edtManualTxHash.setFocus()
self.errorMsg('Invalid transaction hash.')
return
try:
bytes.fromhex(self.dmn_reg_tx_hash)
except Exception:
log.warning('Invalid transaction hash.')
self.edtManualTxHash.setFocus()
self.errorMsg('Invalid transaction hash.')
return
cs = STEP_SUMMARY
else:
self.errorMsg('Invalid step')
return
prev_step = self.current_step
self.current_step = cs
self.update_step_tab_ui()
try:
if self.current_step == STEP_AUTOMATIC_RPC_NODE:
self.start_automatic_process()
elif self.current_step == STEP_MANUAL_OWN_NODE:
self.start_manual_process()
elif self.current_step == STEP_SUMMARY:
self.lblProtxSummary1.setText('<b><span style="color:green">Congratulations! The transaction for your DIP-3 '
'masternode has been submitted and is currently awaiting confirmations.'
'</span></b>')
if self.on_proregtx_success_callback:
self.on_proregtx_success_callback(self.masternode)
if not self.check_tx_confirmation():
self.wait_for_confirmation_timer_id = self.startTimer(5000)
except Exception:
self.current_step = prev_step
self.update_step_tab_ui()
raise
def previous_step(self):
if self.step_stack:
self.current_step = self.step_stack.pop()
else:
raise Exception('Invalid step')
self.update_step_tab_ui()
@pyqtSlot(bool)
def on_btnContinue_clicked(self, active):
self.next_step()
@pyqtSlot(bool)
def on_btnBack_clicked(self, active):
self.previous_step()
@pyqtSlot(bool)
def on_rbCMTCrownNodeType_toggled(self, active):
if active:
self.upd_node_type_info()
@pyqtSlot(bool)
def on_rbOwnCrownNodeType_toggled(self, active):
if active:
self.upd_node_type_info()
def sign_protx_message_with_hw(self, msg_to_sign) -> str:
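"""Sign the ProTx payload message on the hardware wallet with the collateral address
key and return the signature as a base64-encoded string. Raises an Exception if the
signature address doesn't match the collateral address."""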
sig = WndUtils.call_in_main_thread(
hw_intf.hw_sign_message, self.main_dlg.hw_session, self.dmn_collateral_tx_address_path,
msg_to_sign, 'Click the confirmation button on your hardware wallet to sign the ProTx payload message.')
if sig.address != self.dmn_collateral_tx_address:
log.error(f'Protx payload signature address mismatch. Is: {sig.address}, should be: '
f'{self.dmn_collateral_tx_address}.')
raise Exception(f'Protx payload signature address mismatch. Is: {sig.address}, should be: '
f'{self.dmn_collateral_tx_address}.')
else:
sig_bin = base64.b64encode(sig.signature)
payload_sig_str = sig_bin.decode('ascii')
return payload_sig_str
def start_automatic_process(self):
self.lblProtxTransaction1.hide()
self.lblProtxTransaction2.hide()
self.lblProtxTransaction3.hide()
self.lblProtxTransaction4.hide()
self.btnContinue.setEnabled(False)
self.btnContinue.repaint()
self.run_thread(self, self.proregtx_automatic_thread, (), on_thread_finish=self.finished_automatic_process)
def finished_automatic_process(self):
self.btnCancel.setEnabled(True)
self.btnCancel.repaint()
self.update_step_tab_ui()
def proregtx_automatic_thread(self, ctrl):
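"""Worker thread for the automatic process: 1) call 'protx register_prepare' on the
remote node, 2) sign the returned message with the hardware wallet, 3) submit the
signed transaction with 'protx register_submit', reporting progress in the UI labels."""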
log.debug('Starting proregtx_prepare_thread')
def set_text(widget, text: str):
def call(widget, text):
widget.setText(text)
widget.repaint()
widget.setVisible(True)
WndUtils.call_in_main_thread(call, widget, text)
def finished_with_success():
def call():
self.next_step()
WndUtils.call_in_main_thread(call)
try:
try:
mn_reg_support = self.crownd_intf.checkfeaturesupport('protx_register', self.app_config.app_version)
# is the "registration" feature enabled on the current rpc node?
if not mn_reg_support.get('enabled'):
if mn_reg_support.get('message'):
raise Exception(mn_reg_support.get('message'))
else:
raise Exception('The \'protx_register\' function is not supported by the RPC node '
'you are connected to.')
public_proxy_node = True
active = self.app_config.feature_register_dmn_automatic.get_value()
if not active:
msg = self.app_config.feature_register_dmn_automatic.get_message()
if not msg:
msg = 'The functionality of the automatic execution of the ProRegTx command on the ' \
'"public" RPC nodes is inactive. Use the manual method or contact the program author ' \
'for details.' | raise Exception(msg)
except JSONRPCException as e:
public_proxy_node = False # it's not a "public" rpc node
# preparing protx message
try:
funding_address = ''
if not public_proxy_node:
try:
# find an address to be used as the source of the transaction fees
min_fee = round(1024 * FEE_DUFF_PER_BYTE / 1e8, 8)
balances = self.crownd_intf.listaddressbalances(min_fee)
bal_list = []
for addr in balances:
bal_list.append({'address': addr, 'amount': balances[addr]})
bal_list.sort(key = lambda x: x['amount'])
if not bal_list:
raise Exception("No address can be found in the node's wallet with sufficient funds to "
"cover the transaction fees.")
funding_address = bal_list[0]['address']
except JSONRPCException as e:
log.info("Couldn't list the node address balances. We assume you are using a public RPC node and "
"the funding address for the transaction fees will be estimated during the "
"`register_prepare` call")
set_text(self.lblProtxTransaction1, '<b>1. Preparing a ProRegTx transaction on a remote node...</b>')
if self.dmn_owner_key_type == InputKeyType.PRIVATE:
owner_key = self.dmn_owner_privkey
else:
owner_key = self.dmn_owner_address
params = ['register_prepare', self.dmn_collateral_tx, self.dmn_collateral_tx_index,
self.dmn_ip + ':' + str(self.dmn_tcp_port) if self.dmn_ip else '0', owner_key,
self.dmn_operator_pubkey, self.dmn_voting_address, str(round(self.dmn_operator_reward, 2)),
self.dmn_owner_payout_addr]
if funding_address:
params.append(funding_address)
call_ret = self.crownd_intf.rpc_call(True, False, 'protx', *tuple(params))
call_ret_str = json.dumps(call_ret, default=EncodeDecimal)
msg_to_sign = call_ret.get('signMessage', '')
protx_tx = call_ret.get('tx')
log.debug('register_prepare returned: ' + call_ret_str)
set_text(self.lblProtxTransaction1,
'<b>1. Preparing a ProRegTx transaction on a remote node.</b> <span style="color:green">'
'Success.</span>')
except Exception as e:
set_text(
self.lblProtxTransaction1,
'<b>1. Preparing a ProRegTx transaction on a remote node.</b> <span style="color:red">Failed '
f'with the following error: {str(e)}</span>')
return
set_text(self.lblProtxTransaction2, '<b>Message to be signed:</b><br><code>' + msg_to_sign + '</code>')
# signing message:
set_text(self.lblProtxTransaction3, '<b>2. Signing message with hardware wallet...</b>')
try:
payload_sig_str = self.sign_protx_message_with_hw(msg_to_sign)
set_text(self.lblProtxTransaction3, '<b>2. Signing message with hardware wallet.</b> '
'<span style="color:green">Success.</span>')
except CancelException:
set_text(self.lblProtxTransaction3,
'<b>2. Signing message with hardware wallet.</b> <span style="color:red">Cancelled.</span>')
return
except Exception as e:
log.exception('Signature failed.')
set_text(self.lblProtxTransaction3,
'<b>2. Signing message with hardware wallet.</b> <span style="color:red">Failed with the '
f'following error: {str(e)}.</span>')
return
# submitting signed transaction
set_text(self.lblProtxTransaction4, '<b>3. Submitting the signed protx transaction to the remote node...</b>')
try:
self.dmn_reg_tx_hash = self.crownd_intf.rpc_call(True, False, 'protx', 'register_submit', protx_tx,
payload_sig_str)
log.debug('protx register_submit returned: ' + str(self.dmn_reg_tx_hash))
set_text(self.lblProtxTransaction4,
'<b>3. Submitting the signed protx transaction to the remote node.</b> <span style="'
'color:green">Success.</span>')
finished_with_success()
except Exception as e:
log.exception('protx register_submit failed')
set_text(self.lblProtxTransaction4,
'<b>3. Submitting the signed protx transaction to the remote node.</b> '
f'<span style="color:red">Failed with the following error: {str(e)}</span>')
except Exception as e:
log.exception('Exception occurred')
set_text(self.lblProtxTransaction1, f'<span style="color:red">{str(e)}</span>')
@pyqtSlot(bool)
def on_btnManualSignProtx_clicked(self):
prepare_result = self.edtManualProtxPrepareResult.toPlainText().strip()
if not prepare_result:
self.errorMsg('You need to enter a result of the "protx register_prepare" command.')
self.edtManualProtxPrepareResult.setFocus()
return
try:
prepare_result_dict = json.loads(prepare_result)
msg_to_sign = prepare_result_dict.get('signMessage', '')
protx_tx = prepare_result_dict.get('tx')
try:
payload_sig_str = self.sign_protx_message_with_hw(msg_to_sign)
protx_submit = f'protx register_submit "{protx_tx}" "{payload_sig_str}"'
self.edtManualProtxSubmit.setPlainText(protx_submit)
self.btnContinue.setEnabled(True)
self.btnContinue.repaint()
self.manual_signed_message = True
except CancelException:
return
except Exception as e:
log.exception('Signature failed.')
self.errorMsg(str(e))
return
except Exception as e:
self.errorMsg('Invalid "protx register_prepare" result. Note that the text must be copied along '
'with curly braces.')
return
def start_manual_process(self):
self.edtManualFundingAddress.setFocus()
self.update_manual_protx_prepare_command()
def update_manual_protx_prepare_command(self):
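"""Rebuild the 'protx register_prepare' command shown in the manual tab from the current
funding address; clear the dependent fields whenever the command changes."""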
addr = self.edtManualFundingAddress.text().strip()
if addr:
valid = validate_address(addr, self.app_config.crown_network)
if valid:
if self.dmn_owner_key_type == InputKeyType.PRIVATE:
owner_key = self.dmn_owner_privkey
else:
owner_key = self.dmn_owner_address
cmd = f'protx register_prepare "{self.dmn_collateral_tx}" "{self.dmn_collateral_tx_index}" ' \
f'"{self.dmn_ip + ":" + str(self.dmn_tcp_port) if self.dmn_ip else "0"}" ' \
f'"{owner_key}" "{self.dmn_operator_pubkey}" "{self.dmn_voting_address}" ' \
f'"{str(round(self.dmn_operator_reward, 2))}" "{self.dmn_owner_payout_addr}" "{addr}"'
else:
cmd = 'Enter a valid funding address in the edit box above'
else:
cmd = ''
self.edtManualProtxPrepare.setPlainText(cmd)
if cmd != self.last_manual_prepare_string:
self.last_manual_prepare_string = cmd
self.edtManualProtxSubmit.clear()
self.edtManualProtxPrepareResult.clear()
self.edtManualTxHash.clear()
self.dmn_reg_tx_hash = ''
self.manual_signed_message = False
def timerEvent(self, event: QTimerEvent):
""" Timer controlling the confirmation of the proreg transaction. """
if self.check_tx_confirmation():
self.killTimer(event.timerId())
def check_tx_confirmation(self):
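"""Check whether the ProRegTx transaction has been confirmed; update the summary label
with the confirmation block height and return True once it has."""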
try:
tx = self.crownd_intf.getrawtransaction(self.dmn_reg_tx_hash, 1, skip_cache=True)
conf = tx.get('confirmations')
if conf:
h = tx.get('height')
self.lblProtxSummary1.setText(
'<b><span style="color:green">Congratulations! The transaction for your DIP-3 masternode has been '
f'confirmed in block {h}.</span></b>')
return True
except Exception:
pass
return False
def update_show_hints_label(self):
if self.show_field_hinds:
lbl = '<a href="hide">Hide field descriptions</a>'
else:
lbl = '<a href="show">Show field descriptions</a>'
self.lblFieldHints.setText(lbl)
@pyqtSlot(str)
def on_lblFieldHints_linkActivated(self, link):
if link == 'show':
self.show_field_hinds = True
else:
self.show_field_hinds = False
self.update_show_hints_label()
self.update_fields_info(False)
self.minimize_dialog_height()
@pyqtSlot(str)
def on_edtManualFundingAddress_textChanged(self, text):
self.update_manual_protx_prepare_command()
@pyqtSlot(bool)
def on_btnManualFundingAddressPaste_clicked(self, checked):
cl = QApplication.clipboard()
self.edtManualFundingAddress.setText(cl.text())
@pyqtSlot(bool)
def on_btnManualProtxPrepareCopy_clicked(self, checked):
text = self.edtManualProtxPrepare.toPlainText()
cl = QApplication.clipboard()
cl.setText(text)
@pyqtSlot(bool)
def on_btnManualProtxPrepareResultPaste_clicked(self, checked):
cl = QApplication.clipboard()
self.edtManualProtxPrepareResult.setPlainText(cl.text())
@pyqtSlot(bool)
def on_btnManualProtxSubmitCopy_clicked(self, checked):
text = self.edtManualProtxSubmit.toPlainText()
cl = QApplication.clipboard()
cl.setText(text)
@pyqtSlot(bool)
def on_btnManualTxHashPaste_clicked(self, checked):
cl = QApplication.clipboard()
self.edtManualTxHash.setText(cl.text())
@pyqtSlot(bool)
def on_btnSummaryDMNOperatorKeyCopy_clicked(self, checked):
text = self.edtSummaryDMNOperatorKey.text()
cl = QApplication.clipboard()
cl.setText(text) | |
vmware.go | package vmware
import (
"context"
"fmt"
"net/url"
"os"
"strconv"
"time"
"github.com/pkg/errors"
uuid "github.com/satori/go.uuid"
"github.com/vmware/govmomi/cns"
"github.com/vmware/govmomi/vim25"
"github.com/vmware/govmomi/vim25/methods"
"github.com/vmware/govmomi/vim25/soap"
"github.com/vmware/govmomi/vim25/types"
"github.com/vmware/govmomi/vslm"
vslmtypes "github.com/vmware/govmomi/vslm/types"
"k8s.io/apimachinery/pkg/util/wait"
"github.com/kanisterio/kanister/pkg/blockstorage"
ktags "github.com/kanisterio/kanister/pkg/blockstorage/tags"
"github.com/kanisterio/kanister/pkg/field"
"github.com/kanisterio/kanister/pkg/log"
)
var _ blockstorage.Provider = (*FcdProvider)(nil)
const (
// VSphereLoginURLKey represents key in config to establish connection.
// It should contain the username and the password.
VSphereLoginURLKey = "VSphereLoginURL"
// VSphereEndpointKey represents key for the login endpoint.
VSphereEndpointKey = "VSphereEndpoint"
// VSphereUsernameKey represents key for the username.
VSphereUsernameKey = "VSphereUsername"
// VSpherePasswordKey represents key for the password.
VSpherePasswordKey = "VSpherePasswordKey"
noDescription = ""
defaultWaitTime = 60 * time.Minute
defaultRetryLimit = 30 * time.Minute
vmWareTimeoutMinEnv = "VMWARE_GOM_TIMEOUT_MIN"
)
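// vmWareTimeout defaults to defaultWaitTime but can be overridden (in minutes)
// through the VMWARE_GOM_TIMEOUT_MIN environment variable; see getEnvAsIntOrDefault below.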
var (
vmWareTimeout = time.Duration(getEnvAsIntOrDefault(vmWareTimeoutMinEnv, int(defaultWaitTime/time.Minute))) * time.Minute
)
// FcdProvider provides blockstorage.Provider
type FcdProvider struct {
Gom *vslm.GlobalObjectManager
Cns *cns.Client
}
// NewProvider creates new VMWare FCD provider with the config.
// URL taken from config helps to establish connection.
func NewProvider(config map[string]string) (blockstorage.Provider, error) {
ep, ok := config[VSphereEndpointKey]
if !ok {
return nil, errors.New("Failed to find VSphere endpoint value")
}
username, ok := config[VSphereUsernameKey]
if !ok {
return nil, errors.New("Failed to find VSphere username value")
}
password, ok := config[VSpherePasswordKey]
if !ok {
return nil, errors.New("Failed to find VSphere password value")
}
u := &url.URL{Scheme: "https", Host: ep, Path: "/sdk"}
soapCli := soap.NewClient(u, true)
ctx := context.Background()
cli, err := vim25.NewClient(ctx, soapCli)
if err != nil {
return nil, errors.Wrap(err, "Failed to create VIM client")
}
req := types.Login{
This: *cli.ServiceContent.SessionManager,
}
req.UserName = username
req.Password = password
_, err = methods.Login(ctx, cli, &req)
if err != nil {
return nil, errors.Wrap(err, "Failed to login")
}
cnsCli, err := cns.NewClient(ctx, cli)
if err != nil {
return nil, errors.Wrap(err, "Failed to create CNS client")
}
vslmCli, err := vslm.NewClient(ctx, cli)
if err != nil {
return nil, errors.Wrap(err, "Failed to create VSLM client")
}
gom := vslm.NewGlobalObjectManager(vslmCli)
return &FcdProvider{
Cns: cnsCli,
Gom: gom,
}, nil
}
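// Illustrative usage - the endpoint and credentials below are placeholder values:
//
//	provider, err := NewProvider(map[string]string{
//		VSphereEndpointKey: "vcenter.example.com",
//		VSphereUsernameKey: "administrator@vsphere.local",
//		VSpherePasswordKey: "s3cret",
//	})
//	if err != nil {
//		// handle connection/login failure
//	}
//	_ = provider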
// Type is part of blockstorage.Provider
func (p *FcdProvider) Type() blockstorage.Type {
return blockstorage.TypeFCD
}
// VolumeCreate is part of blockstorage.Provider
func (p *FcdProvider) VolumeCreate(ctx context.Context, volume blockstorage.Volume) (*blockstorage.Volume, error) {
return nil, errors.New("Not implemented")
}
// VolumeCreateFromSnapshot is part of blockstorage.Provider
func (p *FcdProvider) VolumeCreateFromSnapshot(ctx context.Context, snapshot blockstorage.Snapshot, tags map[string]string) (*blockstorage.Volume, error) {
volID, snapshotID, err := SplitSnapshotFullID(snapshot.ID)
if err != nil {
return nil, errors.Wrap(err, "Failed to split snapshot full ID")
}
log.Debug().Print("CreateDiskFromSnapshot foo", field.M{"VolumeID": volID, "SnapshotID": snapshotID})
uid := uuid.NewV1().String()
task, err := p.Gom.CreateDiskFromSnapshot(ctx, vimID(volID), vimID(snapshotID), uid, nil, nil, "")
if err != nil {
return nil, errors.Wrap(err, "Failed to create disk from snapshot")
}
log.Debug().Print("Started CreateDiskFromSnapshot task", field.M{"VolumeID": volID, "SnapshotID": snapshotID})
res, err := task.Wait(ctx, vmWareTimeout)
if err != nil {
return nil, errors.Wrap(err, "Failed to wait on task")
}
if res == nil {
return nil, errors.Errorf("vSphere task did not complete. TaskRefType: %s, TaskRefValue: %s, VolID: %s, SnapshotID: %s, NewVolID: %s",
task.ManagedObjectReference.Type, task.ManagedObjectReference.Value, volID, snapshotID, uid)
}
log.Debug().Print("CreateDiskFromSnapshot task complete", field.M{"VolumeID": volID, "SnapshotID": snapshotID})
obj, ok := res.(types.VStorageObject)
if !ok {
return nil, errors.Errorf("Wrong type returned for vSphere. Type: %T, Value: %v", res, res)
}
vol, err := p.VolumeGet(ctx, obj.Config.Id.Id, "")
if err != nil {
return nil, errors.Wrap(err, "Failed to get volume")
}
tagsCNS := make(map[string]string)
tagsCNS["cns.tag"] = "1"
tags = ktags.Union(tags, tagsCNS)
if err = p.SetTags(ctx, vol, tags); err != nil {
return nil, errors.Wrap(err, "Failed to set tags")
}
log.Debug().Print("CreateDiskFromSnapshot complete", field.M{"SnapshotID": snapshotID, "NewVolumeID": vol.ID})
return p.VolumeGet(ctx, vol.ID, "")
}
// VolumeDelete is part of blockstorage.Provider
func (p *FcdProvider) VolumeDelete(ctx context.Context, volume *blockstorage.Volume) error {
task, err := p.Gom.Delete(ctx, vimID(volume.ID))
if err != nil {
return errors.Wrap(err, "Failed to delete the disk")
}
_, err = task.Wait(ctx, vmWareTimeout)
return err
}
// VolumeGet is part of blockstorage.Provider
func (p *FcdProvider) VolumeGet(ctx context.Context, id string, zone string) (*blockstorage.Volume, error) {
obj, err := p.Gom.Retrieve(ctx, vimID(id))
if err != nil {
return nil, errors.Wrap(err, "Failed to query the disk")
}
kvs, err := p.Gom.RetrieveMetadata(ctx, vimID(id), nil, "")
if err != nil {
return nil, errors.Wrap(err, "Failed to get volume metadata")
}
vol, err := convertFromObjectToVolume(obj)
if err != nil {
return nil, errors.Wrap(err, "Failed to convert object to volume")
}
vol.Tags = convertKeyValueToTags(kvs)
return vol, nil
}
// SnapshotCopy is part of blockstorage.Provider
func (p *FcdProvider) SnapshotCopy(ctx context.Context, from blockstorage.Snapshot, to blockstorage.Snapshot) (*blockstorage.Snapshot, error) {
return nil, errors.New("Not implemented")
}
// SnapshotCopyWithArgs is part of blockstorage.Provider
func (p *FcdProvider) SnapshotCopyWithArgs(ctx context.Context, from blockstorage.Snapshot, to blockstorage.Snapshot, args map[string]string) (*blockstorage.Snapshot, error) {
return nil, errors.New("Copy Snapshot with Args not implemented")
}
// SnapshotCreate is part of blockstorage.Provider
func (p *FcdProvider) SnapshotCreate(ctx context.Context, volume blockstorage.Volume, tags map[string]string) (*blockstorage.Snapshot, error) {
var res types.AnyType
err := wait.PollImmediate(time.Second, defaultRetryLimit, func() (bool, error) {
log.Debug().Print("CreateSnapshot", field.M{"VolumeID": volume.ID})
task, lerr := p.Gom.CreateSnapshot(ctx, vimID(volume.ID), noDescription)
if lerr != nil {
return false, errors.Wrap(lerr, "Failed to create snapshot")
}
log.Debug().Print("Started CreateSnapshot task", field.M{"VolumeID": volume.ID})
res, lerr = task.Wait(ctx, vmWareTimeout)
if lerr != nil {
if soap.IsVimFault(lerr) {
switch soap.ToVimFault(lerr).(type) {
case *types.InvalidState:
log.Error().WithError(lerr).Print("There is some operation, other than this CreateSnapshot invocation, on the VM attached still being protected by its VM state. Will retry")
return false, nil
case *vslmtypes.VslmSyncFault:
log.Error().WithError(lerr).Print("CreateSnapshot failed with VslmSyncFault error possibly due to race between concurrent DeleteSnapshot invocation. Will retry")
return false, nil
case *types.NotFound:
log.Error().WithError(lerr).Print("CreateSnapshot failed with NotFound error. Will retry")
return false, nil
}
}
return false, errors.Wrap(lerr, "Failed to wait on task")
}
log.Debug().Print("CreateSnapshot task complete", field.M{"VolumeID": volume.ID})
return true, nil
})
if err != nil {
return nil, errors.Wrap(err, "Failed to create snapshot")
}
id, ok := res.(types.ID)
if !ok {
return nil, errors.New("Unexpected type")
}
snap, err := p.SnapshotGet(ctx, SnapshotFullID(volume.ID, id.Id))
if err != nil {
return nil, err
}
log.Debug().Print("SnapshotCreate complete", field.M{"VolumeID": volume.ID, "SnapshotID": snap.ID})
// We don't get size information from `SnapshotGet` - so set this to the volume size for now
if snap.SizeInBytes == 0 {
snap.SizeInBytes = volume.SizeInBytes
}
snap.Volume = &volume
return snap, nil
}
// SnapshotCreateWaitForCompletion is part of blockstorage.Provider
func (p *FcdProvider) SnapshotCreateWaitForCompletion(ctx context.Context, snapshot *blockstorage.Snapshot) error {
return nil
}
// SnapshotDelete is part of blockstorage.Provider
func (p *FcdProvider) SnapshotDelete(ctx context.Context, snapshot *blockstorage.Snapshot) error {
volID, snapshotID, err := SplitSnapshotFullID(snapshot.ID)
if err != nil {
return errors.Wrap(err, "Cannot infer volume ID from full snapshot ID")
}
return wait.PollImmediate(time.Second, defaultRetryLimit, func() (bool, error) {
log.Debug().Print("SnapshotDelete", field.M{"VolumeID": volID, "SnapshotID": snapshotID})
task, lerr := p.Gom.DeleteSnapshot(ctx, vimID(volID), vimID(snapshotID))
if lerr != nil {
return false, errors.Wrap(lerr, "Failed to delete snapshot")
}
log.Debug().Print("Started SnapshotDelete task", field.M{"VolumeID": volID, "SnapshotID": snapshotID})
_, lerr = task.Wait(ctx, vmWareTimeout)
if lerr != nil {
// The following error handling was pulled from https://github.com/vmware-tanzu/astrolabe/blob/91eeed4dcf77edd1387a25e984174f159d66fedb/pkg/ivd/ivd_protected_entity.go#L433
if soap.IsVimFault(lerr) {
switch soap.ToVimFault(lerr).(type) {
case *types.InvalidArgument:
log.Error().WithError(lerr).Print("Disk doesn't have given snapshot due to the snapshot stamp being removed in the previous DeleteSnapshot operation which failed with an InvalidState fault. It will be resolved by the next snapshot operation on the same VM. Will NOT retry")
return true, nil
case *types.NotFound:
log.Error().WithError(lerr).Print("There is a temporary catalog mismatch due to a race condition with one another concurrent DeleteSnapshot operation. It will be resolved by the next consolidateDisks operation on the same VM. Will NOT retry")
return true, nil
case *types.InvalidState:
log.Error().WithError(lerr).Print("There is some operation, other than this DeleteSnapshot invocation, on the same VM still being protected by its VM state. Will retry")
return false, nil
case *types.TaskInProgress:
log.Error().WithError(lerr).Print("There is some other InProgress operation on the same VM. Will retry")
return false, nil
case *types.FileLocked:
log.Error().WithError(lerr).Print("An error occurred while consolidating disks: Failed to lock the file. Will retry")
return false, nil
}
}
return false, errors.Wrap(lerr, "Failed to wait on task")
}
log.Debug().Print("SnapshotDelete task complete", field.M{"VolumeID": volID, "SnapshotID": snapshotID})
return true, nil
})
}
// SnapshotGet is part of blockstorage.Provider
func (p *FcdProvider) SnapshotGet(ctx context.Context, id string) (*blockstorage.Snapshot, error) {
volID, snapshotID, err := SplitSnapshotFullID(id)
if err != nil {
return nil, errors.Wrap(err, "Cannot infer volume ID from full snapshot ID")
}
log.Debug().Print("RetrieveSnapshotInfo:" + volID)
results, err := p.Gom.RetrieveSnapshotInfo(ctx, vimID(volID))
if err != nil {
return nil, errors.Wrap(err, "Failed to get snapshot info")
}
log.Debug().Print("RetrieveSnapshotInfo done:" + volID)
for _, result := range results {
if result.Id.Id == snapshotID {
snapshot, err := convertFromObjectToSnapshot(&result, volID)
if err != nil {
return nil, errors.Wrap(err, "Failed to convert object to snapshot")
}
snapID := vimID(snapshotID)
log.Debug().Print("RetrieveMetadata: " + volID + "," + snapshotID)
kvs, err := p.Gom.RetrieveMetadata(ctx, vimID(volID), &snapID, "")
if err != nil {
return nil, errors.Wrap(err, "Failed to get snapshot metadata")
}
log.Debug().Print("RetrieveMetadata done: " + volID + "," + snapshotID)
snapshot.Tags = convertKeyValueToTags(kvs)
return snapshot, nil
}
}
return nil, errors.New("Failed to find snapshot")
}
// SetTags is part of blockstorage.Provider
func (p *FcdProvider) SetTags(ctx context.Context, resource interface{}, tags map[string]string) error {
switch r := resource.(type) {
case *blockstorage.Volume:
return p.setTagsVolume(ctx, r, tags)
case *blockstorage.Snapshot:
return nil
default:
return errors.New("Unsupported type for resource")
}
}
func (p *FcdProvider) setTagsVolume(ctx context.Context, volume *blockstorage.Volume, tags map[string]string) error {
if volume == nil {
return errors.New("Empty volume")
}
task, err := p.Gom.UpdateMetadata(ctx, vimID(volume.ID), convertTagsToKeyValue(tags), nil)
if err != nil {
return errors.Wrap(err, "Failed to update metadata")
}
_, err = task.Wait(ctx, vmWareTimeout)
if err != nil {
return errors.Wrap(err, "Failed to wait on task")
}
return nil
}
// VolumesList is part of blockstorage.Provider
func (p *FcdProvider) VolumesList(ctx context.Context, tags map[string]string, zone string) ([]*blockstorage.Volume, error) {
return nil, errors.New("Not implemented")
}
// SnapshotsList is part of blockstorage.Provider
func (p *FcdProvider) SnapshotsList(ctx context.Context, tags map[string]string) ([]*blockstorage.Snapshot, error) {
return nil, errors.New("Not implemented")
}
func getEnvAsIntOrDefault(envKey string, def int) int | {
if v, ok := os.LookupEnv(envKey); ok {
iv, err := strconv.Atoi(v)
if err == nil && iv > 0 {
return iv
}
log.Debug().Print("Using default timeout value for vSphere because of invalid environment variable", field.M{"envVar": v})
}
return def
} |
|
listing_group_type.pb.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc v3.17.3
// source: google/ads/googleads/v9/enums/listing_group_type.proto
package enums
import ( | sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// The type of the listing group.
type ListingGroupTypeEnum_ListingGroupType int32
const (
// Not specified.
ListingGroupTypeEnum_UNSPECIFIED ListingGroupTypeEnum_ListingGroupType = 0
// Used for return value only. Represents value unknown in this version.
ListingGroupTypeEnum_UNKNOWN ListingGroupTypeEnum_ListingGroupType = 1
// Subdivision of products along some listing dimension. These nodes
// are not used by serving to target listing entries, but exist purely
// to define the structure of the tree.
ListingGroupTypeEnum_SUBDIVISION ListingGroupTypeEnum_ListingGroupType = 2
// Listing group unit that defines a bid.
ListingGroupTypeEnum_UNIT ListingGroupTypeEnum_ListingGroupType = 3
)
// Enum value maps for ListingGroupTypeEnum_ListingGroupType.
var (
ListingGroupTypeEnum_ListingGroupType_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "SUBDIVISION",
3: "UNIT",
}
ListingGroupTypeEnum_ListingGroupType_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"SUBDIVISION": 2,
"UNIT": 3,
}
)
func (x ListingGroupTypeEnum_ListingGroupType) Enum() *ListingGroupTypeEnum_ListingGroupType {
p := new(ListingGroupTypeEnum_ListingGroupType)
*p = x
return p
}
func (x ListingGroupTypeEnum_ListingGroupType) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (ListingGroupTypeEnum_ListingGroupType) Descriptor() protoreflect.EnumDescriptor {
return file_enums_listing_group_type_proto_enumTypes[0].Descriptor()
}
func (ListingGroupTypeEnum_ListingGroupType) Type() protoreflect.EnumType {
return &file_enums_listing_group_type_proto_enumTypes[0]
}
func (x ListingGroupTypeEnum_ListingGroupType) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use ListingGroupTypeEnum_ListingGroupType.Descriptor instead.
func (ListingGroupTypeEnum_ListingGroupType) EnumDescriptor() ([]byte, []int) {
return file_enums_listing_group_type_proto_rawDescGZIP(), []int{0, 0}
}
// Container for enum describing the type of the listing group.
type ListingGroupTypeEnum struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *ListingGroupTypeEnum) Reset() {
*x = ListingGroupTypeEnum{}
if protoimpl.UnsafeEnabled {
mi := &file_enums_listing_group_type_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListingGroupTypeEnum) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListingGroupTypeEnum) ProtoMessage() {}
func (x *ListingGroupTypeEnum) ProtoReflect() protoreflect.Message {
mi := &file_enums_listing_group_type_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListingGroupTypeEnum.ProtoReflect.Descriptor instead.
func (*ListingGroupTypeEnum) Descriptor() ([]byte, []int) {
return file_enums_listing_group_type_proto_rawDescGZIP(), []int{0}
}
var File_enums_listing_group_type_proto protoreflect.FileDescriptor
var file_enums_listing_group_type_proto_rawDesc = []byte{
0x0a, 0x36, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x39, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x2f,
0x6c, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x74, 0x79,
0x70, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76,
0x39, 0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f,
0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x63, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67,
0x47, 0x72, 0x6f, 0x75, 0x70, 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x22, 0x4b, 0x0a,
0x10, 0x4c, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x54, 0x79, 0x70,
0x65, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44,
0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x12,
0x0f, 0x0a, 0x0b, 0x53, 0x55, 0x42, 0x44, 0x49, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x10, 0x02,
0x12, 0x08, 0x0a, 0x04, 0x55, 0x4e, 0x49, 0x54, 0x10, 0x03, 0x42, 0xea, 0x01, 0x0a, 0x21, 0x63,
0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x39, 0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73,
0x42, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x54, 0x79,
0x70, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x42, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x65, 0x6e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73,
0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76,
0x39, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x3b, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0xa2, 0x02, 0x03,
0x47, 0x41, 0x41, 0xaa, 0x02, 0x1d, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x41, 0x64, 0x73,
0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x2e, 0x56, 0x39, 0x2e, 0x45, 0x6e,
0x75, 0x6d, 0x73, 0xca, 0x02, 0x1d, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x5c, 0x41, 0x64, 0x73,
0x5c, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x5c, 0x56, 0x39, 0x5c, 0x45, 0x6e,
0x75, 0x6d, 0x73, 0xea, 0x02, 0x21, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x41, 0x64,
0x73, 0x3a, 0x3a, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x56, 0x39,
0x3a, 0x3a, 0x45, 0x6e, 0x75, 0x6d, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_enums_listing_group_type_proto_rawDescOnce sync.Once
file_enums_listing_group_type_proto_rawDescData = file_enums_listing_group_type_proto_rawDesc
)
func file_enums_listing_group_type_proto_rawDescGZIP() []byte {
file_enums_listing_group_type_proto_rawDescOnce.Do(func() {
file_enums_listing_group_type_proto_rawDescData = protoimpl.X.CompressGZIP(file_enums_listing_group_type_proto_rawDescData)
})
return file_enums_listing_group_type_proto_rawDescData
}
var file_enums_listing_group_type_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_enums_listing_group_type_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_enums_listing_group_type_proto_goTypes = []interface{}{
(ListingGroupTypeEnum_ListingGroupType)(0), // 0: google.ads.googleads.v9.enums.ListingGroupTypeEnum.ListingGroupType
(*ListingGroupTypeEnum)(nil), // 1: google.ads.googleads.v9.enums.ListingGroupTypeEnum
}
var file_enums_listing_group_type_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_enums_listing_group_type_proto_init() }
func file_enums_listing_group_type_proto_init() {
if File_enums_listing_group_type_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_enums_listing_group_type_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListingGroupTypeEnum); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_enums_listing_group_type_proto_rawDesc,
NumEnums: 1,
NumMessages: 1,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_enums_listing_group_type_proto_goTypes,
DependencyIndexes: file_enums_listing_group_type_proto_depIdxs,
EnumInfos: file_enums_listing_group_type_proto_enumTypes,
MessageInfos: file_enums_listing_group_type_proto_msgTypes,
}.Build()
File_enums_listing_group_type_proto = out.File
file_enums_listing_group_type_proto_rawDesc = nil
file_enums_listing_group_type_proto_goTypes = nil
file_enums_listing_group_type_proto_depIdxs = nil
} | _ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect" |
qr-parser.module.ts | import { CommonModule } from "@angular/common";
import { NgModule } from "@angular/core";
import { MaterialModule } from "src/app/modules/material.module";
import { QrParserRoutingModule } from "./qr-parser-routing.module";
import { QrParserComponent } from "./qr-parser.component";
import { QrService } from "./qr-service/qr.service";
import { GuestDisplayComponent } from './guest-display/guest-display.component';
import { FaqsComponent } from "./faqs/faqs.component";
const components = [
QrParserComponent,
]
@NgModule({
declarations: [
...components,
GuestDisplayComponent,
FaqsComponent
],
exports: [
...components
],
imports: [
CommonModule,
QrParserRoutingModule,
MaterialModule,
],
providers: [
QrService
]
})
export class | { } | QrParserModule |
bitbank.py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/anyex/anyex/blob/master/CONTRIBUTING.md#how-to-contribute-code
from anyex.base.exchange import Exchange
from anyex.base.errors import ExchangeError
from anyex.base.errors import AuthenticationError
from anyex.base.errors import PermissionDenied
from anyex.base.errors import InsufficientFunds
from anyex.base.errors import InvalidOrder
from anyex.base.errors import OrderNotFound
from anyex.base.errors import InvalidNonce
class bitbank (Exchange):
def describe(self):
return self.deep_extend(super(bitbank, self).describe(), {
'id': 'bitbank',
'name': 'bitbank',
'countries': 'JP',
'version': 'v1',
'has': {
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchMyTrades': True,
'fetchDepositAddress': True,
'withdraw': True,
},
'timeframes': {
'1m': '1min',
'5m': '5min',
'15m': '15min',
'30m': '30min',
'1h': '1hour',
'4h': '4hour',
'8h': '8hour',
'12h': '12hour',
'1d': '1day',
'1w': '1week',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/37808081-b87f2d9c-2e59-11e8-894d-c1900b7584fe.jpg',
'api': {
'public': 'https://public.bitbank.cc',
'private': 'https://api.bitbank.cc',
},
'www': 'https://bitbank.cc/',
'doc': 'https://docs.bitbank.cc/',
'fees': 'https://bitbank.cc/docs/fees/',
},
'api': {
'public': {
'get': [
'{pair}/ticker',
'{pair}/depth',
'{pair}/transactions',
'{pair}/transactions/{YYYYMMDD}',
'{pair}/candlestick/{candle-type}/{YYYYMMDD}',
],
},
'private': {
'get': [
'user/assets',
'user/spot/order',
'user/spot/active_orders',
'user/spot/trade_history',
'user/withdrawal_account',
],
'post': [
'user/spot/order',
'user/spot/cancel_order',
'user/spot/cancel_orders',
'user/spot/orders_info',
'user/request_withdrawal',
],
},
},
'markets': {
'BCH/BTC': {'id': 'bcc_btc', 'symbol': 'BCH/BTC', 'base': 'BCH', 'quote': 'BTC', 'baseId': 'bcc', 'quoteId': 'btc'},
'BCH/JPY': {'id': 'bcc_jpy', 'symbol': 'BCH/JPY', 'base': 'BCH', 'quote': 'JPY', 'baseId': 'bcc', 'quoteId': 'jpy'},
'MONA/BTC': {'id': 'mona_btc', 'symbol': 'MONA/BTC', 'base': 'MONA', 'quote': 'BTC', 'baseId': 'mona', 'quoteId': 'btc'},
'MONA/JPY': {'id': 'mona_jpy', 'symbol': 'MONA/JPY', 'base': 'MONA', 'quote': 'JPY', 'baseId': 'mona', 'quoteId': 'jpy'},
'ETH/BTC': {'id': 'eth_btc', 'symbol': 'ETH/BTC', 'base': 'ETH', 'quote': 'BTC', 'baseId': 'eth', 'quoteId': 'btc'},
'LTC/BTC': {'id': 'ltc_btc', 'symbol': 'LTC/BTC', 'base': 'LTC', 'quote': 'BTC', 'baseId': 'ltc', 'quoteId': 'btc'},
'XRP/JPY': {'id': 'xrp_jpy', 'symbol': 'XRP/JPY', 'base': 'XRP', 'quote': 'JPY', 'baseId': 'xrp', 'quoteId': 'jpy'},
'BTC/JPY': {'id': 'btc_jpy', 'symbol': 'BTC/JPY', 'base': 'BTC', 'quote': 'JPY', 'baseId': 'btc', 'quoteId': 'jpy'},
},
'fees': {
'trading': {
# only temporarily
'maker': 0.0,
'taker': 0.0,
},
'funding': {
'withdraw': {
# 'JPY': lambda amount: 756 if amount > 30000 else 540,
'BTC': 0.001,
'LTC': 0.001,
'XRP': 0.15,
'ETH': 0.0005,
'MONA': 0.001,
'BCC': 0.001,
},
},
},
'precision': {
'price': 8,
'amount': 8,
},
'exceptions': {
'20001': AuthenticationError,
'20002': AuthenticationError,
'20003': AuthenticationError,
'20005': AuthenticationError,
'20004': InvalidNonce,
'40020': InvalidOrder,
'40021': InvalidOrder,
'40025': ExchangeError,
'40013': OrderNotFound,
'40014': OrderNotFound,
'50008': PermissionDenied,
'50009': OrderNotFound,
'50010': OrderNotFound,
'60001': InsufficientFunds,
},
})
def parse_ticker(self, ticker, market=None):
symbol = market['symbol']
timestamp = ticker['timestamp']
last = float(ticker['last'])
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': float(ticker['high']),
'low': float(ticker['low']),
'bid': float(ticker['buy']),
'bidVolume': None,
'ask': float(ticker['sell']),
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': float(ticker['vol']),
'quoteVolume': None,
'info': ticker,
}
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicGetPairTicker(self.extend({
'pair': market['id'],
}, params))
return self.parse_ticker(response['data'], market)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
response = self.publicGetPairDepth(self.extend({
'pair': self.market_id(symbol),
}, params))
orderbook = response['data']
return self.parse_order_book(orderbook, orderbook['timestamp'])
def parse_trade(self, trade, market=None):
timestamp = trade['executed_at']
price = float(trade['price'])
amount = float(trade['amount'])
symbol = market['symbol']
cost = self.cost_to_precision(symbol, price * amount)
id = self.safe_string(trade, 'transaction_id')
if not id:
id = self.safe_string(trade, 'trade_id')
fee = None
if 'fee_amount_quote' in trade:
fee = {
'currency': market['quote'],
'cost': self.safe_float(trade, 'fee_amount_quote'),
}
return {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': id,
'order': self.safe_string(trade, 'order_id'),
'type': self.safe_string(trade, 'type'),
'side': trade['side'],
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
'info': trade,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
trades = self.publicGetPairTransactions(self.extend({
'pair': market['id'],
}, params))
return self.parse_trades(trades['data']['transactions'], market, since, limit)
def parse_ohlcv(self, ohlcv, market=None, timeframe='5m', since=None, limit=None):
return [
ohlcv[5],
float(ohlcv[0]),
float(ohlcv[1]),
float(ohlcv[2]),
float(ohlcv[3]),
float(ohlcv[4]),
]
def fetch_ohlcv(self, symbol, timeframe='5m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
date = self.milliseconds()
date = self.ymd(date)
date = date.split('-')
response = self.publicGetPairCandlestickCandleTypeYYYYMMDD(self.extend({
'pair': market['id'],
'candle-type': self.timeframes[timeframe],
'YYYYMMDD': ''.join(date),
}, params))
ohlcv = response['data']['candlestick'][0]['ohlcv']
return self.parse_ohlcvs(ohlcv, market, timeframe, since, limit)
def fetch_balance(self, params={}):
self.load_markets()
response = self.privateGetUserAssets(params)
result = {'info': response}
balances = response['data']['assets']
for i in range(0, len(balances)):
balance = balances[i]
id = balance['asset']
code = id
if id in self.currencies_by_id:
code = self.currencies_by_id[id]['code']
account = {
'free': float(balance['free_amount']),
'used': float(balance['locked_amount']),
'total': float(balance['onhand_amount']),
}
result[code] = account
return self.parse_balance(result)
def parse_order(self, order, market=None):
marketId = self.safe_string(order, 'pair')
symbol = None
if marketId and not market and (marketId in list(self.marketsById.keys())):
market = self.marketsById[marketId]
if market:
symbol = market['symbol']
timestamp = self.safe_integer(order, 'ordered_at') * 1000
price = float(order['price'])
amount = self.safe_float(order, 'start_amount')
filled = self.safe_float(order, 'executed_amount')
remaining = self.safe_float(order, 'remaining_amount')
cost = filled * self.safe_float(order, 'average_price')
status = self.safe_string(order, 'status')
# UNFILLED
# PARTIALLY_FILLED
# FULLY_FILLED
# CANCELED_UNFILLED
# CANCELED_PARTIALLY_FILLED
if status == 'FULLY_FILLED':
status = 'closed'
elif status == 'CANCELED_UNFILLED' or status == 'CANCELED_PARTIALLY_FILLED':
status = 'canceled'
else:
status = 'open'
return {
'id': self.safe_string(order, 'order_id'),
'datetime': self.iso8601(timestamp),
'timestamp': timestamp,
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': order['type'],
'side': order['side'],
'price': price,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'trades': None,
'fee': None,
'info': order,
}
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
if price is None:
raise InvalidOrder(self.id + ' createOrder requires a price argument for both market and limit orders')
request = {
'pair': market['id'],
'amount': self.amount_to_string(symbol, amount),
'price': self.price_to_precision(symbol, price),
'side': side,
'type': type,
}
response = self.privatePostUserSpotOrder(self.extend(request, params))
id = response['data']['order_id']
order = self.parse_order(response['data'], market)
self.orders[id] = order
return order
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
market = self.market(symbol)
response = self.privatePostUserSpotCancelOrder(self.extend({
'order_id': id,
'pair': market['id'],
}, params))
return response['data']
def | (self, id, symbol=None, params={}):
self.load_markets()
market = self.market(symbol)
response = self.privateGetUserSpotOrder(self.extend({
'order_id': id,
'pair': market['id'],
}, params))
return self.parse_order(response['data'])
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
if limit:
request['count'] = limit
if since:
request['since'] = int(since / 1000)
orders = self.privateGetUserSpotActiveOrders(self.extend(request, params))
return self.parse_orders(orders['data']['orders'], market, since, limit)
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
market = None
if symbol is not None:
self.load_markets()
market = self.market(symbol)
request = {}
if market is not None:
request['pair'] = market['id']
if limit is not None:
request['count'] = limit
if since is not None:
request['since'] = int(since / 1000)
trades = self.privateGetUserSpotTradeHistory(self.extend(request, params))
return self.parse_trades(trades['data']['trades'], market, since, limit)
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
response = self.privateGetUserWithdrawalAccount(self.extend({
'asset': currency['id'],
}, params))
        # Not sure about this, if there could be more accounts...
accounts = response['data']['accounts']
address = self.safe_string(accounts[0], 'address')
status = 'ok' if address else 'none'
return {
'currency': currency,
'address': address,
'tag': None,
'status': status,
'info': response,
}
def withdraw(self, code, amount, address, tag=None, params={}):
        if 'uuid' not in params:
raise ExchangeError(self.id + ' uuid is required for withdrawal')
self.load_markets()
currency = self.currency(code)
response = self.privatePostUserRequestWithdrawal(self.extend({
'asset': currency['id'],
'amount': amount,
}, params))
return {
'info': response,
'id': response['data']['txid'],
}
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
query = self.omit(params, self.extract_params(path))
url = self.urls['api'][api] + '/'
if api == 'public':
url += self.implode_params(path, params)
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
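            # Private requests sign the nonce plus the JSON body (POST) or the
            # versioned path and query string (GET) with an HMAC of the API
            # secret, sent via the ACCESS-KEY/ACCESS-NONCE/ACCESS-SIGNATURE headers.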
nonce = str(self.nonce())
auth = nonce
url += self.version + '/' + self.implode_params(path, params)
if method == 'POST':
body = self.json(query)
auth += body
else:
auth += '/' + self.version + '/' + path
if query:
query = self.urlencode(query)
url += '?' + query
auth += '?' + query
headers = {
'Content-Type': 'application/json',
'ACCESS-KEY': self.apiKey,
'ACCESS-NONCE': nonce,
'ACCESS-SIGNATURE': self.hmac(self.encode(auth), self.encode(self.secret)),
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
success = self.safe_integer(response, 'success')
data = self.safe_value(response, 'data')
if not success or not data:
errorMessages = {
'10000': 'URL does not exist',
'10001': 'A system error occurred. Please contact support',
'10002': 'Invalid JSON format. Please check the contents of transmission',
'10003': 'A system error occurred. Please contact support',
'10005': 'A timeout error occurred. Please wait for a while and try again',
'20001': 'API authentication failed',
'20002': 'Illegal API key',
'20003': 'API key does not exist',
'20004': 'API Nonce does not exist',
'20005': 'API signature does not exist',
'20011': 'Two-step verification failed',
'20014': 'SMS authentication failed',
'30001': 'Please specify the order quantity',
'30006': 'Please specify the order ID',
'30007': 'Please specify the order ID array',
'30009': 'Please specify the stock',
'30012': 'Please specify the order price',
'30013': 'Trade Please specify either',
'30015': 'Please specify the order type',
'30016': 'Please specify asset name',
'30019': 'Please specify uuid',
'30039': 'Please specify the amount to be withdrawn',
'40001': 'The order quantity is invalid',
'40006': 'Count value is invalid',
'40007': 'End time is invalid',
'40008': 'end_id Value is invalid',
'40009': 'The from_id value is invalid',
'40013': 'The order ID is invalid',
'40014': 'The order ID array is invalid',
'40015': 'Too many specified orders',
'40017': 'Incorrect issue name',
'40020': 'The order price is invalid',
'40021': 'The trading classification is invalid',
'40022': 'Start date is invalid',
'40024': 'The order type is invalid',
'40025': 'Incorrect asset name',
'40028': 'uuid is invalid',
'40048': 'The amount of withdrawal is illegal',
                '50003': 'Currently, this account is in a state where you can not perform the operation you specified. Please contact support',
                '50004': 'Currently, this account is temporarily registered. Please try again after registering your account',
                '50005': 'Currently, this account is locked. Please contact support',
                '50006': 'Currently, this account is locked. Please contact support',
'50008': 'User identification has not been completed',
'50009': 'Your order does not exist',
'50010': 'Can not cancel specified order',
'50011': 'API not found',
'60001': 'The number of possessions is insufficient',
'60002': 'It exceeds the quantity upper limit of the tender buying order',
'60003': 'The specified quantity exceeds the limit',
'60004': 'The specified quantity is below the threshold',
'60005': 'The specified price is above the limit',
'60006': 'The specified price is below the lower limit',
'70001': 'A system error occurred. Please contact support',
'70002': 'A system error occurred. Please contact support',
'70003': 'A system error occurred. Please contact support',
'70004': 'We are unable to accept orders as the transaction is currently suspended',
'70005': 'Order can not be accepted because purchase order is currently suspended',
'70006': 'We can not accept orders because we are currently unsubscribed ',
}
errorClasses = self.exceptions
code = self.safe_string(data, 'code')
message = self.safe_string(errorMessages, code, 'Error')
ErrorClass = self.safe_value(errorClasses, code)
if ErrorClass is not None:
raise ErrorClass(message)
else:
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| fetch_order |
common_config.go | package subcommands
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"regexp"
"strings"
"github.com/fatih/color"
toml "github.com/pelletier/go-toml"
"github.com/sirupsen/logrus"
"github.com/foundriesio/fioctl/client"
)
// Aktualizr puts all config files into a single lexicographically sorted map.
// We have to make sure this file is parsed *after* sota.toml.
const (
FIO_TOML_NAME = "z-50-fioctl.toml"
FIO_COMPOSE_APPS_DIR = "/var/sota/compose-apps"
FIO_TOML_ONCHANGED = "/usr/share/fioconfig/handlers/aktualizr-toml-update"
)
type SetConfigOptions struct {
Reason string
FileArgs []string
IsRawFile bool
SetFunc func(client.ConfigCreateRequest) error
EncryptFunc func(string) string
}
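// SetConfig creates a new config change from the given options. FileArgs is
// either a single raw request file (IsRawFile) or a list of name=content
// pairs, where name==/path/to/file loads the content from disk instead.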
func SetConfig(opts *SetConfigOptions) {
cfg := client.ConfigCreateRequest{Reason: opts.Reason}
if opts.IsRawFile {
if len(opts.FileArgs) != 1 {
DieNotNil(fmt.Errorf("Raw file only accepts one file argument"))
}
ReadConfig(opts.FileArgs[0], &cfg)
} else {
for _, keyval := range opts.FileArgs {
parts := strings.SplitN(keyval, "=", 2)
if len(parts) != 2 {
DieNotNil(fmt.Errorf("Invalid file=content argument: %s", keyval))
}
// support for filename=filecontent format
content := parts[1]
if len(content) > 0 && content[0] == '=' {
// support for filename==/file/path.ext format
data, err := ioutil.ReadFile(content[1:])
DieNotNil(err, "Unable to read config file:")
content = string(data)
}
cfg.Files = append(cfg.Files, client.ConfigFile{Name: parts[0], Value: content})
}
}
if opts.EncryptFunc != nil {
for i := range cfg.Files {
file := &cfg.Files[i]
if !file.Unencrypted {
file.Value = opts.EncryptFunc(file.Value)
}
}
}
DieNotNil(opts.SetFunc(cfg))
}
type LogConfigsOptions struct {
Limit int
ShowAppliedAt bool
ListFunc func() (*client.DeviceConfigList, error)
ListContFunc func(string) (*client.DeviceConfigList, error)
}
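// LogConfigs pages through the config changelog, printing at most Limit entries.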
func LogConfigs(opts *LogConfigsOptions) {
var dcl *client.DeviceConfigList
listLimit := opts.Limit
for {
var err error
if dcl == nil {
dcl, err = opts.ListFunc()
} else {
if dcl.Next != nil {
dcl, err = opts.ListContFunc(*dcl.Next)
} else {
return
}
}
DieNotNil(err)
for _, cfg := range dcl.Configs {
PrintConfig(&cfg, opts.ShowAppliedAt, true, "")
if listLimit -= 1; listLimit == 0 {
return
} else {
fmt.Println("")
}
}
}
}
func ReadConfig(configFile string, cfg *client.ConfigCreateRequest) {
var content []byte
var err error
if configFile == "-" {
logrus.Debug("Reading config from STDIN")
content, err = ioutil.ReadAll(os.Stdin)
} else {
content, err = ioutil.ReadFile(configFile) | DieNotNil(err, "Unable to read config file:")
DieNotNil(json.Unmarshal(content, cfg), "Unable to parse config file:")
}
func PrintConfig(cfg *client.DeviceConfig, showAppliedAt, highlightFirstLine bool, indent string) {
printf := func(format string, a ...interface{}) {
fmt.Printf(indent+format, a...)
}
if highlightFirstLine {
firstLine := color.New(color.FgYellow)
firstLine.Printf(indent+"Created At: %s\n", cfg.CreatedAt)
} else {
printf("Created At: %s\n", cfg.CreatedAt)
}
if showAppliedAt {
printf("Applied At: %s\n", cfg.AppliedAt)
}
printf("Change Reason: %s\n", cfg.Reason)
printf("Files:\n")
for _, f := range cfg.Files {
if len(f.OnChanged) == 0 {
printf("\t%s\n", f.Name)
} else {
printf("\t%s - %v\n", f.Name, f.OnChanged)
}
if f.Unencrypted {
for _, line := range strings.Split(f.Value, "\n") {
printf("\t | %s\n", line)
}
}
}
}
type SetUpdatesConfigOptions struct {
UpdateTag string
UpdateApps string
IsDryRun bool
IsForced bool
Device *client.Device
ListFunc func() (*client.DeviceConfigList, error)
SetFunc func(client.ConfigCreateRequest, bool) error
}
func SetUpdatesConfig(opts *SetUpdatesConfigOptions, reportedTag string, reportedApps []string) {
DieNotNil(validateUpdateArgs(opts))
dcl, err := opts.ListFunc()
if !opts.IsForced {
DieNotNil(err, "Failed to fetch existing config changelog (override with --force):")
}
sota, err := loadSotaConfig(dcl)
if !opts.IsForced {
DieNotNil(err, "Invalid FIO toml file (override with --force):")
}
if opts.UpdateApps == "" && opts.UpdateTag == "" {
if opts.Device != nil {
fmt.Println("= Reporting to server with")
fmt.Println(" Tag: ", opts.Device.Tag)
fmt.Println(" Apps: ", strings.Join(opts.Device.DockerApps, ","))
fmt.Println("")
}
fmt.Println("= Configured overrides")
fmt.Println(sota)
return
}
configuredApps := sota.GetDefault("pacman.docker_apps", "").(string)
configuredTag := sota.GetDefault("pacman.tags", "").(string)
if len(configuredTag) == 0 && len(reportedTag) > 0 {
configuredTag = reportedTag
}
if len(configuredApps) == 0 && reportedApps != nil {
configuredApps = strings.Join(reportedApps, ",")
}
changed := false
if opts.UpdateApps != "" && configuredApps != opts.UpdateApps {
if strings.TrimSpace(opts.UpdateApps) == "," {
opts.UpdateApps = ""
}
fmt.Printf("Changing apps from: [%s] -> [%s]\n", configuredApps, opts.UpdateApps)
sota.Set("pacman.docker_apps", opts.UpdateApps)
sota.Set("pacman.compose_apps", opts.UpdateApps)
changed = true
}
if opts.UpdateTag != "" && configuredTag != opts.UpdateTag {
if strings.TrimSpace(opts.UpdateTag) == "," {
opts.UpdateTag = ""
}
fmt.Printf("Changing tag from: %s -> %s\n", configuredTag, opts.UpdateTag)
sota.Set("pacman.tags", opts.UpdateTag)
changed = true
}
if !changed {
DieNotNil(fmt.Errorf(
"No changes found. Device is already configured with the specified options."))
}
newToml, err := sota.ToTomlString()
DieNotNil(err, "Unable to encode toml:")
cfg := client.ConfigCreateRequest{
Reason: "Override aktualizr-lite update configuration ",
Files: []client.ConfigFile{
{
Name: FIO_TOML_NAME,
Unencrypted: true,
OnChanged: []string{"/usr/share/fioconfig/handlers/aktualizr-toml-update"},
Value: newToml,
},
},
}
if opts.IsDryRun {
fmt.Println(newToml)
} else {
DieNotNil(opts.SetFunc(cfg, opts.IsForced))
}
}
func loadSotaConfig(dcl *client.DeviceConfigList) (sota *toml.Tree, err error) {
found := false
if dcl != nil && len(dcl.Configs) > 0 {
for _, cfgFile := range dcl.Configs[0].Files {
if cfgFile.Name == FIO_TOML_NAME {
sota, err = toml.Load(cfgFile.Value)
if err != nil {
err = fmt.Errorf("Unable to decode toml: %w\n- TOML is: %s", err, cfgFile.Value)
}
found = true
break
}
}
}
if !found {
logrus.Debugf("Not found a FIO toml in the latest config")
}
	// If the FIO TOML file is missing or failed to parse, return an empty one.
	// Let the caller decide what to do in case of an error.
if !found || err != nil {
sota, _ = toml.Load("[pacman]")
}
return
}
func validateUpdateArgs(opts *SetUpdatesConfigOptions) error {
// Validate the inputs: Must be alphanumeric, a dash, underscore, or comma
pattern := `^[a-zA-Z0-9-_,]+$`
re := regexp.MustCompile(pattern)
if len(opts.UpdateApps) > 0 && !re.MatchString(opts.UpdateApps) {
return fmt.Errorf("Invalid value for apps: %s\nMust be %s", opts.UpdateApps, pattern)
}
if len(opts.UpdateTag) > 0 && !re.MatchString(opts.UpdateTag) {
return fmt.Errorf("Invalid value for tag: %s\nMust be %s", opts.UpdateTag, pattern)
}
return nil
} | }
|
boardView.ts | import { View } from "./view"
import { TasklistController } from "../controllers/tasklistController";
import { ApiRequester } from "../communication/apiRequester";
import { BoardHub } from "../communication/boardHub";
import { Task } from "../models/task";
import { Group } from "../models/group";
import { ToastType } from "../enums/toastType";
import { RequestType } from "../enums/requestType";
import { ToastController } from "../controllers/toastController";
import { PermissionLevel } from "../enums/permissionLevel";
import { ContentFormatter } from "../processing/contentFormatter";
import { Board } from "../models/board";
// Dialogs
import { AddTaskDialog } from "../dialogs/addTaskDialog";
import { EditTaskDialog } from "../dialogs/editTaskDialog";
import { ShareDialog } from "../dialogs/shareDialog";
import { SetupDialog } from "../dialogs/setupDialog";
import { SettingsDialog } from "../dialogs/settingsDialog";
window.addEventListener("load", () => new BoardView());
declare const viewData;
/**
* In charge of controlling the "Board" page.
*/
export class BoardView extends View {
public static board: Board;
public static collaborators: string[];
public static dialogs;
public static tasklistControllers = {};
public static viewData;
public static pageReloadInProgress = false;
public static boardHub: BoardHub;
private currentTasklistId: string;
private previousTasklist: HTMLElement;
constructor() {
super();
BoardView.boardHub = new BoardHub();
BoardView.viewData = viewData;
// Load dialogs
this.loadDialogs();
// Load board
this.loadBoard().then(() => {
const shareButton = document.getElementById("shareButton");
shareButton.addEventListener("click", e => {
                BoardView.dialogs.share.shown = true;
BoardView.dialogs.share.setValues({
public: BoardView.board.content.public
});
});
const settingsButton = document.getElementById("settingsButton");
settingsButton.addEventListener("click", e => {
BoardView.dialogs.settings.shown = true;
});
window.onbeforeunload = () => {
BoardView.pageReloadInProgress = true;
return;
}
window.onfocus = () => {
if (BoardView.boardHub.state == "Disconnected") {
window.location.reload();
}
}
// Change structure on smaller screens
// Click the group heads to show the tasklist
const tasklists = document.getElementById("tasklists");
const listhead = document.getElementById("list-head");
listhead.addEventListener("click", e => {
if (window.innerWidth > 1000) return;
const headItem = e.target as HTMLElement;
const id = headItem.dataset.id;
            const tasklist = tasklists.querySelector(`tasklist[data-id="${id}"]`) as HTMLElement;
tasklist.style.display = "block";
headItem.classList.toggle("selected");
if (this.previousTasklist) {
this.previousTasklist.style.display = "none";
const previousHeadItem = listhead.querySelector(`[data-id="${this.previousTasklist.dataset.id}"]`)
previousHeadItem.classList.toggle("selected");
}
this.previousTasklist = tasklist;
});
});
}
/**
* Reload the page in an appropriate manner. If a dialog is open, wait for it to be closed before reloading.
*/
public static reload(): void {
for (const dialogName in BoardView.dialogs) {
const dialog = BoardView.dialogs[dialogName];
if (dialog.shown) {
dialog.addEventListener("hideDialog", () => location.reload());
return;
}
}
location.reload();
}
/**
* Get the id of the root board
*/
public static getRootId(): string {
        // If the board has ancestors, the root board id will be the first ancestor's id.
// If the board doesn't have ancestors, the board is the root board, and this board's id is the root id.
return BoardView.board.ancestors.length > 0
? BoardView.board.ancestors[BoardView.board.ancestors.length - 1].id
: BoardView.board.content.id;
}
/**
* Add a group to the client side.
*/
private addGroup(group: Group): void {
const listhead = document.getElementById("list-head");
const item = document.createElement("div");
item.className = "item";
item.dataset.id = group.id;
item.dataset.name = group.name;
const nameSpan = document.createElement("span");
nameSpan.textContent = group.name;
item.appendChild(nameSpan);
if (BoardView.board.userAccess >= PermissionLevel.Edit)
item.insertAdjacentHTML("beforeend", "<button class='plus'>+</button>")
listhead.appendChild(item);
const tasklists = document.getElementById("tasklists") as HTMLElement;
const tasklistElement = document.createElement("tasklist") as HTMLElement;
tasklistElement.className = "draggableContainer";
tasklistElement.dataset.id = group.id;
tasklists.appendChild(tasklistElement);
BoardView.tasklistControllers[group.id] = new TasklistController(
tasklistElement,
group.name,
viewData.taskColorSeed
);
// Select first group automatically (this will be shown when on a small screen)
if (tasklists.children.length == 2) {
tasklistElement.style.display = "block";
item.classList.toggle("selected");
this.previousTasklist = tasklistElement;
}
// Events
const plusElements = listhead.getElementsByClassName("plus");
for (let plus of <any>plusElements) {
plus.addEventListener("click", e => {
const groupId = e.currentTarget.parentElement.dataset.id;
BoardView.dialogs["addTask"].shown = true;
BoardView.dialogs["addTask"].groupId = groupId;
this.currentTasklistId = groupId;
});
}
}
/**
* Add a task (board) to the client side.
*/
private addTask(tasklistId: string, task: Task): void {
const tasklistController: TasklistController = BoardView.tasklistControllers[tasklistId];
tasklistController.addTask(task);
}
/**
     * Prepare the dialogs for use; they are hidden by default. Simply set dialog.shown = true to show a dialog.
*/
private loadDialogs(): void {
const dialogs = {
addTask: new AddTaskDialog(),
editTask: new EditTaskDialog(),
share: new ShareDialog(), | document.body.appendChild(dialogs[dialogName]);
}
BoardView.dialogs = dialogs;
}
/**
* Set the page title (with links to ancestors as well)
*/
private async setTitle(title: string): Promise<void> {
document.title = title + " - Kolan";
let html = BoardView.board.userAccess >= PermissionLevel.Edit
? `<a href="/">Boards</a> / `
: `<a href="/">Kolan</a> / `;
// Ancestors
if (BoardView.board.ancestors) {
const ancestors = BoardView.board.ancestors;
for (let i = 0; i < ancestors.length; i++) {
// If there are a lot of ancestors, hide the middle ones
if (ancestors.length >= 5 && i == 1)
{
html += `<span>...</span> / `
i = ancestors.length - 2;
continue;
}
const ancestor = ancestors[ancestors.length - i - 1]; // Do backwards for correct order
html += `<a href="./${ancestor.id}">${ancestor.name}</a> / `;
}
}
// Current board
html += `<span>${title}</span>`;
document.getElementById("title").insertAdjacentHTML("afterbegin", html);
}
/**
* Load the contents of the board from the backend.
*/
private async loadBoard(): Promise<void> {
try {
// Get board content
let board = await ApiRequester.boards.get(viewData.id);
BoardView.board = board;
// If the request returns nothing, the board hasn't been set up yet. Display the setup dialog.
if (!board.groups) {
const setupDialog = new SetupDialog();
document.body.appendChild(setupDialog);
setupDialog.shown = true;
setupDialog.addEventListener("submitDialog", () => {
this.loadBoard();
});
return;
}
// Set title on the client side, both on the board page and in the document title.
await this.setTitle(board.content.name);
const tasklists = document.getElementById("tasklists");
const listHead = document.getElementById("list-head");
tasklists.style.gridTemplateColumns = `repeat(${board.groups.length}, 1fr)`;
listHead.style.gridTemplateColumns = tasklists.style.gridTemplateColumns;
for (const group of board.groups) {
this.addGroup(group.groupNode as Group);
for (const task of group.tasks) {
this.addTask(group.groupNode.id, task);
}
}
if (BoardView.board.userAccess >= PermissionLevel.Edit) {
// Connect to SignalR
BoardView.boardHub.join(BoardView.board.content.id);
} else {
// Remove header icons
const headerIcons = document.querySelector("header .right");
headerIcons.parentNode.removeChild(headerIcons);
}
// Get collaborators
BoardView.collaborators = await ApiRequester.boards.getUsers(BoardView.board.content.id);
ToastController.new("Loaded board", ToastType.Info);
} catch (req) {
if (req.status == 404) this.setTitle("404 - Board does not exist");
else if (req.status == 401) this.setTitle("401 - Unauthorized. Are you logged in?");
            else if (req.status >= 400 && req.status <= 499) this.setTitle(`${req.status} - Client error`);
            else if (req.status >= 500 && req.status <= 599) this.setTitle(`${req.status} - Server error`);
console.log(req);
}
}
} | settings: new SettingsDialog()
}
for (const dialogName in dialogs) { |
main.py | from machine import Pin, Timer
def check_sensor(timer):
|
#GP4 - 5v output
gp4 = Pin(4,Pin.OUT)
gp4.value(1)
#GP1 - output for LED
gp1= Pin(1,Pin.OUT)
#GP5 - input from sensor
sensor = Pin(5,Pin.IN)
tim = Timer()
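# Poll the sensor at 1.5 Hz; the callback mirrors its state onto the LED pin.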
tim.init(freq=1.5, mode=Timer.PERIODIC, callback=check_sensor)
| global sensor
if sensor.value() == 1:
gp1.value(1)
else:
gp1.value(0) |
settings.py | '''
* File: settings.py
* Author: George Ungureanu <[email protected]>
* Purpose: This file contains methods for collecting configuration options
and initialize the settings object which holds the parameters
throughout the program execution.
* License: BSD3
'''
'''
Copyright (c) 2014, George Ungureanu
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import __init__
import os
import re
import utils
import logging
## Model class for storing configuration parameters
#
# This class is a container for the configuration settings and
# provides methods to gather or parse them from two main sources: the
# configuration file and the command-line arguments
class | :
## Class constructor
# @param Settings $self
# The object pointer
# @param ArgumentParser $args
	#   The command-line arguments
def __init__(self, args):
self.logger = logging.getLogger('f2dot.settings')
self.logger.debug('Configuring the runtime execution...')
self.runPath = os.path.dirname(os.path.abspath(__file__))
self.configFileName = args.mode + '.conf'
# if -g option chosen
if args.generate_config:
path = args.output
if not path:
path = os.getcwd()
self.createConfFile(path, force=True)
self.logger.info('Generated config file in ' + path)
os._exit(1)
# set paths & names
self.inPathAndFile = os.path.abspath(args.input)
self.inPath, self.inFile = os.path.split(self.inPathAndFile)
if args.output:
self.outPath = os.path.abspath(args.output)
else:
self.outPath = self.inPath
# resolve config file
if args.config:
self.confFile = os.path.abspath(args.config)
else:
self.confFile = self.createConfFile(self.inPath, force=False)
self.logger.info("Using the configuration in %s", self.confFile)
for line in open(self.confFile):
if line.strip().startswith("# works with : f2dot"):
confVer = line.strip().split("# works with : f2dot-",1)[1]
if not confVer == __init__.__version__:
self.logger.warn('The config file was created by another version '
+ 'of the tool. Errors may occur.')
self.settingDict = {}
self.constraintDict = {}
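		# Settings are layered: general.conf defaults first, then the
		# mode-specific defaults, and finally the user's config file, whose
		# values are validated against the constraint patterns below.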
# loading default settings & constraints
for line in utils.getConfigInSection(os.path.join(self.runPath,'config','general.conf'), '[default settings]'):
tag, value = utils.strBeforeAfter(line,"=")
self.settingDict[tag] = value
for line in utils.getConfigInSection(os.path.join(self.runPath,'config',self.configFileName), '[default settings]'):
tag, value = utils.strBeforeAfter(line,"=")
self.settingDict[tag] = value
for line in utils.getConfigInSection(os.path.join(self.runPath,'config','general.conf'), '[setting constraints]'):
tag, value = utils.strBeforeAfter(line,"=")
self.constraintDict[tag] = value
for line in utils.getConfigInSection(os.path.join(self.runPath,'config',self.configFileName), '[setting constraints]'):
tag, value = utils.strBeforeAfter(line,"=")
self.constraintDict[tag] = value
# loading custom settings and comparing them against the constraints
for line in utils.getConfigInSection(self.confFile):
tag, value = utils.strBeforeAfter(line,"=")
if tag in self.constraintDict:
if self.constraintDict[tag]:
pattern=re.compile(self.constraintDict[tag])
if not pattern.match(value):
self.logger.warn("The value for %s (%s) does not match pattern %s. Choosing the default value: %s",
tag, value, self.constraintDict[tag], self.settingDict[tag])
continue
self.settingDict[tag] = value
if args.format:
self.settingDict['FORMAT'] = args.format
if args.prog:
self.settingDict['PROG'] = args.prog
self.outPathAndFile = os.path.join(self.outPath, utils.getFileName(self.inFile) + '.' + self.settingDict['FORMAT'])
self.logger.debug('Runtime configuration successful')
## Creates a config file in the specified path.
# @param str $path
# The directory where the configuration file should be
# @param bool $force
	#   \c True to overwrite an existing configuration file
# @return A string with the absolute path to the config file
def createConfFile(self, path, force=False):
confFile=os.path.join(path, self.configFileName)
if (os.path.isfile(confFile)) and not force:
return confFile
with open(confFile,'w') as f:
header = '' +\
'# file : ' + self.configFileName + ' \n' +\
'# description : automatically generated configuration file\n' +\
'# usage : change the right-hand values as suggested \n' +\
'# works with : f2dot-' + __init__.__version__ + '\n' +\
'# ####################################################################\n'
f.write(header)
utils.copySection(os.path.join(self.runPath,'config','general.conf'), confFile, '[default settings]')
utils.copySection(os.path.join(self.runPath,'config',self.configFileName), confFile, '[default settings]')
return confFile
## Method to enable treating a Settings object as a dictionary.
# @param str $key
# the setting name, as defined in the .conf file
# @return The value of the config parameter with the name 'key'
def __getitem__(self, key):
return self.settingDict[key]
## Prints the current settings
# @param Settings $self The object pointer
def printSettings(self):
msg = 'The current settings are:\n' \
+ '\t* runPath : ' + self.runPath + '\n' \
+ '\t* inPathAndFile : ' + self.inPathAndFile + '\n' \
+ '\t* inPath : ' + self.inPath + '\n' \
+ '\t* inFile : ' + self.inFile + '\n' \
+ '\t* outPath : ' + self.outPath + '\n' \
+ '\t* outPathAndFile : ' + self.outPathAndFile + '\n' \
			+ '\t* confFileName : ' + self.configFileName + '\n' \
			+ '\t* confFile : ' + self.confFile + '\n'
for key, value in self.settingDict.iteritems():
msg = msg + '\t* ' + key + " : " + value + '\n'
return msg
## @var logger
# Logger (logging object)
## @var runPath
# The path where the runnable is located (str)
## @var inPathAndFile
# The full path to the input file (str)
## @var inFile
# Input file name (str)
## @var outPath
# Absolute path to the output directory (str)
## @var configFileName
# Name of the configuration file based on the parse mode (str)
## @var confFile
# Absolte path to the configuration file (str)
## @var outPathAndFile
# Absolute path to the output file (str)
## @var settingDict
# Dictionary containing all other settings (dict)
## @var constraintDict
# Dictionary containing lists with allowed values for the same keys in settingDict
| Settings |
util.rs | pub mod user {
use crate::AppState;
use actix_web::HttpRequest;
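    /// Extract the bearer token from the AUTHORIZATION header, if any:
    /// the last whitespace-separated piece, e.g. the token in "Bearer <token>".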
pub fn extract_token(req: &HttpRequest<AppState>) -> Option<String> {
        req.headers()
            .get("AUTHORIZATION")
            .and_then(|value| value.to_str().ok())
            // Keep the last whitespace-separated piece of the header value.
            .and_then(|header| header.split(' ').last().map(str::to_string))
}
}
pub mod idea {
use ideadog::Pagination;
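    /// Build a Pagination only when both offset and count are supplied.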
pub fn | (offset: Option<u32>, count: Option<u32>) -> Option<Pagination> {
        match (offset, count) {
            (Some(offset), Some(count)) => Some(Pagination { count, offset }),
            _ => None,
        }
}
}
| paginate |
challenge-3.go | package main
import (
"fmt"
)
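// Generator/consumer demo: gen produces 0..99 on a channel and closes it,
// while receive ranges over the channel until it is closed.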
func main() {
c := gen()
receive(c)
fmt.Println("about to exit")
}
func receive(c <-chan int) {
for v := range c {
fmt.Println(v)
}
} |
func gen() <-chan int {
c := make(chan int)
go func() {
for i := 0; i < 100; i++ {
c <- i
}
close(c)
}()
return c
} | |
replica_key.go | package kms
import (
"fmt"
"log"
"strings"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/arn"
"github.com/aws/aws-sdk-go/service/kms"
"github.com/hashicorp/aws-sdk-go-base/v2/awsv1shim/v2/tfawserr"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
"github.com/hashicorp/terraform-provider-aws/internal/conns"
tftags "github.com/hashicorp/terraform-provider-aws/internal/tags"
"github.com/hashicorp/terraform-provider-aws/internal/tfresource"
"github.com/hashicorp/terraform-provider-aws/internal/verify"
)
func ResourceReplicaKey() *schema.Resource {
return &schema.Resource{
Create: resourceReplicaKeyCreate,
Read: resourceReplicaKeyRead,
Update: resourceReplicaKeyUpdate, |
Importer: &schema.ResourceImporter{
State: schema.ImportStatePassthrough,
},
CustomizeDiff: verify.SetTagsDiff,
Schema: map[string]*schema.Schema{
"arn": {
Type: schema.TypeString,
Computed: true,
},
"bypass_policy_lockout_safety_check": {
Type: schema.TypeBool,
Optional: true,
Default: false,
},
"deletion_window_in_days": {
Type: schema.TypeInt,
Optional: true,
Default: 30,
ValidateFunc: validation.IntBetween(7, 30),
},
"description": {
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringLenBetween(0, 8192),
},
"enabled": {
Type: schema.TypeBool,
Optional: true,
Default: true,
},
"key_id": {
Type: schema.TypeString,
Computed: true,
},
"key_rotation_enabled": {
Type: schema.TypeBool,
Computed: true,
},
"key_spec": {
Type: schema.TypeString,
Computed: true,
},
"key_usage": {
Type: schema.TypeString,
Computed: true,
},
"policy": {
Type: schema.TypeString,
Optional: true,
Computed: true,
DiffSuppressFunc: verify.SuppressEquivalentPolicyDiffs,
ValidateFunc: validation.StringIsJSON,
},
"primary_key_arn": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
ValidateFunc: verify.ValidARN,
},
"tags": tftags.TagsSchema(),
"tags_all": tftags.TagsSchemaComputed(),
},
}
}
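// resourceReplicaKeyCreate issues ReplicateKey against the primary key's
// region, waits for the replica to exist in the local region, then applies
// the enabled flag and waits for policy and tag propagation.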
func resourceReplicaKeyCreate(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*conns.AWSClient).KMSConn
defaultTagsConfig := meta.(*conns.AWSClient).DefaultTagsConfig
tags := defaultTagsConfig.MergeTags(tftags.New(d.Get("tags").(map[string]interface{})))
// e.g. arn:aws:kms:us-east-2:111122223333:key/mrk-1234abcd12ab34cd56ef1234567890ab
primaryKeyARN, err := arn.Parse(d.Get("primary_key_arn").(string))
if err != nil {
return fmt.Errorf("error parsing primary key ARN: %w", err)
}
input := &kms.ReplicateKeyInput{
KeyId: aws.String(strings.TrimPrefix(primaryKeyARN.Resource, "key/")),
ReplicaRegion: aws.String(meta.(*conns.AWSClient).Region),
}
if v, ok := d.GetOk("bypass_policy_lockout_safety_check"); ok {
input.BypassPolicyLockoutSafetyCheck = aws.Bool(v.(bool))
}
if v, ok := d.GetOk("description"); ok {
input.Description = aws.String(v.(string))
}
if v, ok := d.GetOk("policy"); ok {
input.Policy = aws.String(v.(string))
}
if len(tags) > 0 {
input.Tags = Tags(tags.IgnoreAWS())
}
// Replication is initiated in the primary key's region.
session, err := conns.NewSessionForRegion(&conn.Config, primaryKeyARN.Region, meta.(*conns.AWSClient).TerraformVersion)
if err != nil {
return fmt.Errorf("error creating AWS session: %w", err)
}
replicateConn := kms.New(session)
log.Printf("[DEBUG] Creating KMS Replica Key: %s", input)
outputRaw, err := WaitIAMPropagation(func() (interface{}, error) {
return replicateConn.ReplicateKey(input)
})
if err != nil {
return fmt.Errorf("error creating KMS Replica Key: %w", err)
}
d.SetId(aws.StringValue(outputRaw.(*kms.ReplicateKeyOutput).ReplicaKeyMetadata.KeyId))
if _, err := WaitReplicaKeyCreated(conn, d.Id()); err != nil {
return fmt.Errorf("error waiting for KMS Replica Key (%s) create: %w", d.Id(), err)
}
d.Set("key_id", d.Id())
if enabled := d.Get("enabled").(bool); !enabled {
if err := updateKmsKeyEnabled(conn, d.Id(), enabled); err != nil {
return err
}
}
// Wait for propagation since KMS is eventually consistent.
if v, ok := d.GetOk("policy"); ok {
if err := WaitKeyPolicyPropagated(conn, d.Id(), v.(string)); err != nil {
return fmt.Errorf("error waiting for KMS Replica Key (%s) policy propagation: %w", d.Id(), err)
}
}
if len(tags) > 0 {
if err := WaitTagsPropagated(conn, d.Id(), tags); err != nil {
return fmt.Errorf("error waiting for KMS Replica Key (%s) tag propagation: %w", d.Id(), err)
}
}
return resourceReplicaKeyRead(d, meta)
}
func resourceReplicaKeyRead(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*conns.AWSClient).KMSConn
defaultTagsConfig := meta.(*conns.AWSClient).DefaultTagsConfig
ignoreTagsConfig := meta.(*conns.AWSClient).IgnoreTagsConfig
key, err := findKmsKey(conn, d.Id(), d.IsNewResource())
if !d.IsNewResource() && tfresource.NotFound(err) {
log.Printf("[WARN] KMS Replica Key (%s) not found, removing from state", d.Id())
d.SetId("")
return nil
}
if err != nil {
return err
}
if keyManager := aws.StringValue(key.metadata.KeyManager); keyManager != kms.KeyManagerTypeCustomer {
return fmt.Errorf("KMS Key (%s) has invalid KeyManager: %s", d.Id(), keyManager)
}
if origin := aws.StringValue(key.metadata.Origin); origin != kms.OriginTypeAwsKms {
return fmt.Errorf("KMS Key (%s) has invalid Origin: %s", d.Id(), origin)
}
if !aws.BoolValue(key.metadata.MultiRegion) ||
aws.StringValue(key.metadata.MultiRegionConfiguration.MultiRegionKeyType) != kms.MultiRegionKeyTypeReplica {
return fmt.Errorf("KMS Key (%s) is not a multi-Region replica key", d.Id())
}
d.Set("arn", key.metadata.Arn)
d.Set("description", key.metadata.Description)
d.Set("enabled", key.metadata.Enabled)
d.Set("key_id", key.metadata.KeyId)
d.Set("key_rotation_enabled", key.rotation)
d.Set("key_spec", key.metadata.KeySpec)
d.Set("key_usage", key.metadata.KeyUsage)
policyToSet, err := verify.SecondJSONUnlessEquivalent(d.Get("policy").(string), key.policy)
if err != nil {
return fmt.Errorf("while setting policy (%s), encountered: %w", key.policy, err)
}
d.Set("policy", policyToSet)
d.Set("primary_key_arn", key.metadata.MultiRegionConfiguration.PrimaryKey.Arn)
tags := key.tags.IgnoreAWS().IgnoreConfig(ignoreTagsConfig)
//lintignore:AWSR002
if err := d.Set("tags", tags.RemoveDefaultConfig(defaultTagsConfig).Map()); err != nil {
return fmt.Errorf("error setting tags: %w", err)
}
if err := d.Set("tags_all", tags.Map()); err != nil {
return fmt.Errorf("error setting tags_all: %w", err)
}
return nil
}
func resourceReplicaKeyUpdate(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*conns.AWSClient).KMSConn
if hasChange, enabled := d.HasChange("enabled"), d.Get("enabled").(bool); hasChange && enabled {
// Enable before any attributes are modified.
if err := updateKmsKeyEnabled(conn, d.Id(), enabled); err != nil {
return err
}
}
if d.HasChange("description") {
if err := updateKmsKeyDescription(conn, d.Id(), d.Get("description").(string)); err != nil {
return err
}
}
if d.HasChange("policy") {
if err := updateKmsKeyPolicy(conn, d.Id(), d.Get("policy").(string), d.Get("bypass_policy_lockout_safety_check").(bool)); err != nil {
return err
}
}
if hasChange, enabled := d.HasChange("enabled"), d.Get("enabled").(bool); hasChange && !enabled {
// Only disable after all attributes have been modified because we cannot modify disabled keys.
if err := updateKmsKeyEnabled(conn, d.Id(), enabled); err != nil {
return err
}
}
if d.HasChange("tags_all") {
o, n := d.GetChange("tags_all")
if err := UpdateTags(conn, d.Id(), o, n); err != nil {
return fmt.Errorf("error updating KMS Replica Key (%s) tags: %w", d.Id(), err)
}
if err := WaitTagsPropagated(conn, d.Id(), tftags.New(n)); err != nil {
return fmt.Errorf("error waiting for KMS Replica Key (%s) tag propagation: %w", d.Id(), err)
}
}
return resourceReplicaKeyRead(d, meta)
}
func resourceReplicaKeyDelete(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*conns.AWSClient).KMSConn
input := &kms.ScheduleKeyDeletionInput{
KeyId: aws.String(d.Id()),
}
if v, ok := d.GetOk("deletion_window_in_days"); ok {
input.PendingWindowInDays = aws.Int64(int64(v.(int)))
}
log.Printf("[DEBUG] Deleting KMS Replica Key: (%s)", d.Id())
_, err := conn.ScheduleKeyDeletion(input)
if tfawserr.ErrCodeEquals(err, kms.ErrCodeNotFoundException) {
return nil
}
if tfawserr.ErrMessageContains(err, kms.ErrCodeInvalidStateException, "is pending deletion") {
return nil
}
if err != nil {
return fmt.Errorf("error deleting KMS Replica Key (%s): %w", d.Id(), err)
}
if _, err := WaitKeyDeleted(conn, d.Id()); err != nil {
return fmt.Errorf("error waiting for KMS Replica Key (%s) delete: %w", d.Id(), err)
}
return nil
} | Delete: resourceReplicaKeyDelete, |
test.go | package main
import (
"encoding/json"
"fmt"
"log"
"math/rand"
"net/http"
"time"
)
const (
server = `http://localhost:8080/numbers`
endpointPrimes = `http://localhost:8079/primes`
endpointRand = `http://localhost:8079/rand`
endpointFibo = `http://localhost:8079/fibo`
endpointOdd = `http://localhost:8079/odd`
timeout = 0.5
)
type data struct {
Numbers []int `json:"numbers"`
}
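// main fires several /numbers queries, each fanning out to a random set of
// upstream endpoints, and fails the e2e run if any response takes 500ms or more.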
func main() | {
rand.Seed(time.Now().UnixNano())
endpoints := []string{endpointPrimes, endpointFibo, endpointRand, endpointOdd}
numQueries := 10
for i := 0; i < numQueries; i++ {
queryURL := fmt.Sprintf("%s?", server)
numEndpoints := rand.Intn(1000)
for j := 0; j < numEndpoints; j++ {
endpoint := rand.Intn(4)
queryURL = fmt.Sprintf("%su=%s&", queryURL, endpoints[endpoint])
}
		// Mark the request start time; latency and request-construction
		// overhead here are negligible.
start := time.Now()
res, err := http.Get(queryURL)
if err != nil {
log.Fatalf("e2e error: %v", err)
}
defer res.Body.Close()
if res.StatusCode != 200 {
log.Fatalf("e2e error. Status code: %d", res.StatusCode)
}
var expectedData data
err = json.NewDecoder(res.Body).Decode(&expectedData)
if err != nil {
log.Fatalf("e2e error: %v", err)
}
elapsed := time.Since(start)
if elapsed.Seconds() >= timeout {
log.Fatalf("e2e test 500ms exceeded!")
}
log.Printf("E2E test successfully passed!")
log.Printf("Number of endpoints: %d", numEndpoints)
log.Printf("Elapsed time: %v", elapsed)
log.Printf("___________")
}
} |
|
test_reindex.py | from __future__ import absolute_import, division, print_function
import os
import procrunner
import pytest
import six
from cctbx import sgtbx
from dxtbx.serialize import load
from six.moves import cPickle as pickle
def pickle_loads(data):
if six.PY3:
return pickle.loads(data, encoding="bytes")
else:
return pickle.loads(data)
def test_reindex(dials_regression, tmpdir):
data_dir = os.path.join(dials_regression, "indexing_test_data", "i04_weak_data")
pickle_path = os.path.join(data_dir, "indexed.pickle")
experiments_path = os.path.join(data_dir, "experiments.json")
commands = [
"dials.reindex",
pickle_path,
experiments_path,
"change_of_basis_op=2a,b,c",
"space_group=P1",
]
result = procrunner.run(commands, working_directory=tmpdir)
assert not result.returncode and not result.stderr
old_reflections = pickle_loads(open(pickle_path, "rb").read())
assert tmpdir.join("reindexed.refl").check()
new_reflections = pickle_loads(tmpdir.join("reindexed.refl").read("rb"))
old_experiments = load.experiment_list(experiments_path, check_format=False)
assert tmpdir.join("reindexed.expt").check()
new_experiments = load.experiment_list(
tmpdir.join("reindexed.expt").strpath, check_format=False
)
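    # change_of_basis_op=2a,b,c doubles the a axis, so h doubles while k, l
    # and the remaining unit-cell parameters stay unchanged.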
h1, k1, l1 = old_reflections["miller_index"].as_vec3_double().parts()
h2, k2, l2 = new_reflections["miller_index"].as_vec3_double().parts()
assert 2 * h1 == pytest.approx(h2)
assert k1 == pytest.approx(k2)
assert l1 == pytest.approx(l2)
old_uc_params = old_experiments[0].crystal.get_unit_cell().parameters()
new_uc_params = new_experiments[0].crystal.get_unit_cell().parameters()
assert new_uc_params[0] == pytest.approx(2 * old_uc_params[0])
assert new_uc_params[1:] == pytest.approx(old_uc_params[1:])
assert old_experiments[0].crystal.get_space_group().type().hall_symbol() == " P 1"
assert new_experiments[0].crystal.get_space_group().type().hall_symbol() == " P 1"
# set space group P4
cb_op = sgtbx.change_of_basis_op("a,b,c")
commands = [
"dials.reindex",
experiments_path,
"space_group=P4",
"change_of_basis_op=%s" % str(cb_op),
"output.experiments=P4.expt",
]
result = procrunner.run(commands, working_directory=tmpdir)
assert not result.returncode and not result.stderr
assert tmpdir.join("P4.expt").check()
# apply one of the symops from the space group
cb_op = sgtbx.change_of_basis_op("-x,-y,z")
commands = [
"dials.reindex",
"P4.expt",
"change_of_basis_op=%s" % str(cb_op),
"output.experiments=P4_reindexed.expt",
]
result = procrunner.run(commands, working_directory=tmpdir)
assert not result.returncode and not result.stderr
assert tmpdir.join("P4_reindexed.expt").check()
new_experiments1 = load.experiment_list(
tmpdir.join("P4_reindexed.expt").strpath, check_format=False
)
assert new_experiments1[0].crystal.get_A() == pytest.approx(
old_experiments[0].crystal.change_basis(cb_op).get_A()
)
cb_op = sgtbx.change_of_basis_op("-x,-y,z")
commands = [
"dials.reindex",
"P4.expt",
"change_of_basis_op=auto",
"reference.experiments=P4_reindexed.expt",
"output.experiments=P4_reindexed2.expt",
]
result = procrunner.run(commands, working_directory=tmpdir)
assert not result.returncode and not result.stderr
new_experiments2 = load.experiment_list(
tmpdir.join("P4_reindexed2.expt").strpath, check_format=False
)
assert new_experiments1[0].crystal.get_A() == pytest.approx(
new_experiments2[0].crystal.get_A()
)
def test_reindex_multi_sequence(dials_regression, tmpdir):
|
def test_reindex_against_reference(dials_regression, tmpdir):
"""Test the reindexing against a reference dataset functionality."""
tmpdir.chdir()
data_dir = os.path.join(dials_regression, "indexing_test_data", "i04_weak_data")
pickle_path = os.path.join(data_dir, "indexed.pickle")
experiments_path = os.path.join(data_dir, "experiments.json")
commands = [
"dials.reindex",
pickle_path,
experiments_path,
"change_of_basis_op=a,b,c",
"space_group=P4",
"output.reflections=P4.refl",
"output.experiments=P4.expt",
]
result = procrunner.run(commands, working_directory=tmpdir)
assert not result.returncode and not result.stderr
assert os.path.exists("P4.refl")
assert os.path.exists("P4.expt")
new_experiments = load.experiment_list("P4.expt", check_format=False)
assert new_experiments[0].crystal.get_space_group().type().hall_symbol() == " P 4"
# Now have something in P4, get another dataset in a different indexing scheme
cb_op = sgtbx.change_of_basis_op("a,-b,-c")
commands = [
"dials.reindex",
"P4.refl",
"P4.expt",
"change_of_basis_op=%s" % str(cb_op),
"output.experiments=P4_reindexed.expt",
"output.reflections=P4_reindexed.refl",
]
result = procrunner.run(commands, working_directory=tmpdir)
assert not result.returncode and not result.stderr
# now run reference reindexing
commands = [
"dials.reindex",
"P4.refl",
"P4.expt",
"reference.experiments=P4_reindexed.expt",
"reference.reflections=P4_reindexed.refl",
]
result = procrunner.run(commands, working_directory=tmpdir)
assert not result.returncode and not result.stderr
# expect reindexed_reflections to be same as P4_reindexed, not P4_reflections
reindexed_reflections = pickle_loads(tmpdir.join("reindexed.refl").read("rb"))
P4_reindexed = pickle_loads(tmpdir.join("P4_reindexed.refl").read("rb"))
P4_reflections = pickle_loads(tmpdir.join("P4.refl").read("rb"))
h1, k1, l1 = reindexed_reflections["miller_index"].as_vec3_double().parts()
h2, k2, l2 = P4_reindexed["miller_index"].as_vec3_double().parts()
h3, k3, l3 = P4_reflections["miller_index"].as_vec3_double().parts()
    # hkl1 and hkl2 should be the same, as they should have been reindexed
    # against the reference, with the program determining a reindexing
    # operator of a,-b,-c
assert list(h1) == pytest.approx(list(h2))
assert list(l1) == pytest.approx(list(l2))
assert list(k1) == pytest.approx(list(k2))
    # h1 and h3 should be the same, but not l and k, as these datasets should
    # differ by a twinning operator of a,-b,-c
assert list(h1) == pytest.approx(list(h3))
assert list(l1) != pytest.approx(list(l3))
assert list(k1) != pytest.approx(list(k3))
| data_dir = os.path.join(dials_regression, "indexing_test_data", "multi_sweep")
pickle_path = os.path.join(data_dir, "indexed.pickle")
experiments_path = os.path.join(data_dir, "experiments.json")
commands = [
"dials.reindex",
pickle_path,
experiments_path,
"change_of_basis_op=x+y,x-z,y-z",
]
result = procrunner.run(commands, working_directory=tmpdir)
assert not result.returncode and not result.stderr
assert tmpdir.join("reindexed.refl").check()
assert tmpdir.join("reindexed.expt").check()
old_reflections = pickle_loads(open(pickle_path, "rb").read())
new_reflections = pickle_loads(tmpdir.join("reindexed.refl").read("rb"))
assert len(old_reflections) == len(new_reflections)
new_experiments = load.experiment_list(
tmpdir.join("reindexed.expt").strpath, check_format=False
)
new_cs = new_experiments[0].crystal.get_crystal_symmetry()
assert new_cs.unit_cell().parameters() == pytest.approx(
(
6.189939294071243,
6.189939294071243,
6.189939294071242,
113.16417286469935,
107.65690626466579,
107.65690626466579,
)
)
assert (
new_experiments[0].crystal.get_space_group().type().hall_symbol()
== " I 4 (x+y,y+z,x+z)"
) |
majordomo_time.js | /**
* @license
* Visual Blocks Editor
*
* Copyright 2012 Google Inc.
* https://blockly.googlecode.com/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Variable blocks for Blockly.
* @author [email protected] (Neil Fraser)
*/
'use strict';
|
goog.require('Blockly.Blocks');
Blockly.Blocks['majordomo_timeis'] = {
/**
   * Block that checks whether the current time equals a given time.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_TIMEIS);
this.appendValueInput("TIME")
.setCheck("String");
this.setInputsInline(true);
this.setOutput(true);
this.setTooltip('');
}
};
Blockly.Blocks['majordomo_timenow'] = {
/**
   * Block that returns the current time.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_TIMENOW);
this.setOutput(true);
this.setTooltip('');
}
};
Blockly.Blocks['majordomo_isweekend'] = {
/**
   * Block that checks whether today is a weekend day.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_ISWEEKEND);
this.setOutput(true);
this.setTooltip('');
}
};
Blockly.Blocks['majordomo_isworkday'] = {
/**
   * Block that checks whether today is a workday.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_ISWORKDAY);
this.setOutput(true);
this.setTooltip('');
}
};
Blockly.Blocks['majordomo_timebefore'] = {
/**
   * Block that checks whether the current time is before a given time.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_TIMEBEFORE);
this.appendValueInput("TIME")
.setCheck("String");
this.setInputsInline(true);
this.setOutput(true);
this.setTooltip('');
}
};
Blockly.Blocks['majordomo_timeafter'] = {
/**
   * Block that checks whether the current time is after a given time.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_TIMEAFTER);
this.appendValueInput("TIME")
.setCheck("String");
this.setInputsInline(true);
this.setOutput(true);
this.setTooltip('');
}
};
Blockly.Blocks['majordomo_timebetween'] = {
/**
   * Block that checks whether the current time is between two given times.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_TIMEBETWEEN);
this.appendValueInput("TIME1")
.setCheck("String");
this.appendDummyInput()
.appendField(' - ');
this.appendValueInput("TIME2")
.setCheck("String");
this.setInputsInline(true);
this.setOutput(true);
this.setTooltip('');
}
};
Blockly.Blocks['majordomo_cleartimeout'] = {
/**
   * Block that clears a previously set timeout by name.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_CLEARTIMEOUT);
this.appendValueInput("TIMER")
.setCheck("String");
this.setInputsInline(true);
this.setOutput(false);
this.setPreviousStatement(true);
this.setNextStatement(true);
this.setTooltip('');
}
};
Blockly.Blocks['majordomo_settimeout'] = {
/**
   * Block that runs the enclosed operations after the given delay.
* @this Blockly.Block
*/
init: function() {
// Assign 'this' to a variable for use in the closure below.
var thisBlock = this;
this.setColour(220);
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_SETTIMEOUT);
this.appendValueInput("TIMER")
.setCheck("String");
this.appendDummyInput()
.appendField(Blockly.Msg.MAJORDOMO_SETTIMEOUTDELAY);
this.appendValueInput("DELAY")
.setCheck("String");
this.appendStatementInput('DO')
.appendField(Blockly.Msg.MAJORDOMO_SETTIMEOUTOPERATIONS);
this.setInputsInline(true);
this.setOutput(false);
this.setPreviousStatement(true);
this.setNextStatement(true);
this.setTooltip('');
}
};
//setTimeOut
//clearTimeOut | goog.provide('Blockly.Blocks.majordomo_time'); |
Witness.py | # automatically generated by the FlatBuffers compiler, do not modify
# namespace: zkinterface
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
# Witness represents an assignment of values to variables.
#
# - Does not include variables already given in `CircuitHeader.instance_variables`.
# - Does not include the constant one variable.
# - Multiple such messages are equivalent to the concatenation of `Variables` arrays.
class Witness(object):
__slots__ = ['_tab']
@classmethod
def GetRootAs(cls, buf, offset=0):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = Witness()
x.Init(buf, n + offset)
return x
@classmethod
def GetRootAsWitness(cls, buf, offset=0):
"""This method is deprecated. Please switch to GetRootAs.""" | return cls.GetRootAs(buf, offset)
@classmethod
def WitnessBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x7A\x6B\x69\x66", size_prefixed=size_prefixed)
# Witness
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# Witness
def AssignedVariables(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from zkinterface.Variables import Variables
obj = Variables()
obj.Init(self._tab.Bytes, x)
return obj
return None
def Start(builder): builder.StartObject(1)
def WitnessStart(builder):
"""This method is deprecated. Please switch to Start."""
return Start(builder)
def AddAssignedVariables(builder, assignedVariables): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(assignedVariables), 0)
def WitnessAddAssignedVariables(builder, assignedVariables):
"""This method is deprecated. Please switch to AddAssignedVariables."""
return AddAssignedVariables(builder, assignedVariables)
def End(builder): return builder.EndObject()
def WitnessEnd(builder):
"""This method is deprecated. Please switch to End."""
return End(builder) | |
registers.go | package proc
import (
"bytes"
"encoding/binary"
"errors"
"fmt"
"math"
"os"
"strings"
)
// Registers is an interface for a generic register type. The
// interface encapsulates the generic values / actions
// we need independent of arch. The concrete register types
// will be different depending on OS/Arch.
type Registers interface {
PC() uint64
SP() uint64
BP() uint64
CX() uint64
TLS() uint64
// GAddr returns the address of the G variable if it is known, 0 and false otherwise
GAddr() (uint64, bool)
Get(int) (uint64, error)
SetPC(Thread, uint64) error
Slice() []Register
}
type Register struct {
Name string
Value string
}
// AppendWordReg appends a word (16 bit) register to regs.
func AppendWordReg(regs []Register, name string, value uint16) []Register {
return append(regs, Register{name, fmt.Sprintf("%#04x", value)})
}
// AppendDwordReg appends a double word (32 bit) register to regs.
func AppendDwordReg(regs []Register, name string, value uint32) []Register {
return append(regs, Register{name, fmt.Sprintf("%#08x", value)})
}
// AppendQwordReg appends a quad word (64 bit) register to regs.
func AppendQwordReg(regs []Register, name string, value uint64) []Register {
return append(regs, Register{name, fmt.Sprintf("%#016x", value)})
}
func appendFlagReg(regs []Register, name string, value uint64, descr flagRegisterDescr, size int) []Register {
return append(regs, Register{name, descr.Describe(value, size)})
}
// AppendEflagReg appends EFLAG register to regs.
func AppendEflagReg(regs []Register, name string, value uint64) []Register {
return appendFlagReg(regs, name, value, eflagsDescription, 64)
}
// AppendMxcsrReg appends MXCSR register to regs.
func AppendMxcsrReg(regs []Register, name string, value uint64) []Register {
return appendFlagReg(regs, name, value, mxcsrDescription, 32)
}
// AppendX87Reg appends a 80 bit float register to regs.
func AppendX87Reg(regs []Register, index int, exponent uint16, mantissa uint64) []Register {
var f float64
fset := false
const (
_SIGNBIT = 1 << 15
_EXP_BIAS = (1 << 14) - 1 // 2^(n-1) - 1 = 16383
_SPECIALEXP = (1 << 15) - 1 // all bits set
_HIGHBIT = 1 << 63
_QUIETBIT = 1 << 62
)
sign := 1.0
if exponent&_SIGNBIT != 0 {
sign = -1.0
}
exponent &= ^uint16(_SIGNBIT)
NaN := math.NaN()
Inf := math.Inf(+1)
switch exponent {
case 0:
switch {
case mantissa == 0:
f = sign * 0.0
fset = true
case mantissa&_HIGHBIT != 0:
f = NaN
fset = true
}
case _SPECIALEXP:
switch {
case mantissa&_HIGHBIT == 0:
f = sign * Inf
fset = true
default:
f = NaN // signaling NaN
fset = true
}
default:
if mantissa&_HIGHBIT == 0 {
f = NaN
fset = true
}
}
if !fset {
significand := float64(mantissa) / (1 << 63)
f = sign * math.Ldexp(significand, int(exponent-_EXP_BIAS))
}
return append(regs, Register{fmt.Sprintf("ST(%d)", index), fmt.Sprintf("%#04x%016x\t%g", exponent, mantissa, f)})
}
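// Worked example for the decoding above: 1.0 is stored with exponent word
// 0x3fff (bias 16383, sign bit clear) and mantissa 0x8000000000000000 (the
// explicit integer bit set), so significand = mantissa/2^63 = 1.0,
// Ldexp(1.0, 0x3fff-16383) = 1.0, and the register renders as
// "0x3fff8000000000000000\t1".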
// AppendSSEReg appends a 256 bit SSE register to regs.
func AppendSSEReg(regs []Register, name string, xmm []byte) []Register {
buf := bytes.NewReader(xmm)
var out bytes.Buffer
var vi [16]uint8
for i := range vi {
binary.Read(buf, binary.LittleEndian, &vi[i])
}
fmt.Fprintf(&out, "0x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x%02x", vi[15], vi[14], vi[13], vi[12], vi[11], vi[10], vi[9], vi[8], vi[7], vi[6], vi[5], vi[4], vi[3], vi[2], vi[1], vi[0])
fmt.Fprintf(&out, "\tv2_int={ %02x%02x%02x%02x%02x%02x%02x%02x %02x%02x%02x%02x%02x%02x%02x%02x }", vi[7], vi[6], vi[5], vi[4], vi[3], vi[2], vi[1], vi[0], vi[15], vi[14], vi[13], vi[12], vi[11], vi[10], vi[9], vi[8])
fmt.Fprintf(&out, "\tv4_int={ %02x%02x%02x%02x %02x%02x%02x%02x %02x%02x%02x%02x %02x%02x%02x%02x }", vi[3], vi[2], vi[1], vi[0], vi[7], vi[6], vi[5], vi[4], vi[11], vi[10], vi[9], vi[8], vi[15], vi[14], vi[13], vi[12])
fmt.Fprintf(&out, "\tv8_int={ %02x%02x %02x%02x %02x%02x %02x%02x %02x%02x %02x%02x %02x%02x %02x%02x }", vi[1], vi[0], vi[3], vi[2], vi[5], vi[4], vi[7], vi[6], vi[9], vi[8], vi[11], vi[10], vi[13], vi[12], vi[15], vi[14])
fmt.Fprintf(&out, "\tv16_int={ %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x %02x }", vi[0], vi[1], vi[2], vi[3], vi[4], vi[5], vi[6], vi[7], vi[8], vi[9], vi[10], vi[11], vi[12], vi[13], vi[14], vi[15])
buf.Seek(0, io.SeekStart)
var v2 [2]float64
for i := range v2 {
binary.Read(buf, binary.LittleEndian, &v2[i])
}
fmt.Fprintf(&out, "\tv2_float={ %g %g }", v2[0], v2[1])
buf.Seek(0, io.SeekStart)
var v4 [4]float32
for i := range v4 {
binary.Read(buf, binary.LittleEndian, &v4[i])
}
fmt.Fprintf(&out, "\tv4_float={ %g %g %g %g }", v4[0], v4[1], v4[2], v4[3])
return append(regs, Register{name, out.String()})
}
var UnknownRegisterError = errors.New("unknown register")
type flagRegisterDescr []flagDescr
type flagDescr struct {
name string
mask uint64
}
var mxcsrDescription flagRegisterDescr = []flagDescr{
{"FZ", 1 << 15},
{"RZ/RN", 1<<14 | 1<<13},
{"PM", 1 << 12},
{"UM", 1 << 11},
{"OM", 1 << 10},
{"ZM", 1 << 9},
{"DM", 1 << 8},
{"IM", 1 << 7},
{"DAZ", 1 << 6},
{"PE", 1 << 5},
{"UE", 1 << 4},
{"OE", 1 << 3},
{"ZE", 1 << 2},
{"DE", 1 << 1},
{"IE", 1 << 0},
}
var eflagsDescription flagRegisterDescr = []flagDescr{
{"CF", 1 << 0},
{"", 1 << 1},
{"PF", 1 << 2},
{"AF", 1 << 4},
{"ZF", 1 << 6},
{"SF", 1 << 7},
{"TF", 1 << 8},
{"IF", 1 << 9},
{"DF", 1 << 10},
{"OF", 1 << 11},
{"IOPL", 1<<12 | 1<<13},
{"NT", 1 << 14},
{"RF", 1 << 16},
{"VM", 1 << 17},
{"AC", 1 << 18},
{"VIF", 1 << 19},
{"VIP", 1 << 20},
{"ID", 1 << 21},
}
func (descr flagRegisterDescr) Mask() uint64 {
var r uint64
for _, f := range descr {
r = r | f.mask
}
return r
}
func (descr flagRegisterDescr) Describe(reg uint64, bitsize int) string {
var r []string
for _, f := range descr {
if f.name == "" {
continue
}
// rbm is f.mask with only the right-most bit set:
// 0001 1100 -> 0000 0100
rbm := f.mask & -f.mask
if rbm == f.mask {
if reg&f.mask != 0 {
r = append(r, f.name)
}
} else {
x := (reg & f.mask) >> uint64(math.Log2(float64(rbm)))
r = append(r, fmt.Sprintf("%s=%x", f.name, x))
}
}
if reg & ^descr.Mask() != 0 {
r = append(r, fmt.Sprintf("unknown_flags=%x", reg&^descr.Mask()))
}
return fmt.Sprintf("%#0*x\t[%s]", bitsize/4, reg, strings.Join(r, " "))
}
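// Example: for the EFLAGS value 0x246 the set single-bit flags are PF, ZF and
// IF, and the multi-bit IOPL field decodes to 0, so Describe(0x246, 64)
// returns "0x00000000000246\t[PF ZF IF IOPL=0]".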
// tracks user_fpregs_struct in /usr/include/x86_64-linux-gnu/sys/user.h
type PtraceFpRegs struct {
Cwd uint16
Swd uint16
Ftw uint16
Fop uint16
Rip uint64
Rdp uint64
Mxcsr uint32
MxcrMask uint32
StSpace [32]uint32
XmmSpace [256]byte
Padding [24]uint32
}
// LinuxX86Xstate represents amd64 XSAVE area. See Section 13.1 (and
// following) of Intel® 64 and IA-32 Architectures Software Developer’s
// Manual, Volume 1: Basic Architecture.
type LinuxX86Xstate struct {
PtraceFpRegs
AvxState bool // contains AVX state
YmmSpace [256]byte
}
// Decode decodes an XSAVE area to a list of name/value pairs of registers. | // x87 registers
regs = AppendWordReg(regs, "CW", xsave.Cwd)
regs = AppendWordReg(regs, "SW", xsave.Swd)
regs = AppendWordReg(regs, "TW", xsave.Ftw)
regs = AppendWordReg(regs, "FOP", xsave.Fop)
regs = AppendQwordReg(regs, "FIP", xsave.Rip)
regs = AppendQwordReg(regs, "FDP", xsave.Rdp)
for i := 0; i < len(xsave.StSpace); i += 4 {
regs = AppendX87Reg(regs, i/4, uint16(xsave.StSpace[i+2]), uint64(xsave.StSpace[i+1])<<32|uint64(xsave.StSpace[i]))
}
// SSE registers
regs = AppendMxcsrReg(regs, "MXCSR", uint64(xsave.Mxcsr))
regs = AppendDwordReg(regs, "MXCSR_MASK", xsave.MxcrMask)
for i := 0; i < len(xsave.XmmSpace); i += 16 {
regs = AppendSSEReg(regs, fmt.Sprintf("XMM%d", i/16), xsave.XmmSpace[i:i+16])
if xsave.AvxState {
regs = AppendSSEReg(regs, fmt.Sprintf("YMM%d", i/16), xsave.YmmSpace[i:i+16])
}
}
return
}
const (
_XSAVE_HEADER_START = 512
_XSAVE_HEADER_LEN = 64
_XSAVE_EXTENDED_REGION_START = 576
_XSAVE_SSE_REGION_LEN = 416
)
// LinuxX86XstateRead reads a byte array containing an XSAVE area into regset.
// If readLegacy is true regset.PtraceFpRegs will be filled with the
// contents of the legacy region of the XSAVE area.
// See Section 13.1 (and following) of Intel® 64 and IA-32 Architectures
// Software Developer’s Manual, Volume 1: Basic Architecture.
func LinuxX86XstateRead(xstateargs []byte, readLegacy bool, regset *LinuxX86Xstate) error {
if _XSAVE_HEADER_START+_XSAVE_HEADER_LEN >= len(xstateargs) {
return nil
}
if readLegacy {
rdr := bytes.NewReader(xstateargs[:_XSAVE_HEADER_START])
if err := binary.Read(rdr, binary.LittleEndian, ®set.PtraceFpRegs); err != nil {
return err
}
}
xsaveheader := xstateargs[_XSAVE_HEADER_START : _XSAVE_HEADER_START+_XSAVE_HEADER_LEN]
xstate_bv := binary.LittleEndian.Uint64(xsaveheader[0:8])
xcomp_bv := binary.LittleEndian.Uint64(xsaveheader[8:16])
if xcomp_bv&(1<<63) != 0 {
// compact format not supported
return nil
}
if xstate_bv&(1<<2) == 0 {
// AVX state not present
return nil
}
avxstate := xstateargs[_XSAVE_EXTENDED_REGION_START:]
regset.AvxState = true
copy(regset.YmmSpace[:], avxstate[:len(regset.YmmSpace)])
return nil
} | func (xsave *LinuxX86Xstate) Decode() (regs []Register) { |
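// Usage sketch (assumes `buf` holds a raw XSAVE area, e.g. fetched via
// PTRACE_GETREGSET with NT_X86_XSTATE):
//
//	var xstate LinuxX86Xstate
//	if err := LinuxX86XstateRead(buf, true, &xstate); err != nil {
//		return err
//	}
//	for _, reg := range xstate.Decode() {
//		fmt.Printf("%s = %s\n", reg.Name, reg.Value)
//	}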
SwipeableImages.js | import React from "react";
import { makeStyles } from "@material-ui/core/styles";
const useStyles = makeStyles((theme) => ({
img: {
height: 240,
display: "block",
maxWidth: 400,
overflow: "hidden",
width: "100%",
},
imgRest: {
height: 360,
display: "block",
maxWidth: 500,
overflow: "hidden",
width: "100%",
borderRadius: "5%",
},
buttonLeft: {
position: "relative",
top: "-132px",
left: "-4%",
color: "white",
},
buttonRight: {
position: "relative",
top: "-132px",
right: "-62%",
color: "white",
},
buttonRightRest: {
right: "-72.5%",
},
}));
function SwipeableImages(props) {
const classes = useStyles();
const imagesArray = props.images;
const newImagesArray = imagesArray.map((image) => {
const imageUrlSplit = image.split("\\");
const imageUrl = imageUrlSplit[0] + "/" + imageUrlSplit[1];
return `${process.env.REACT_APP_SERVER_URL}/${imageUrl}`;
});
return (
<>
<div>
{newImagesArray.map((step, index) => (
<div key={step}>
<img
className={
props.type === "home" ? classes.img : classes.imgRest
}
src={step}
alt={step}
/>
</div>
))} | </div>
</>
);
}
export default SwipeableImages; | |
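// Usage sketch (prop names are this component's; the parent data is hypothetical):
//   <SwipeableImages images={restaurant.images} type="rest" />
// `type === "home"` selects the smaller image style; any other value uses the
// larger, rounded one.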
util.py | from typing import Dict, List
import requests
from bs4 import BeautifulSoup # type: ignore[import]
class _RequestCache:
def __init__(self) -> None:
self._cache: Dict[str, BeautifulSoup] = {}
def __call__(self, page: str) -> BeautifulSoup:
if page.endswith(".html"):
page = page[:-5]
if page not in self._cache:
html = requests.get(
f"https://html.spec.whatwg.org/multipage/{page}.html"
).text
self._cache[page] = BeautifulSoup(html, "html5lib")
return self._cache[page]
request_cache = _RequestCache()
def get_input_type_keywords() -> List[str]:
soup = request_cache("input")
| return keywords | table = soup.find(id="attr-input-type-keywords")
keywords = [
row.contents[0].find("code").string for row in table.find("tbody").children
] |
model.py | import requests
import hashlib
from six.moves.urllib_parse import quote_plus, parse_qs
class HashMismatchException(Exception):
"""
Exception thrown when hash from Paynow does not match locally generated hash
"""
def __init__(self, message):
super(HashMismatchException, self).__init__(message)
# TODO: Update status response class to support dictionary
class StatusResponse:
paid=bool
"""
bool: Boolean value indicating whether the transaction was paid or not
"""
status=str
"""
str: The status of the transaction in Paynow
"""
amount=float
"""
float: The total amount of the transaction
"""
reference=str
"""
str: The unique identifier for the transaction
"""
paynow_reference=str
"""
str: Paynow's unique identifier for the transaction
"""
hash=str
"""
str: Hash of the transaction in Paynow
"""
def __status_update(self, data):
"""Parses the incoming status update from Paynow
Args:
data (any): The data from paynow
"""
print('Not implemented')
# TODO: Implement method
def __init__(self, data, update):
if update:
self.__status_update(data)
else:
self.status = data['status'].lower()
self.paid = self.status == 'paid'
if 'amount' in data:
self.amount = float(data['amount'])
if 'reference' in data:
self.reference = data['reference']
if 'paynowreference' in data:
self.paynow_reference = data['paynowreference']
if 'hash' in data:
self.hash = data['hash']
class InitResponse:
"""Wrapper class for response from Paynow during transaction initiation
"""
success=bool
"""
bool: Boolean indicating whether initiate request was successful or not
"""
instructions=str
"""
str: Payment instructions from Paynow, if any
"""
has_redirect=bool
"""
bool: Boolean indicating whether the response contains a url to redirect to
"""
hash=str
"""
str: Hashed transaction returned from Paynow
"""
redirect_url=str
"""
str: The url the user should be taken to so they can make a payment
"""
error=str
"""
str: the error message from Paynow, if any
"""
poll_url=str
"""
str: The poll URL sent from Paynow
"""
def __init__(self, data):
# TODO return dict of kwargs
self.status = data['status']
self.success = data['status'].lower() != 'error'
self.has_redirect = 'browserurl' in data
self.hash = data.get('hash')  # the actual hash value, not a presence flag
if not self.success:
self.error = data['error']
return
self.poll_url = data['pollurl']
if self.has_redirect:
self.redirect_url = data['browserurl']
if 'instructions' in data:
self.instructions = data['instructions']
def __repr__(self):
'''Print friendly message, especially on errors'''
return self.status
class Payment:
"""Helper class for building up a transaction before sending it off to Paynow
Attributes:
reference (str): Unique identifier for the transaction
items ([]): Array of items in the 'cart'
"""
reference=str
"""
str: Unique identifier for the transaction
"""
items=[]
"""
[]: Array of items in the 'cart'
"""
auth_email=str
"""
str: The user's email address.
"""
def __init__(self, reference, auth_email):
self.reference = reference
self.auth_email = auth_email
# start each instance with a fresh cart so items are not shared between instances
self.items = []
def add(self, title: str, amount: float):
""" Add an item to the 'cart'
Args:
title (str): The name of the item
amount (float): The cost of the item
"""
self.items.append([title, amount])
return self
def clearCart(self):
'''
clear all added items
'''
self.items.clear()
def total(self):
"""Get the total cost of the items in the transaction
Returns:
float: The total
"""
total = 0.0
for item in self.items:
total += float(item[1])
return total
def info(self):
"""Generate text which represents the items in cart
Returns:
str: The text representation of the cart
"""
out = ""
for item in self.items:
out += (item[0] + ", ")
return out
def __repr__(self):
# TODO: how will this be presented when printed?
# information is too vague
pass
class Paynow:
"""Contains helper methods to interact with the Paynow API
Attributes:
integration_id (str): Merchant's integration id.
integration_key (str): Merchant's integration key.
return_url (str): Merchant's return url
result_url (str): Merchant's result url
Args:
integration_id (str): Merchant's integration id. (You can generate this in your merchant dashboard)
integration_key (str): Merchant's integration key.
return_url (str): Merchant's return url
result_url (str): Merchant's result url
"""
URL_INITIATE_TRANSACTION = "https://www.paynow.co.zw/interface/initiatetransaction"
"""
str: Transaction initation url (constant)
"""
URL_INITIATE_MOBILE_TRANSACTION = "https://www.paynow.co.zw/interface/remotetransaction"
"""
str: Transaction initation url (constant)
"""
integration_id=str
"""
str: Merchant's integration id
"""
integration_key=str
"""
str: Merchant's integration key
"""
return_url = ""
"""
str: Merchant's return url
"""
result_url = ""
"""
str: Merchant's result url
"""
# is it necessary to have both return and result urls?
# why not just combine these two; kill two birds with one stone
# Leave the autonomy to the merchant ie merchant knows what to do with
# a successful payment else its an error, merchant will debug, paynow
# provides information about error
def __init__(self, integration_id, integration_key,
return_url='https://www.google.com', result_url='https://www.google.com'):
|
def set_result_url(self, url):
"""Sets the url where the status of the transaction will be sent when payment status is updated within Paynow
Args:
url (str): The url where the status of the transaction will be sent when
payment status is updated within Paynow
"""
self.result_url = url
def set_return_url(self, url):
"""Sets the url where the user will be redirected to after they are done on Paynow
Args:
url (str): The url to redirect user to once they are done on Paynow's side
"""
self.return_url = url
def create_payment(self, reference, auth_email):
"""Create a new payment
Args:
reference (str): Unique identifier for the transaction.
auth_email (str): The phone number to send to Paynow. This is required for mobile transactions
Note:
Auth email is required for mobile transactions.
Returns:
Payment: An object which provides an easy to use API to add items to Payment
"""
return Payment(reference, auth_email)
def send(self, payment):
"""Send a transaction to Paynow
Args:
payment (Payment): The payment object with details about transaction
Returns:
StatusResponse: An object with information about the status of the transaction
"""
return self.__init(payment)
def send_mobile(self, payment, phone, method):
"""Send a mobile transaction to Paynow
Args:
payment (Payment): The payment object with details about transaction
phone (str): The phone number to send to Paynow
method (str): The mobile money method being employed
Returns:
StatusResponse: An object with information about the status of the transaction
"""
return self.__init_mobile(payment, phone, method)
def process_status_update(self, data):
"""This method parses the status update data from Paynow into an easier to use format
Args:
data (dict): A dictionary with the data from Paynow. This is the POST data sent by Paynow
to your result url after the status of a transaction has changed (see Django usage example)
Returns:
StatusResponse: An object with information about the status of the transaction
"""
return StatusResponse(data, True)
def __init(self, payment):
"""Initiate the given transaction with Paynow
Args:
payment (Payment): The payment object with details about transaction
Returns:
InitResponse: An object with misc information about the initiated transaction i.e
redirect url (if available), status of initiation etc (see `InitResponse` declaration above)
"""
if payment.total() <= 0:
raise ValueError('Transaction total cannot be less than 1')
# Build up the object
data = self.__build(payment)
# Save response from Paynow
response = requests.post(self.URL_INITIATE_TRANSACTION, data=data)
# Reconstruct the response into key-value pairs
response_object = self.__rebuild_response(parse_qs(response.text))
# If an error was encountered return a new InitResponse object without validating hash since hash is not
# generated for error responses
if str(response_object['status']).lower() == 'error':
return InitResponse(response_object)
# Verify the hash from Paynow with the locally generated one
if not self.__verify_hash(response_object, self.integration_key):
raise HashMismatchException("Hashes do not match")
# Create a new InitResponse object object passing in the data from Paynow
return InitResponse(response_object)
def __init_mobile(self, payment, phone, method):
"""Initiate a mobile transaction
Args:
payment (Payment): The payment object with details about transaction
phone (str): The phone number to send to Paynow
method (str): The mobile money method being employed
Returns:
InitResponse: An object with misc information about the initiated transaction i.e
redirect url (if available), status of initiation etc (see `InitResponse` declaration above)
"""
if payment.total() <= 0:
raise ValueError('Transaction total cannot be less than 1')
if not payment.auth_email or len(payment.auth_email) <= 0:
raise ValueError('Auth email is required for mobile transactions. You can pass the auth email as the '
'second parameter in the create_payment method call')
# Build up the object
data = self.__build_mobile(payment, phone, method)
# Save response from Paynow
response = requests.post(
self.URL_INITIATE_MOBILE_TRANSACTION, data=data)
# Reconstruct the response into key-value pairs
response_object = self.__rebuild_response(parse_qs(response.text))
# If an error was encountered return a new InitResponse object without validating hash since hash is not
# generated for error responses
if str(response_object['status']).lower() == 'error':
return InitResponse(response_object)
# Verify the hash from Paynow with the locally generated one
if not self.__verify_hash(response_object, self.integration_key):
raise HashMismatchException("Hashes do not match")
# Create a new InitResponse object object passing in the data from Paynow
return InitResponse(response_object)
def check_transaction_status(self, poll_url):
"""Check the status transaction of the transaction with the given poll url
Args:
poll_url (str): Poll url of the transaction
Returns:
StatusResponse: An object with information about the status of the transaction
"""
response = requests.post(poll_url, data={})
_parsed = parse_qs(response.text)
response_object = self.__rebuild_response(_parsed)
return StatusResponse(
response_object, False)
def __build(self, payment):
"""Build up a payment into the format required by Paynow
Args:
payment (Payment): The payment object to format
Returns:
dict: A dictionary properly formatted in the format required by Paynow
"""
body = {
"reference": payment.reference,
"amount": payment.total(),
"id": self.integration_id,
"additionalinfo": payment.info(),
"authemail": payment.auth_email or "",
"status": "Message"
}
for key, value in body.items():
body[key] = quote_plus(str(value))
body['resulturl'] = self.result_url
body['returnurl'] = self.return_url
body['hash'] = self.__hash(body, self.integration_key)
return body
def __build_mobile(self, payment, phone, method):
"""Build up a mobile payment into the format required by Paynow
Args:
payment (Payment): The payment object to format
phone (str): The phone number to send to Paynow
method (str): The mobile money method being employed
Note:
Currently supported methods are `ecocash` and `onemoney`
Returns:
dict: A dictionary properly formatted in the format required by Paynow
"""
body = {
"reference": payment.reference,
"amount": payment.total(),
"id": self.integration_id,
"additionalinfo": payment.info(),
"authemail": payment.auth_email,
"phone": phone,
"method": method,
"status": "Message"
}
for key, value in body.items():
if(key == 'authemail'):
continue
body[key] = quote_plus(str(value))  # URL-encode the value
body['resulturl'] = self.result_url
body['returnurl'] = self.return_url
body['hash'] = self.__hash(body, self.integration_key)
return body
def __hash(self, items, integration_key):
"""Generates a SHA512 hash of the transaction
Args:
items (dict): The transaction dictionary to hash
integration_key (str): Merchant integration key to use during hashing
Returns:
str: The hashed transaction
"""
out = ""
for key, value in items.items():
if(str(key).lower() == 'hash'):
continue
out += str(value)
out += integration_key.lower()
return hashlib.sha512(out.encode('utf-8')).hexdigest().upper()
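# Worked example of the scheme above (illustrative values): for the body
# {"reference": "inv-1", "amount": "10.0"} and integration key "Secret",
# the string hashed is "inv-110.0secret" -- every value concatenated in
# insertion order, the 'hash' key skipped, the key lower-cased -- and the
# result is the upper-cased SHA512 hex digest of that string.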
def __verify_hash(self, response, integration_key):
"""Verify the hash coming from Paynow
Args:
response (dict): The response from Paynow
integration_key (str): Merchant integration key to use during hashing
"""
if('hash' not in response):
raise ValueError("Response from Paynow does not contain a hash")
old_hash = response['hash']
new_hash = self.__hash(response, integration_key)
return old_hash == new_hash
def __rebuild_response(self, response):
"""
Rebuild a response into key value pairs (as opposed to nested array returned from parse_qs)
Args:
response (dict): The response from Paynow
Returns:
dict: Key value pairs of the data from Paynow
"""
res = {}
for key, value in response.items():
res[key] = str(value[0])
return res
| self.integration_id = integration_id
self.integration_key = integration_key
self.return_url = return_url
self.result_url = result_url |
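# Usage sketch (illustrative credentials; method names from this module):
#
#   paynow = Paynow('<integration_id>', '<integration_key>',
#                   'https://example.com/return', 'https://example.com/result')
#   payment = paynow.create_payment('Order #1', 'user@example.com')
#   payment.add('Widget', 10.0)
#   response = paynow.send(payment)
#   if response.success:
#       status = paynow.check_transaction_status(response.poll_url)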
RewardManager.ts | // Copyright 2021 Cartesi Pte. Ltd.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
import { expect, use } from "chai";
import { deployments, ethers } from "hardhat";
import { BigNumberish, Signer } from "ethers";
import {
deployMockContract,
MockContract,
} from "@ethereum-waffle/mock-contract";
import { solidity } from "ethereum-waffle";
import { RewardManager } from "../src/types/RewardManager";
import { RewardManager__factory } from "../src/types/factories/RewardManager__factory";
use(solidity);
describe("RewardManager", async () => {
let signer: Signer;
let alice: Signer;
let aliceAddress: string;
let rewardManager: RewardManager;
let mockToken: MockContract;
let minReward = 500;
let maxReward = 1200;
let numerator = 5;
let denominator = 100;
const deployRewardManager = async ({
pos,
ctsi,
numerator,
denominator,
isConstant,
}: {
pos?: string;
ctsi?: string;
minReward?: BigNumberish;
maxReward?: BigNumberish;
numerator?: BigNumberish;
denominator?: BigNumberish;
isConstant?: boolean;
} = {}): Promise<RewardManager> => {
const [signer] = await ethers.getSigners();
const posAddress = pos || (await deployments.get("PoS")).address;
const ctsiAddress =
ctsi || (await deployments.get("CartesiToken")).address;
const n = numerator || 5000;
const d = denominator || 100000;
const rewardFactory = new RewardManager__factory(signer);
let rewardManager;
if (isConstant) {
rewardManager = await rewardFactory.deploy(
posAddress,
ctsiAddress,
maxReward,
minReward,
d,
d
);
} else {
rewardManager = await rewardFactory.deploy(
posAddress,
ctsiAddress,
maxReward,
minReward,
n,
d
);
}
return rewardManager;
};
beforeEach(async () => {
//await deployments.fixture();
[signer, alice] = await ethers.getSigners();
aliceAddress = await alice.getAddress();
const CartesiToken = await deployments.getArtifact("CartesiToken");
mockToken = await deployMockContract(signer, CartesiToken.abi);
});
it("reward function can only be called by PoS", async () => {
rewardManager = await deployRewardManager({
pos: mockToken.address, // not signer's address
ctsi: mockToken.address,
numerator,
denominator,
});
await mockToken.mock.balanceOf.returns(50000);
await mockToken.mock.transfer.returns(true);
await mockToken.mock.transferFrom.returns(true);
await expect(
rewardManager.reward(aliceAddress, 0),
"function can only be called by operator contract"
).to.be.revertedWith(
"Only the operator contract can call this function"
);
});
it("current currentReward has to be bigger than zero", async () => {
// deploy contract with signer as pos address
rewardManager = await deployRewardManager({
pos: await signer.getAddress(),
ctsi: mockToken.address,
numerator,
denominator,
});
await mockToken.mock.balanceOf.returns(0);
await mockToken.mock.transfer.reverts();
await expect(rewardManager.reward(aliceAddress, 0)).to.be.revertedWith(
"Mock revert"
);
});
it("reward should emit event", async () => {
let balance = 50000;
let currentReward = (balance * numerator) / denominator;
// deploy contract with signer as pos address
rewardManager = await deployRewardManager({
pos: await signer.getAddress(),
ctsi: mockToken.address,
numerator,
denominator,
});
await mockToken.mock.balanceOf.returns(balance);
await mockToken.mock.transfer.returns(true);
await mockToken.mock.transferFrom.returns(true);
await rewardManager.reward(aliceAddress, currentReward);
});
it("numerator == denominator should generate constant reward of max prize", async function () {
let balance = 25000; //12500000;
let lastReward = 0;
let isConstant = true; //is constant
// deploy contract with signer as pos address
rewardManager = await deployRewardManager({
pos: await signer.getAddress(),
ctsi: mockToken.address,
numerator,
denominator,
isConstant, //constant
});
await mockToken.mock.transfer.returns(true);
await mockToken.mock.transferFrom.returns(true);
// loops until balance is zero
while (true) {
balance = Math.floor(balance - maxReward);
if (balance < maxReward) break;
await mockToken.mock.balanceOf.returns(balance);
expect(
await rewardManager.getCurrentReward(),
"current reward has to be correct"
).to.equal(maxReward);
await mockToken.mock.balanceOf.returns(balance - maxReward);
}
});
it("current currentReward should generate currentRewards correctly", async function () {
this.timeout(60000);
let balance = 25000; //12500000;
let lastReward = 0;
// deploy contract with signer as pos address
rewardManager = await deployRewardManager({
pos: await signer.getAddress(),
ctsi: mockToken.address,
numerator,
denominator,
});
await mockToken.mock.transfer.returns(true);
await mockToken.mock.transferFrom.returns(true); | balance = Math.floor(balance - lastReward);
await mockToken.mock.balanceOf.returns(balance);
expect(
await rewardManager.getBalance(),
"current reward has to be correct"
).to.equal(balance);
if (balance == 0) break;
lastReward = Math.floor((balance * numerator) / denominator);
lastReward = lastReward > minReward ? lastReward : minReward;
lastReward = lastReward > maxReward ? maxReward : lastReward;
lastReward = Math.floor(
lastReward > balance ? balance : lastReward
);
await mockToken.mock.balanceOf.returns(balance);
expect(
await rewardManager.getCurrentReward(),
"current reward has to be correct"
).to.equal(lastReward);
await mockToken.mock.balanceOf.returns(balance);
//await rewardManager.reward(aliceAddress, lastReward);
}
});
}); |
// loops until balance is zero
while (true) { |
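// The expected reward in this test restates the contract's clamp chain
// (test arithmetic only, not a separate implementation):
//   reward = min(max(balance * numerator / denominator, minReward), maxReward, balance)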
role.actions.ts | // NGRX
import { Action } from '@ngrx/store';
import { Update } from '@ngrx/entity';
// CRUD
import { QueryParamsModel } from '../../_base/crud';
// Models
import { Role } from '../_models/role.model';
export enum RoleActionTypes {
AllRolesRequested = '[Roles Home Page] All Roles Requested',
AllRolesLoaded = '[Roles API] All Roles Loaded',
RoleOnServerCreated = '[Edit Role Dialog] Role On Server Created',
RoleCreated = '[Edit Roles Dialog] Roles Created',
RoleUpdated = '[Edit Role Dialog] Role Updated',
RoleDeleted = '[Roles List Page] Role Deleted',
RolesPageRequested = '[Roles List Page] Roles Page Requested',
RolesPageLoaded = '[Roles API] Roles Page Loaded',
RolesPageCancelled = '[Roles API] Roles Page Cancelled',
RolesPageToggleLoading = '[Roles page] Roles Page Toggle Loading',
RolesActionToggleLoading = '[Roles] Roles Action Toggle Loading'
}
export class RoleOnServerCreated implements Action {
readonly type = RoleActionTypes.RoleOnServerCreated;
constructor(public payload: { role: Role }) { }
}
export class RoleCreated implements Action {
readonly type = RoleActionTypes.RoleCreated;
constructor(public payload: { role: Role }) { }
}
export class RoleUpdated implements Action {
readonly type = RoleActionTypes.RoleUpdated;
constructor(public payload: {
partialrole: Update<Role>,
role: Role
}) { }
}
export class RoleDeleted implements Action {
readonly type = RoleActionTypes.RoleDeleted;
constructor(public payload: { id: number }) {}
}
export class RolesPageRequested implements Action {
readonly type = RoleActionTypes.RolesPageRequested;
constructor(public payload: { page: QueryParamsModel }) { }
}
export class RolesPageLoaded implements Action {
readonly type = RoleActionTypes.RolesPageLoaded;
constructor(public payload: { roles: Role[], totalCount: number, page: QueryParamsModel }) { }
}
export class RolesPageCancelled implements Action {
readonly type = RoleActionTypes.RolesPageCancelled;
}
export class | implements Action {
readonly type = RoleActionTypes.AllRolesRequested;
}
export class AllRolesLoaded implements Action {
readonly type = RoleActionTypes.AllRolesLoaded;
constructor(public payload: { roles: Role[] }) { }
}
export class RolesPageToggleLoading implements Action {
readonly type = RoleActionTypes.RolesPageToggleLoading;
constructor(public payload: { isLoading: boolean }) { }
}
export class RolesActionToggleLoading implements Action {
readonly type = RoleActionTypes.RolesActionToggleLoading;
constructor(public payload: { isLoading: boolean }) { }
}
export type RoleActions = RoleCreated
| RoleUpdated
| RoleDeleted
| RolesPageRequested
| RolesPageLoaded
| RolesPageCancelled
| AllRolesLoaded
| AllRolesRequested
| RoleOnServerCreated
| RolesPageToggleLoading
| RolesActionToggleLoading;
| AllRolesRequested |
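// Usage sketch (store injection hypothetical; action classes from this file):
//   this.store.dispatch(new RolesPageRequested({ page: queryParams }));
//   this.store.dispatch(new RoleDeleted({ id: role.id }));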
cli.py | import argparse
import requests
from cromwell_tools.cromwell_api import CromwellAPI
from cromwell_tools.cromwell_auth import CromwellAuth
from cromwell_tools.diag import task_runtime
from cromwell_tools import __version__
diagnostic_index = {
'task_runtime': task_runtime.run
}
def parser(arguments=None):
# TODO: dynamically walk through the commands and automatically create parsers here
main_parser = argparse.ArgumentParser()
# Check the installed version of Cromwell-tools
main_parser.add_argument(
'-V', '--version', action='version', version=f'%(prog)s {__version__}'
)
subparsers = main_parser.add_subparsers(help='sub-command help', dest='command')
# sub-commands of cromwell-tools
submit = subparsers.add_parser(
'submit', help='submit help', description='Submit a WDL workflow on Cromwell.'
)
wait = subparsers.add_parser(
'wait',
help='wait help',
description='Wait for one or more running workflow to finish.',
)
status = subparsers.add_parser(
'status',
help='status help',
description='Get the status of one or more workflows.',
)
abort = subparsers.add_parser(
'abort',
help='abort help',
description='Request Cromwell to abort a running workflow by UUID.',
)
release_hold = subparsers.add_parser( | 'release_hold',
help='release_hold help',
description='Request Cromwell to release the hold on a workflow.',
)
metadata = subparsers.add_parser(
'metadata',
help='metadata help',
description='Retrieve the workflow and call-level metadata for a specified workflow by UUID.',
)
query = subparsers.add_parser(
'query',
help='query help',
description='[NOT IMPLEMENTED IN CLI] Query for workflows.',
)
health = subparsers.add_parser(
'health',
help='health help',
description='Check that cromwell is running and that provided authentication is valid.',
)
task_runtime = subparsers.add_parser(
'task_runtime',
help='task_runtime help',
description='Output tsv breakdown of task runtimes by execution event categories',
)
# cromwell url and authentication arguments apply to all sub-commands
cromwell_sub_commands = (
submit,
wait,
status,
abort,
release_hold,
metadata,
query,
health,
task_runtime,
)
auth_args = {
'url': 'The URL to the Cromwell server. e.g. "https://cromwell.server.org/"',
'username': 'Cromwell username for HTTPBasicAuth.',
'password': 'Cromwell password for HTTPBasicAuth.',
'secrets_file': 'Path to the JSON file containing username, password, and url fields.',
'service_account_key': 'Path to the JSON key file for authenticating with CaaS.',
}
def add_auth_args(subcommand_parser):
for arg_dest, help_text in auth_args.items():
subcommand_parser.add_argument(
'--{arg}'.format(arg=arg_dest.replace('_', '-')),
dest=arg_dest,
default=None,
type=str,
help=help_text,
)
# TODO: this should be a group which is called authentication
for p in cromwell_sub_commands:
add_auth_args(p)
# submit arguments
submit.add_argument(
'-w',
'--wdl-file',
dest='wdl_file',
type=str,
required=True,
help='Path to the workflow source file to submit for execution.',
)
submit.add_argument(
'-i',
'--inputs-files',
dest='inputs_files',
nargs='+',
type=str,
required=True,
help='Path(s) to the input file(s) containing input data in JSON format, separated by space.',
)
submit.add_argument(
'-d',
'--deps-file',
dest='dependencies',
nargs='+',
type=str,
help='Path to the Zip file containing dependencies, or a list of raw dependency files to '
'be zipped together separated by space.',
)
submit.add_argument(
'-o',
'--options-file',
dest='options_file',
type=str,
help='Path to the Cromwell configs JSON file.',
)
# TODO: add a mutually exclusive group to make it easy to add labels for users
submit.add_argument(
'-l',
'--label-file',
dest='label_file',
type=str,
default=None,
help='Path to the JSON file containing a collection of key/value pairs for workflow labels.',
)
submit.add_argument(
'-c',
'--collection-name',
dest='collection_name',
type=str,
default=None,
help='Collection in SAM that the workflow should belong to, if use CaaS.',
)
submit.add_argument(
'--on-hold',
dest='on_hold',
type=bool,
default=False,
help='Whether to submit the workflow in "On Hold" status.',
)
submit.add_argument(
'--validate-labels',
dest='validate_labels',
type=bool,
default=False,
help='Whether to validate cromwell labels.',
)
# wait arguments
wait.add_argument('workflow_ids', nargs='+')
wait.add_argument(
'--timeout-minutes',
dest='timeout_minutes',
type=int,
default=120,
help='number of minutes to wait before timeout.',
)
wait.add_argument(
'--poll-interval-seconds',
dest='poll_interval_seconds',
type=int,
default=30,
help='seconds between polling cromwell for workflow status.',
)
wait.add_argument(
'--silent',
dest='verbose',
action='store_false',
help='whether to silently print verbose workflow information while polling cromwell.',
)
# status arguments
status.add_argument(
'--uuid',
required=True,
help='A Cromwell workflow UUID, which is the workflow identifier.',
)
# abort arguments
abort.add_argument(
'--uuid',
required=True,
help='A Cromwell workflow UUID, which is the workflow identifier.',
)
# release_hold arguments
release_hold.add_argument(
'--uuid',
required=True,
help='A Cromwell workflow UUID, which is the workflow identifier.',
)
# metadata arguments
metadata.add_argument(
'--uuid',
required=True,
help='A Cromwell workflow UUID, which is the workflow identifier.',
)
# TODO: add a mutually exclusive group to make it fail early
metadata.add_argument(
'--includeKey',
nargs='+',
default=None,
help='When specified key(s) to include from the metadata. Matches any key starting with the value. May not be used with excludeKey.',
)
metadata.add_argument(
'--excludeKey',
nargs='+',
default=None,
help='When specified key(s) to exclude from the metadata. Matches any key starting with the value. May not be used with includeKey.',
)
metadata.add_argument(
'--expandSubWorkflows',
default=False,
help='When true, metadata for sub workflows will be fetched and inserted automatically in the metadata response.',
)
either_runtime = task_runtime.add_mutually_exclusive_group(required=True)
either_runtime.add_argument(
'--metadata',
dest='metadata',
help='Metadata json file to calculate cost on',
)
either_runtime.add_argument(
'--uuid',
dest='uuid',
help='A Cromwell workflow UUID, which is the workflow identifier.',
)
# query arguments
# TODO: implement CLI entry for query API.
# group all of the arguments
args = vars(main_parser.parse_args(arguments))
# TODO: see if this can be moved or if the commands can be populated from above
if args['command'] in (
'submit',
'wait',
'status',
'abort',
'release_hold',
'health',
'metadata',
'task_runtime',
):
auth_arg_dict = {k: args.get(k) for k in auth_args.keys()}
auth = CromwellAuth.harmonize_credentials(**auth_arg_dict)
args['auth'] = auth
for k in auth_args:
if k in args:
del args[k]
command = getattr(CromwellAPI, args['command'], False)
if not command:
try:
command = diagnostic_index[args['command']]
except KeyError:
raise KeyError(f"{args['command']} is not a valid command.")
del args['command']
return command, args
# this should just getattr from CromwellAPI and call the func with args.
# TODO: refactor this module into class-based parsers
def main(arguments=None):
command, args = parser(arguments)
result = command(**args)
if isinstance(result, requests.Response):
print(result.text)
else:
print(result) | |
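# Usage sketch (flags as defined above; the server URL is illustrative):
#
#   main(['health', '--url', 'https://cromwell.example.org'])
#   main(['status', '--uuid', '<workflow-uuid>', '--url', 'https://cromwell.example.org'])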
tailnet.go | package service
import (
"context"
"errors"
"fmt"
"github.com/bufbuild/connect-go"
"github.com/jsiebens/ionscale/internal/domain"
api "github.com/jsiebens/ionscale/pkg/gen/ionscale/v1"
)
func (s *Service) CreateTailnet(ctx context.Context, req *connect.Request[api.CreateTailnetRequest]) (*connect.Response[api.CreateTailnetResponse], error) {
principal := CurrentPrincipal(ctx)
if !principal.IsSystemAdmin() {
return nil, connect.NewError(connect.CodePermissionDenied, errors.New("permission denied"))
}
tailnet, created, err := s.repository.GetOrCreateTailnet(ctx, req.Msg.Name)
if err != nil {
return nil, err
}
if !created {
return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("tailnet already exists"))
}
resp := &api.CreateTailnetResponse{Tailnet: &api.Tailnet{
Id: tailnet.ID,
Name: tailnet.Name,
}}
return connect.NewResponse(resp), nil
}
func (s *Service) GetTailnet(ctx context.Context, req *connect.Request[api.GetTailnetRequest]) (*connect.Response[api.GetTailnetResponse], error) {
principal := CurrentPrincipal(ctx)
if !principal.IsSystemAdmin() && !principal.IsTailnetAdmin(req.Msg.Id) {
return nil, connect.NewError(connect.CodePermissionDenied, errors.New("permission denied"))
}
tailnet, err := s.repository.GetTailnet(ctx, req.Msg.Id)
if err != nil {
return nil, err
}
if tailnet == nil {
return nil, connect.NewError(connect.CodeNotFound, errors.New("tailnet not found"))
}
return connect.NewResponse(&api.GetTailnetResponse{Tailnet: &api.Tailnet{
Id: tailnet.ID,
Name: tailnet.Name,
}}), nil
}
func (s *Service) ListTailnets(ctx context.Context, req *connect.Request[api.ListTailnetRequest]) (*connect.Response[api.ListTailnetResponse], error) {
principal := CurrentPrincipal(ctx)
resp := &api.ListTailnetResponse{}
if principal.IsSystemAdmin() {
tailnets, err := s.repository.ListTailnets(ctx)
if err != nil {
return nil, err
}
for _, t := range tailnets {
gt := api.Tailnet{Id: t.ID, Name: t.Name}
resp.Tailnet = append(resp.Tailnet, >)
}
}
if principal.User != nil {
tailnet, err := s.repository.GetTailnet(ctx, principal.User.TailnetID)
if err != nil {
return nil, err
}
gt := api.Tailnet{Id: tailnet.ID, Name: tailnet.Name}
resp.Tailnet = append(resp.Tailnet, >)
}
return connect.NewResponse(resp), nil
}
func (s *Service) DeleteTailnet(ctx context.Context, req *connect.Request[api.DeleteTailnetRequest]) (*connect.Response[api.DeleteTailnetResponse], error) {
principal := CurrentPrincipal(ctx)
if !principal.IsSystemAdmin() {
return nil, connect.NewError(connect.CodePermissionDenied, errors.New("permission denied"))
}
count, err := s.repository.CountMachineByTailnet(ctx, req.Msg.TailnetId)
if err != nil {
return nil, err
}
if !req.Msg.Force && count > 0 {
return nil, connect.NewError(connect.CodeInvalidArgument, fmt.Errorf("tailnet is not empty, number of machines: %d", count))
}
err = s.repository.Transaction(func(tx domain.Repository) error {
if err := tx.DeleteMachineByTailnet(ctx, req.Msg.TailnetId); err != nil {
return err
}
if err := tx.DeleteAuthKeysByTailnet(ctx, req.Msg.TailnetId); err != nil {
return err
}
if err := tx.DeleteUsersByTailnet(ctx, req.Msg.TailnetId); err != nil {
return err
}
if err := tx.DeleteTailnet(ctx, req.Msg.TailnetId); err != nil {
return err
}
return nil
})
if err != nil {
return nil, err
} | } |
s.brokers(req.Msg.TailnetId).SignalUpdate()
return connect.NewResponse(&api.DeleteTailnetResponse{}), nil |
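// Client-side usage sketch (the generated connect client and its construction
// are assumed; request fields match the handler above):
//
//	_, err := client.DeleteTailnet(ctx, connect.NewRequest(&api.DeleteTailnetRequest{
//		TailnetId: id,
//		Force:     true,
//	}))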
radio.component.ts | /** @component radio */
import {
ChangeDetectorRef,
Component,
ElementRef,
EventEmitter,
forwardRef,
Input,
Output,
ViewChild,
} from '@angular/core';
import { NG_VALUE_ACCESSOR, ControlValueAccessor } from '@angular/forms';
// tslint:disable:no-use-before-declare
const CUSTOM_RADIO_VALUE_ACCESSOR: any = {
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => RadioComponent),
multi: true,
};
// tslint:enable:no-use-before-declare
@Component({
selector: 'md-radio',
template: `
<div class="md-radio-group">
<div class="md-input-group md-radio" [ngClass]="wrapperClasses">
<input
class="md-input md-radio__input"
type="radio"
#radioInput
(change)="onToggle($event)"
[attr.id]="htmlId"
[attr.name]="name"
[attr.tabindex]="tabIndex"
[attr.value]="value"
[checked]="checked"
[disabled]="disabled"
/>
<label
class="md-radio__label"
(click)="onToggle($event)"
[attr.for]="htmlId"
>
<span>{{ label }}</span>
</label>
</div>
<ng-content></ng-content>
</div>
`,
styles: [],
providers: [CUSTOM_RADIO_VALUE_ACCESSOR],
})
export class | implements ControlValueAccessor {
constructor(private cdr: ChangeDetectorRef) {}
get wrapperClasses() {
return {
['md-input--nested-' + this.nestedLevel]: this.nestedLevel,
[this.class]: this.class,
};
}
/** @option Optional CSS class name | '' */
@Input() class: string = '';
/** @option Sets the attribute disabled to the Radio | false */
@Input() disabled: boolean = false;
/** @option Unique HTML ID used for tying label to HTML input for automated testing */
@Input() htmlId: string = '';
/** @option Radio label text | '' */
@Input() label: string = '';
/** @option Radio name for group | '' */
@Input() name: string = '';
/** @option index of the radio in tab order */
@Input() tabIndex: number;
/** @option String value that corresponds with Radio button | '' */
@Input() value: any = '';
/** @option Set the level of nested radio | 0 */
@Input() nestedLevel: number = 0;
/** @option Callback function invoked when user clicks the Radio button | null */
@Output() click: EventEmitter<any> = new EventEmitter();
@ViewChild('radioInput') radioViewChild: ElementRef;
public checked: boolean;
public onChangeCallback: Function = () => {};
public onTouchedCallback: Function = () => {};
onToggle(e) {
if (!this.disabled) {
this.radioViewChild.nativeElement.checked = true;
this.checked = true;
this.onChangeCallback(this.value);
this.click.emit();
}
}
writeValue(value: any): void {
this.checked = value === this.value;
if (this.radioViewChild.nativeElement) {
this.radioViewChild.nativeElement.checked = this.checked;
}
this.cdr.markForCheck();
}
registerOnTouched(fn: Function): void {
this.onTouchedCallback = fn;
}
registerOnChange(fn: Function): void {
this.onChangeCallback = fn;
}
}
| RadioComponent |
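// Usage sketch (template-driven forms, FormsModule assumed; inputs are this
// component's own):
//   <md-radio name="color" value="red" label="Red" [(ngModel)]="selected"></md-radio>
//   <md-radio name="color" value="blue" label="Blue" [(ngModel)]="selected"></md-radio>
// The CUSTOM_RADIO_VALUE_ACCESSOR provider is what lets ngModel bind here.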
chain_spec.rs | use sp_core::{Pair, Public, sr25519};
use node_template_runtime::{
AccountId, AuraConfig, BalancesConfig, GenesisConfig, GrandpaConfig,
SudoConfig, SystemConfig, WASM_BINARY, Signature
};
use sp_consensus_aura::sr25519::AuthorityId as AuraId;
use sp_finality_grandpa::AuthorityId as GrandpaId;
use sp_runtime::traits::{Verify, IdentifyAccount};
use sc_service::ChainType;
// The URL for the telemetry server.
// const STAGING_TELEMETRY_URL: &str = "wss://telemetry.polkadot.io/submit/";
/// Specialized `ChainSpec`. This is a specialization of the general Substrate ChainSpec type.
pub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig>;
/// Generate a crypto pair from seed.
pub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {
TPublic::Pair::from_string(&format!("//{}", seed), None)
.expect("static values are valid; qed")
.public()
}
type AccountPublic = <Signature as Verify>::Signer;
/// Generate an account ID from seed.
pub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId where
AccountPublic: From<<TPublic::Pair as Pair>::Public>
{
AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()
}
/// Generate an Aura authority key.
pub fn authority_keys_from_seed(s: &str) -> (AuraId, GrandpaId) {
(
get_from_seed::<AuraId>(s),
get_from_seed::<GrandpaId>(s),
)
}
pub fn development_config() -> Result<ChainSpec, String> {
let wasm_binary = WASM_BINARY.ok_or_else(|| "Development wasm binary not available".to_string())?;
Ok(ChainSpec::from_genesis(
// Name
"Development",
// ID
"dev",
ChainType::Development,
move || testnet_genesis(
wasm_binary,
// Initial PoA authorities
vec![
authority_keys_from_seed("Alice"),
],
// Sudo account
get_account_id_from_seed::<sr25519::Public>("Alice"),
// Pre-funded accounts
vec![
get_account_id_from_seed::<sr25519::Public>("Alice"),
get_account_id_from_seed::<sr25519::Public>("Bob"),
get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
],
true,
),
// Bootnodes
vec![],
// Telemetry
None,
// Protocol ID
None,
// Properties
None,
// Extensions
None,
))
}
pub fn local_testnet_config() -> Result<ChainSpec, String> {
let wasm_binary = WASM_BINARY.ok_or_else(|| "Development wasm binary not available".to_string())?;
Ok(ChainSpec::from_genesis(
// Name
"Local Testnet",
// ID
"local_testnet",
ChainType::Local,
move || testnet_genesis(
wasm_binary,
// Initial PoA authorities
vec![
authority_keys_from_seed("Alice"),
authority_keys_from_seed("Bob"),
],
// Sudo account
get_account_id_from_seed::<sr25519::Public>("Alice"),
// Pre-funded accounts
vec![
get_account_id_from_seed::<sr25519::Public>("Alice"),
get_account_id_from_seed::<sr25519::Public>("Bob"),
get_account_id_from_seed::<sr25519::Public>("Charlie"),
get_account_id_from_seed::<sr25519::Public>("Dave"),
get_account_id_from_seed::<sr25519::Public>("Eve"),
get_account_id_from_seed::<sr25519::Public>("Ferdie"),
get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
get_account_id_from_seed::<sr25519::Public>("Charlie//stash"),
get_account_id_from_seed::<sr25519::Public>("Dave//stash"),
get_account_id_from_seed::<sr25519::Public>("Eve//stash"),
get_account_id_from_seed::<sr25519::Public>("Ferdie//stash"),
],
true,
),
// Bootnodes
vec![],
// Telemetry
None,
// Protocol ID
None,
// Properties
None,
// Extensions
None,
))
}
/// Configure initial storage state for FRAME modules.
fn testnet_genesis(
wasm_binary: &[u8],
initial_authorities: Vec<(AuraId, GrandpaId)>,
root_key: AccountId,
endowed_accounts: Vec<AccountId>,
_enable_println: bool,
) -> GenesisConfig | {
GenesisConfig {
frame_system: Some(SystemConfig {
// Add Wasm runtime to storage.
code: wasm_binary.to_vec(),
changes_trie_config: Default::default(),
}),
pallet_balances: Some(BalancesConfig {
// Configure endowed accounts with initial balance of 1 << 60.
balances: endowed_accounts.iter().cloned().map(|k|(k, 1 << 60)).collect(),
}),
pallet_aura: Some(AuraConfig {
authorities: initial_authorities.iter().map(|x| (x.0.clone())).collect(),
}),
pallet_grandpa: Some(GrandpaConfig {
authorities: initial_authorities.iter().map(|x| (x.1.clone(), 1)).collect(),
}),
pallet_sudo: Some(SudoConfig {
// Assign network admin rights.
key: root_key,
}),
// pallet_genesis_config: Some(GenesisConfigModuleConfig {
// something: 9,
// something_two: 10,
// some_account_value: endowed_accounts.iter().cloned().map(|k|(k, 2)).collect(),
// })
}
} |
|
error.go | // Copyright 2019 The OpenPitrix Authors. All rights reserved.
// Use of this source code is governed by a Apache license
// that can be found in the LICENSE file.
package gerr
import (
"context"
"fmt"
"github.com/pkg/errors"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"openpitrix.io/logger"
"openpitrix.io/notification/pkg/pb"
)
const En = "en"
const ZhCN = "zh_cn"
const DefaultLocale = ZhCN
func newStatus(ctx context.Context, code codes.Code, err error, errMsg ErrorMessage, a ...interface{}) *status.Status {
locale := DefaultLocale
s := status.New(code, errMsg.Message(locale, err, a...))
errorDetail := &pb.ErrorDetail{ErrorName: errMsg.Name}
if err != nil {
errorDetail.Cause = fmt.Sprintf("%+v", err)
} | logger.New().WithDepth(2).Errorf(ctx, "err: %+v, errMsg: %s", err, errMsg.Message(locale, err, a...))
sd, e := s.WithDetails(errorDetail)
if e == nil {
return sd
} else {
logger.New().WithDepth(2).Errorf(ctx, "%+v", errors.WithStack(e))
}
return s
}
func ClearErrorCause(err error) error {
if e, ok := status.FromError(err); ok {
details := e.Details()
if len(details) > 0 {
detail := details[0]
if d, ok := detail.(*pb.ErrorDetail); ok {
d.Cause = ""
// clear detail
proto := e.Proto()
proto.Details = proto.Details[:0]
e = status.FromProto(proto)
e, _ := e.WithDetails(d)
return e.Err()
}
}
}
return err
}
type GRPCError interface {
error
GRPCStatus() *status.Status
}
func New(ctx context.Context, code codes.Code, errMsg ErrorMessage, a ...interface{}) GRPCError {
return newStatus(ctx, code, nil, errMsg, a...).Err().(GRPCError)
}
func NewWithDetail(ctx context.Context, code codes.Code, err error, errMsg ErrorMessage, a ...interface{}) GRPCError {
return newStatus(ctx, code, err, errMsg, a...).Err().(GRPCError)
}
func IsGRPCError(err error) bool {
if e, ok := err.(GRPCError); ok && e != nil {
return true
}
return false
} | |
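// Usage sketch (ErrorMessage values live elsewhere in this package; the
// literal below is hypothetical and assumes ErrorMessage is a plain struct):
//
//	errMsg := ErrorMessage{Name: "internal_error"}
//	return nil, NewWithDetail(ctx, codes.Internal, err, errMsg)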
course.go | package domain
import (
"context"
)
// Course Status
const (
CourseInDraft Status = "DRAFT"
CourseArchived Status = "Archived"
CourseAssigned Status = "Assigned"
CoursePublished Status = "Published"
CoursePublic Status = "Public"
CourseCreated Status = "Created"
CourseComplete Status = "Completed"
StatusSuccess Status = "Success"
StatusQueued Status = "Queued"
StatusSending Status = "Sending"
StatusUnknown Status = "Unknown"
StatusScheduled Status = "Scheduled"
StatusRetry Status = "Retrying"
)
// Course is a struct represent a created Course
type Course struct {
ID int64 `json:"id" `
Title string `json:"title" validate:"required"`
Description string `json:"description,omitempty"`
ImageURL string `json:"image_url,omitempty"`
Duration uint16 `json:"duration,omitempty"`
CategoryID NullInt64 `json:"-"`
Category Category `json:"categories,omitempty"`
Tags []Tag `json:"tags,omitempty"`
AuthorID NullInt64 `json:"-"`
Author User `json:"author,omitempty"`
Users []User `json:"users,omitempty"`
LessonCount int `json:"lesson_count,omitempty"`
Lessons []Lesson `json:"lessons,omitempty"`
Attachments []Attachment `json:"attachments,omitempty"`
Status Status `json:"status,omitempty"`
UpdatedAt int64 `json:"updated_at,omitempty"`
CreatedAt int64 `json:"created_at,omitempty"`
}
// CourseStats is a struct representing the statistics for a single Course
type CourseStats struct {
TotalEnroll int64 `json:"total_enroll"`
LessonCount int64 `json:"lesson_count"`
TotalCompleted int64 `json:"total_complete"`
TotalAssigned int64 `json:"total_assign"`
}
// CourseSummaries is a struct representing the overview of Courses
type CourseSummaries struct { | // Tags ...
type Tags struct {
Tag Tag `json:"tag"`
}
// CourseUseCase represent the course's usecases
type CourseUseCase interface {
GetAll(ctx context.Context, start int, limit int) ([]Course, error)
GetByID(ctx context.Context, id int64) (*Course, error)
GetByTitle(ctx context.Context, title string) (*Course, error)
UpdateCourse(ctx context.Context, course *Course, id int64) error
CreateCourse(ctx context.Context, course *Course) error
DeleteCourse(ctx context.Context, id int64) error
// Archive(ctx context.Context, course *Course) error
// AssignToUser(ctx context.Context, course *Course, user *User)
}
// CourseRepository represent the course's repository
type CourseRepository interface {
GetAll(ctx context.Context, start int, limit int) ([]Course, error)
GetByID(ctx context.Context, id int64) (*Course, error)
GetByTitle(ctx context.Context, title string) (*Course, error)
UpdateCourse(ctx context.Context, course *Course) error
CreateCourse(ctx context.Context, course *Course) error
DeleteCourse(ctx context.Context, id int64) error
GetCourseCount(ctx context.Context) (int64, error)
} | Response
Total int64 `json:"total"`
}
|
pool.rs | //! Thread pool for blocking operations
use crate::loom::sync::{Arc, Condvar, Mutex};
use crate::loom::thread;
use crate::runtime::blocking::schedule::NoopSchedule;
use crate::runtime::blocking::shutdown;
use crate::runtime::builder::ThreadNameFn;
use crate::runtime::context;
use crate::runtime::task::{self, JoinHandle};
use crate::runtime::{Builder, Callback, ToHandle};
use std::collections::{HashMap, VecDeque};
use std::fmt;
use std::time::Duration;
pub(crate) struct BlockingPool {
spawner: Spawner,
shutdown_rx: shutdown::Receiver,
}
#[derive(Clone)]
pub(crate) struct Spawner {
inner: Arc<Inner>,
}
struct Inner {
/// State shared between worker threads.
shared: Mutex<Shared>,
/// Pool threads wait on this.
condvar: Condvar,
/// Spawned threads use this name.
thread_name: ThreadNameFn,
/// Spawned thread stack size.
stack_size: Option<usize>,
/// Call after a thread starts.
after_start: Option<Callback>,
/// Call before a thread stops.
before_stop: Option<Callback>,
// Maximum number of threads.
thread_cap: usize,
// Customizable wait timeout.
keep_alive: Duration,
}
struct Shared {
queue: VecDeque<Task>,
num_th: usize,
num_idle: u32,
num_notify: u32,
shutdown: bool,
shutdown_tx: Option<shutdown::Sender>,
/// Prior to shutdown, we clean up JoinHandles by having each timed-out
/// thread join on the previous timed-out thread. This is not strictly
/// necessary but helps avoid Valgrind false positives, see
/// <https://github.com/tokio-rs/tokio/commit/646fbae76535e397ef79dbcaacb945d4c829f666>
/// for more information.
last_exiting_thread: Option<thread::JoinHandle<()>>,
/// This holds the JoinHandles for all running threads; on shutdown, the thread
/// calling shutdown handles joining on these.
worker_threads: HashMap<usize, thread::JoinHandle<()>>,
/// This is a counter used to iterate worker_threads in a consistent order (for loom's
/// benefit).
worker_thread_index: usize,
}
pub(crate) struct Task {
task: task::UnownedTask<NoopSchedule>,
mandatory: Mandatory,
}
#[derive(PartialEq, Eq)]
pub(crate) enum Mandatory {
#[cfg_attr(not(fs), allow(dead_code))]
Mandatory,
NonMandatory,
}
impl Task {
pub(crate) fn new(task: task::UnownedTask<NoopSchedule>, mandatory: Mandatory) -> Task {
Task { task, mandatory }
}
fn run(self) {
self.task.run();
}
fn shutdown_or_run_if_mandatory(self) {
match self.mandatory {
Mandatory::NonMandatory => self.task.shutdown(),
Mandatory::Mandatory => self.task.run(),
}
}
}
const KEEP_ALIVE: Duration = Duration::from_secs(10);
/// Runs the provided function on an executor dedicated to blocking operations.
/// Tasks will be scheduled as non-mandatory, meaning they may not get executed
/// in case of runtime shutdown.
#[track_caller]
pub(crate) fn spawn_blocking<F, R>(func: F) -> JoinHandle<R>
where
F: FnOnce() -> R + Send + 'static,
R: Send + 'static,
{
let rt = context::current();
rt.spawn_blocking(func)
}
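// Illustrative sketch (not part of the original file): callers reach this
// internal function through the public `tokio::task::spawn_blocking`,
// awaiting the returned `JoinHandle` from async context, e.g.:
//
//     let sum = tokio::task::spawn_blocking(|| (1u64..=1_000).sum::<u64>())
//         .await
//         .unwrap();
//     assert_eq!(sum, 500_500);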
cfg_fs! {
#[cfg_attr(any(
all(loom, not(test)), // the function is covered by loom tests
test
), allow(dead_code))]
/// Runs the provided function on an executor dedicated to blocking
/// operations. Tasks will be scheduled as mandatory, meaning they are
/// guaranteed to run unless a shutdown is already taking place. In case a
/// shutdown is already taking place, `None` will be returned.
pub(crate) fn spawn_mandatory_blocking<F, R>(func: F) -> Option<JoinHandle<R>>
where
F: FnOnce() -> R + Send + 'static,
R: Send + 'static,
{
let rt = context::current();
rt.as_inner().spawn_mandatory_blocking(&rt, func)
}
}
// ===== impl BlockingPool =====
impl BlockingPool {
pub(crate) fn new(builder: &Builder, thread_cap: usize) -> BlockingPool {
let (shutdown_tx, shutdown_rx) = shutdown::channel();
let keep_alive = builder.keep_alive.unwrap_or(KEEP_ALIVE);
BlockingPool {
spawner: Spawner {
inner: Arc::new(Inner {
shared: Mutex::new(Shared {
queue: VecDeque::new(),
num_th: 0,
num_idle: 0,
num_notify: 0,
shutdown: false,
shutdown_tx: Some(shutdown_tx),
last_exiting_thread: None,
worker_threads: HashMap::new(),
worker_thread_index: 0,
}),
condvar: Condvar::new(),
thread_name: builder.thread_name.clone(),
stack_size: builder.thread_stack_size,
after_start: builder.after_start.clone(),
before_stop: builder.before_stop.clone(),
thread_cap,
keep_alive,
}),
},
shutdown_rx,
}
}
pub(crate) fn spawner(&self) -> &Spawner {
&self.spawner
}
pub(crate) fn shutdown(&mut self, timeout: Option<Duration>) {
let mut shared = self.spawner.inner.shared.lock();
// The function can be called multiple times. First, by explicitly
// calling `shutdown` then by the drop handler calling `shutdown`. This
// prevents shutting down twice.
if shared.shutdown {
return;
}
shared.shutdown = true;
shared.shutdown_tx = None;
self.spawner.inner.condvar.notify_all();
let last_exited_thread = std::mem::take(&mut shared.last_exiting_thread);
let workers = std::mem::take(&mut shared.worker_threads);
drop(shared);
if self.shutdown_rx.wait(timeout) {
let _ = last_exited_thread.map(|th| th.join());
// Loom requires that execution be deterministic, so sort by thread ID before joining.
// (HashMaps use a randomly-seeded hash function, so the order is nondeterministic)
let mut workers: Vec<(usize, thread::JoinHandle<()>)> = workers.into_iter().collect();
workers.sort_by_key(|(id, _)| *id);
for (_id, handle) in workers.into_iter() {
let _ = handle.join();
}
}
}
}
impl Drop for BlockingPool {
fn drop(&mut self) {
self.shutdown(None);
}
}
impl fmt::Debug for BlockingPool {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.debug_struct("BlockingPool").finish()
}
}
// ===== impl Spawner =====
impl Spawner {
pub(crate) fn spawn(&self, task: Task, rt: &dyn ToHandle) -> Result<(), ()> {
let mut shared = self.inner.shared.lock();
if shared.shutdown {
// Shutdown the task: it's fine to shutdown this task (even if
// mandatory) because it was scheduled after the shutdown of the
// runtime began.
task.task.shutdown();
// no need to even push this task; it would never get picked up
return Err(());
}
shared.queue.push_back(task);
if shared.num_idle == 0 {
// No threads are able to process the task.
if shared.num_th == self.inner.thread_cap {
// At max number of threads
} else {
assert!(shared.shutdown_tx.is_some());
let shutdown_tx = shared.shutdown_tx.clone();
if let Some(shutdown_tx) = shutdown_tx {
let id = shared.worker_thread_index;
match self.spawn_thread(shutdown_tx, rt, id) {
Ok(handle) => {
shared.num_th += 1;
shared.worker_thread_index += 1;
shared.worker_threads.insert(id, handle);
}
Err(ref e) if is_temporary_os_thread_error(e) && shared.num_th > 0 => {
// OS temporarily failed to spawn a new thread.
// The task will be picked up eventually by a currently
// busy thread.
}
Err(e) => {
// The OS refused to spawn the thread and there is no thread
// to pick up the task that has just been pushed to the queue.
panic!("OS can't spawn worker thread: {}", e)
}
}
}
}
} else {
            // Notify an idle worker thread. The notification counter
            // is used to count the needed number of notifications
            // exactly. Thread libraries may generate spurious
            // wakeups, so this counter keeps us in a consistent state.
shared.num_idle -= 1;
shared.num_notify += 1;
self.inner.condvar.notify_one();
}
Ok(())
}
fn spawn_thread(
&self,
shutdown_tx: shutdown::Sender,
rt: &dyn ToHandle,
id: usize,
) -> std::io::Result<thread::JoinHandle<()>> {
let mut builder = thread::Builder::new().name((self.inner.thread_name)());
if let Some(stack_size) = self.inner.stack_size {
builder = builder.stack_size(stack_size);
}
let rt = rt.to_handle();
builder.spawn(move || {
// Only the reference should be moved into the closure
let _enter = crate::runtime::context::enter(rt.clone());
rt.as_inner().blocking_spawner.inner.run(id);
drop(shutdown_tx);
})
}
}
// Tells whether the error when spawning a thread is temporary.
#[inline]
fn is_temporary_os_thread_error(error: &std::io::Error) -> bool {
matches!(error.kind(), std::io::ErrorKind::WouldBlock)
}
impl Inner {
fn run(&self, worker_thread_id: usize) {
if let Some(f) = &self.after_start {
f()
}
let mut shared = self.shared.lock();
let mut join_on_thread = None;
'main: loop {
// BUSY
while let Some(task) = shared.queue.pop_front() {
drop(shared);
task.run();
shared = self.shared.lock();
}
// IDLE
shared.num_idle += 1;
while !shared.shutdown {
let lock_result = self.condvar.wait_timeout(shared, self.keep_alive).unwrap();
shared = lock_result.0;
let timeout_result = lock_result.1;
if shared.num_notify != 0 {
// We have received a legitimate wakeup,
// acknowledge it by decrementing the counter
// and transition to the BUSY state.
shared.num_notify -= 1;
break;
}
// Even if the condvar "timed out", if the pool is entering the
// shutdown phase, we want to perform the cleanup logic.
if !shared.shutdown && timeout_result.timed_out() {
// We'll join the prior timed-out thread's JoinHandle after dropping the lock.
// This isn't done when shutting down, because the thread calling shutdown will
// handle joining everything.
let my_handle = shared.worker_threads.remove(&worker_thread_id);
join_on_thread = std::mem::replace(&mut shared.last_exiting_thread, my_handle);
break 'main;
}
// Spurious wakeup detected, go back to sleep.
}
if shared.shutdown {
// Drain the queue
while let Some(task) = shared.queue.pop_front() {
drop(shared);
task.shutdown_or_run_if_mandatory();
shared = self.shared.lock();
}
// Work was produced, and we "took" it (by decrementing num_notify).
// This means that num_idle was decremented once for our wakeup.
// But, since we are exiting, we need to "undo" that, as we'll stay idle.
shared.num_idle += 1;
// NOTE: Technically we should also do num_notify++ and notify again,
// but since we're shutting down anyway, that won't be necessary.
break;
}
}
// Thread exit
shared.num_th -= 1; | shared.num_idle = shared
.num_idle
.checked_sub(1)
.expect("num_idle underflowed on thread exit");
if shared.shutdown && shared.num_th == 0 {
self.condvar.notify_one();
}
drop(shared);
if let Some(f) = &self.before_stop {
f()
}
if let Some(handle) = join_on_thread {
let _ = handle.join();
}
}
}
impl fmt::Debug for Spawner {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.debug_struct("blocking::Spawner").finish()
}
} |
// num_idle should now be tracked exactly, panic
// with a descriptive message if it is not the
// case. |
rl_main.py | # Run this again after editing submodules so Colab uses the updated versions
from citylearn import CityLearn
from citylearn import GridLearn
import matplotlib.pyplot as plt
from pathlib import Path
from citylearn import RL_Agents_Coord, Cluster_Agents
import numpy as np
import csv
import time
import re
import pandas as pd
import torch
from joblib import dump, load
# Load environment
climate_zone = 1
data_path = Path("citylearn/data/Climate_Zone_"+str(climate_zone))
building_attributes = data_path / 'building_attributes.json'
weather_file = data_path / 'weather_data.csv'
solar_profile = data_path / 'solar_generation_1kW.csv'
building_state_actions = 'citylearn/buildings_state_action_space.json'
building_id = ["Building_1","Building_2","Building_3","Building_4","Building_5","Building_6","Building_7","Building_8","Building_9"]
objective_function = ['ramping','1-load_factor','average_daily_peak','peak_demand','net_electricity_consumption','quadratic','voltage_dev']
ep_period = 8760
print("Initializing the grid...")
# The observation and action spaces contain the lower and upper bounds of the states and actions, provided to the agent to normalize the variables between 0 and 1.
# They can be obtained using observations_spaces[i].low or .high
env = GridLearn(data_path, building_attributes, weather_file, solar_profile, building_id, 1, buildings_states_actions = building_state_actions, simulation_period = (0,ep_period), cost_function = objective_function, verbose=1, n_buildings_per_bus=1)
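# Hedged illustration (not part of the original script): the bounds mentioned
# above can be used to min-max normalize a raw state vector. `obs_space` below
# stands in for one entry of the environment's observation spaces, which is an
# assumption about the GridLearn API.
def normalize_state(state, obs_space):
    return (np.array(state) - obs_space.low) / (obs_space.high - obs_space.low + 1e-8)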
# Hyperparameters
batch_size = 254 | lr = 0.0003
hid = [batch_size,batch_size]
n_episodes = 3
n_training_eps = n_episodes - 1
if not (batch_size < ep_period * n_training_eps):
    print("Warning: batch_size must be smaller than the number of training steps, otherwise a KeyError will be raised because the neural nets won't be initialized yet")
print("Initializing the agents...")
# Instantiating the control agent(s)
agents = RL_Agents_Coord(
    env, list(env.buildings.keys()),
    discount=gamma, batch_size=bs,
    replay_buffer_capacity=1e5, regression_buffer_capacity=12*ep_period,
    tau=tau, lr=lr, hidden_dim=hid,
    start_training=(ep_period+1)*(n_episodes-1),
    exploration_period=(ep_period+1)*(n_episodes)+1,
    start_regression=(ep_period+1),
    information_sharing=True, pca_compression=.95,
    action_scaling_coef=0.5, reward_scaling=5.,
    update_per_step=1, iterations_as=2)
print("Starting the experiment...")
# The number of episodes can be replaced by a stopping criterion (i.e. convergence of the average reward)
start = time.time()
for e in range(n_episodes):
    is_evaluating = (e > n_training_eps) # Evaluate the deterministic policy once the training episodes are done
rewards = []
state = env.reset()
done = False
j = 0
print("is_deterministic", is_evaluating)
action, coordination_vars = agents.select_action(state, deterministic=is_evaluating)
# print(action)
while not done:
next_state, reward, done, _ = env.step(action)
action_next, coordination_vars_next = agents.select_action(next_state, deterministic=is_evaluating)
agents.add_to_buffer(state, action, reward, next_state, done, coordination_vars, coordination_vars_next)
state = next_state
coordination_vars = coordination_vars_next
action = action_next
print('Loss -',env.cost(), 'Simulation time (min) -',(time.time()-start)/60.0) | bs = batch_size
tau = 0.005
gamma = 0.99 |
gdo.py | input0 = """button_clicked
cycle_complete
button_clicked
button_clicked
button_clicked
button_clicked
button_clicked
cycle_complete"""
input1 = """button_clicked
cycle_complete
button_clicked
block_detected
button_clicked
cycle_complete
button_clicked
block_cleared
button_clicked
cycle_complete"""
class GarageDoor(object):
CLOSED, OPENING, OPEN, CLOSING, STOPPED_OPENING, STOPPED_CLOSING, EMERGENCY_OPENING, OPEN_BLOCKED = range(8)
doordict = { CLOSED:"CLOSED", OPENING:"OPENING", OPEN:"OPEN",
CLOSING:"CLOSING", STOPPED_OPENING:"STOPPED OPENING",
STOPPED_CLOSING:"STOPPED CLOSING",
EMERGENCY_OPENING:"EMERGENCY OPENING",
OPEN_BLOCKED:"OPEN BLOCKED" }
def __init__(self):
self.current = self.CLOSED
def button_clicked(self):
if self.current == self.CLOSED:
self.current = self.OPENING
elif self.current == self.OPEN:
self.current = self.CLOSING
elif self.current == self.OPENING:
self.current = self.STOPPED_OPENING
elif self.current == self.CLOSING:
self.current = self.STOPPED_CLOSING
elif self.current == self.STOPPED_OPENING:
self.current = self.CLOSING
elif self.current == self.STOPPED_CLOSING:
self.current = self.OPENING
def block_detected(self):
if self.current == self.CLOSING:
self.current = self.EMERGENCY_OPENING
def block_cleared(self):
if self.current == self.EMERGENCY_OPENING:
self.current = self.OPENING
elif self.current == self.OPEN_BLOCKED:
self.current = self.OPEN
def cycle_complete(self):
if self.current == self.OPENING:
self.current = self.OPEN
elif self.current == self.CLOSING:
self.current = self.CLOSED
elif self.current == self.EMERGENCY_OPENING:
self.current = self.OPEN_BLOCKED
    def run_input(self, inp):
        print("INITIAL:", self.doordict[self.current])
        for line in inp.splitlines():
            # dispatch the event by method name; avoids eval()
            getattr(self, line)()
            print(line, self.doordict[self.current])
        return self.doordict[self.current]
if __name__ == "__main__":
| g = GarageDoor()
assert g.run_input(input0) == "CLOSED"
    print()
assert g.run_input(input1) == "CLOSED" |
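    # Hedged extra check (not in the original): exercising the emergency path
    # directly; after the obstruction clears, the door should end up OPEN.
    g2 = GarageDoor()
    for ev in ("button_clicked", "cycle_complete", "button_clicked",
               "block_detected", "cycle_complete", "block_cleared"):
        getattr(g2, ev)()
    assert g2.doordict[g2.current] == "OPEN"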
|
namespaces_dup.js | var namespaces_dup =
[
[ "cmd", "namespacecmd.html", null ],
[ "cuba", "namespacecuba.html", null ],
[ "ruba", "namespaceruba.html", null ] | ]; |
|
alb.py | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'ApplicationLBDriver'
]
from libcloud.utils.xml import findtext, findall
from libcloud.loadbalancer.types import State
from libcloud.loadbalancer.base import Driver, LoadBalancer, Member
from libcloud.common.aws import AWSGenericResponse, SignedAWSConnection
VERSION = '2015-12-01'
HOST = 'elasticloadbalancing.%s.amazonaws.com'
ROOT = '/%s/' % (VERSION)
NS = 'http://elasticloadbalancing.amazonaws.com/doc/%s/' % (VERSION, )
class ALBResponse(AWSGenericResponse):
"""
Amazon ALB response class.
"""
namespace = NS
exceptions = {}
xpath = 'Error'
class ALBConnection(SignedAWSConnection):
version = VERSION
host = HOST
responseCls = ALBResponse
service_name = 'elasticloadbalancing'
class ApplicationLBDriver(Driver):
name = 'Amazon Application Load Balancing'
website = 'http://aws.amazon.com/elasticloadbalancing/'
connectionCls = ALBConnection
signature_version = '4'
def __init__(self, access_id, secret, region, token=None):
self.token = token
super(ApplicationLBDriver, self).__init__(
access_id, secret, token=token
)
self.region = region
self.region_name = region
self.connection.host = HOST % (region)
def list_protocols(self):
return ['http', 'https']
def list_balancers(self):
params = {'Action': 'DescribeLoadBalancers'}
data = self.connection.request(ROOT, params=params).object
return self._to_balancers(data)
def balancer_list_members(self, balancer):
return balancer._members
def get_balancer(self, balancer_id):
params = {
'Action': 'DescribeLoadBalancers',
'LoadBalancerNames.member.1': balancer_id
}
data = self.connection.request(ROOT, params=params).object
return self._to_balancers(data)[0]
def ex_balancer_list_listeners(self, balancer):
return balancer.extra.get('listeners', [])
def _to_listeners(self, data):
xpath = 'DescribeListenersResult/Listeners/member'
return [self._to_listener(el) for el in findall(
element=data, xpath=xpath, namespace=NS
)]
def _to_listener(self, el):
listener_arn = findtext(element=el, xpath='ListenerArn', namespace=NS)
listener = {
'id': listener_arn,
'protocol': findtext(element=el, xpath='Protocol', namespace=NS),
'port': findtext(element=el, xpath='Port', namespace=NS),
'rules': self._ex_get_rules_for_listener(listener_arn)
}
return listener
def _to_targets(self, data):
|
def _to_target(self, el):
return findtext(
element=el,
xpath='DefaultActions/member/TargetGroupArn',
namespace=NS
)
def _to_balancer(self, el):
name = findtext(element=el, xpath='LoadBalancerName', namespace=NS)
id = findtext(element=el, xpath='LoadBalancerArn', namespace=NS)
dns_name = findtext(el, xpath='DNSName', namespace=NS)
balancer = LoadBalancer(
id=id,
name=name,
state=State.UNKNOWN,
ip=dns_name,
port=None,
driver=self.connection.driver
)
extra = {
'listeners': self._ex_get_balancer_listeners(balancer),
'target_groups': self._ex_get_balancer_target_groups(balancer),
'tags': self._ex_get_balancer_tags(balancer)
}
balancer.extra = extra
if len(extra['listeners']) > 0:
balancer.port = extra['listeners'][0]['port']
else:
balancer.port = None
        balancer._members = self._ex_get_balancer_members(balancer)
return balancer
def _to_balancers(self, data):
xpath = 'DescribeLoadBalancersResult/LoadBalancers/member'
return [self._to_balancer(el)
for el in findall(element=data, xpath=xpath, namespace=NS)]
def _to_tags(self, data):
"""
return tags dict
"""
tags = {}
xpath = 'DescribeTagsResult/TagDescriptions/member/Tags/member'
for el in findall(element=data, xpath=xpath, namespace=NS):
key = findtext(element=el, xpath='Key', namespace=NS)
value = findtext(element=el, xpath='Value', namespace=NS)
if key:
tags[key] = value
return tags
def _to_rule(self, el):
def __to_bool__(val):
return val.lower() in ("yes", "true", "t", "1")
id = findtext(element=el, xpath='RuleArn', namespace=NS)
is_default = findtext(element=el, xpath='IsDefault', namespace=NS)
priority = findtext(element=el, xpath='Priority', namespace=NS)
target_group = findtext(
element=el,
xpath='Actions/member/TargetGroupArn',
namespace=NS
)
conditions = {}
cond_members = findall(
element=el, xpath='Conditions/member', namespace=NS
)
for cond_member in cond_members:
field = findtext(element=cond_member, xpath='Field', namespace=NS)
conditions[field] = []
value_members = findall(
element=cond_member, xpath='Values/member', namespace=NS
)
for value_member in value_members:
conditions[field].append(value_member.text)
rule = {
'id': id,
'is_default': __to_bool__(is_default),
'priority': priority,
'target_group': target_group,
'conditions': conditions
}
return rule
def _to_rules(self, data):
xpath = 'DescribeRulesResult/Rules/member'
return [self._to_rule(el)
for el in findall(element=data, xpath=xpath, namespace=NS)]
def _to_target_groups(self, data):
xpath = 'DescribeTargetGroupsResult/TargetGroups/member'
return [self._to_target_group(el)
for el in findall(element=data, xpath=xpath, namespace=NS)]
def _to_target_group(self, el):
target_group_arn = findtext(
element=el, xpath='TargetGroupArn', namespace=NS
)
name = findtext(element=el, xpath='TargetGroupName', namespace=NS)
members = self._ex_get_target_group_members(target_group_arn)
return {'id': target_group_arn, 'name': name, 'members': members}
def _to_target_group_members(self, data):
xpath = 'DescribeTargetHealthResult/TargetHealthDescriptions/member'
return [self._to_target_group_member(el)
for el in findall(element=data, xpath=xpath, namespace=NS)]
def _to_target_group_member(self, el):
id = findtext(element=el, xpath='Target/Id', namespace=NS)
port = findtext(element=el, xpath='Target/Port', namespace=NS)
health = findtext(
element=el, xpath='TargetHealth/State', namespace=NS
)
return {'id': id, 'port': port, 'health': health}
    def _ex_get_balancer_members(self, balancer):
balancer_members = []
for tg in balancer.extra['target_groups']:
for tg_member in tg['members']:
new_member = Member(
tg_member['id'],
None,
tg_member['port'],
balancer=balancer,
extra={
'health': tg_member['health'],
'target_group': tg['name']
}
)
balancer_members.append(new_member)
return balancer_members
def _ex_get_target_group_members(self, target_group_arn):
"""
Return a list of target group member dicts.
:rtype: ``list`` of ``dict``
"""
params = {
'Action': 'DescribeTargetHealth',
'TargetGroupArn': target_group_arn
}
data = self.connection.request(ROOT, params=params).object
return self._to_target_group_members(data)
def _ex_get_balancer_target_groups(self, balancer):
"""
Return a list of load balancer target groups with members.
:rtype: ``list`` of ``dict``
"""
params = {
'Action': 'DescribeTargetGroups',
'LoadBalancerArn': balancer.id
}
data = self.connection.request(ROOT, params=params).object
return self._to_target_groups(data)
def _ex_get_balancer_listeners(self, balancer):
"""
Return a list of load balancer listeners dicts.
:rtype: ``list`` of ``dict``
"""
params = {
'Action': 'DescribeListeners',
'LoadBalancerArn': balancer.id
}
data = self.connection.request(ROOT, params=params).object
return self._to_listeners(data)
def _ex_get_rules_for_listener(self, listener_arn):
"""
Return a list of listeners rule dicts.
:rtype: ``list`` of ``dict``
"""
params = {
'Action': 'DescribeRules',
'ListenerArn': listener_arn
}
data = self.connection.request(ROOT, params=params).object
return self._to_rules(data)
def _ex_connection_class_kwargs(self):
pdriver = super(ApplicationLBDriver, self)
kwargs = pdriver._ex_connection_class_kwargs()
if hasattr(self, 'token') and self.token is not None:
kwargs['token'] = self.token
kwargs['signature_version'] = '4'
else:
kwargs['signature_version'] = self.signature_version
return kwargs
def _ex_get_balancer_tags(self, balancer):
params = {
'Action': 'DescribeTags',
'ResourceArns.member.1': balancer.id
}
data = self.connection.request(ROOT, params=params).object
return self._to_tags(data)
| xpath = 'DefaultActions/member'
return [self._to_target(el) for el in findall(
element=data, xpath=xpath, namespace=NS
)] |
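# Hedged usage sketch (not part of the original module): instantiating the
# driver and listing balancers; the credentials and region are placeholders.
#
#     driver = ApplicationLBDriver('ACCESS_ID', 'SECRET', 'us-east-1')
#     for lb in driver.list_balancers():
#         print(lb.name, lb.ip, lb.extra['listeners'])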
lib.rs | //! Various data structures used by the Rust compiler. The intention
//! is that code in here should not be *specific* to rustc, so that
//! it can be easily unit tested and so forth.
//!
//! # Note
//!
//! This API is completely unstable and subject to change.
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(in_band_lifetimes)]
#![feature(unboxed_closures)]
#![feature(generators)]
#![feature(generator_trait)]
#![feature(fn_traits)]
#![feature(unsize)]
#![feature(specialization)]
#![feature(optin_builtin_traits)]
#![feature(nll)]
#![feature(allow_internal_unstable)]
#![feature(hash_raw_entry)]
#![feature(stmt_expr_attributes)]
#![feature(core_intrinsics)]
#![feature(integer_atomics)]
#![feature(test)]
#![feature(associated_type_bounds)]
#![cfg_attr(unix, feature(libc))]
#![allow(rustc::default_hash_types)]
#[macro_use]
extern crate log;
#[cfg(unix)]
extern crate libc;
#[macro_use]
extern crate cfg_if;
pub use rustc_serialize::hex::ToHex;
#[inline(never)]
#[cold]
pub fn cold_path<F: FnOnce() -> R, R>(f: F) -> R {
f()
}
#[macro_export]
macro_rules! likely {
($e:expr) => {
#[allow(unused_unsafe)]
{
unsafe { std::intrinsics::likely($e) }
}
}
}
#[macro_export]
macro_rules! unlikely {
($e:expr) => {
#[allow(unused_unsafe)]
{
unsafe { std::intrinsics::unlikely($e) }
}
}
}
pub mod macros;
pub mod svh;
pub mod base_n;
pub mod binary_search_util;
pub mod bit_set;
pub mod box_region;
pub mod const_cstr;
pub mod flock;
pub mod fx;
pub mod graph;
pub mod indexed_vec;
pub mod jobserver;
pub mod obligation_forest;
pub mod owning_ref;
pub mod ptr_key;
pub mod sip128;
pub mod small_c_str;
pub mod snapshot_map;
pub use ena::snapshot_vec;
pub mod sorted_map;
#[macro_use] pub mod stable_hasher;
pub mod sync;
pub mod sharded;
pub mod tiny_list;
pub mod thin_vec;
pub mod transitive_relation;
pub use ena::unify;
pub mod vec_linked_list;
pub mod work_queue;
pub mod fingerprint;
pub struct OnDrop<F: Fn()>(pub F);
impl<F: Fn()> OnDrop<F> {
    /// Forgets the function, which prevents it from running.
/// Ensure that the function owns no memory, otherwise it will be leaked.
#[inline]
pub fn disable(self) {
std::mem::forget(self);
}
}
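// Illustrative sketch (not part of the original file): OnDrop runs its
// closure when dropped, unless `disable` is called first, e.g.:
//
//     let guard = OnDrop(|| eprintln!("cleanup ran"));
//     fallible_work()?;     // an early return here would trigger the cleanup
//     guard.disable();      // success path: skip the cleanup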
impl<F: Fn()> Drop for OnDrop<F> {
#[inline]
fn drop(&mut self) |
}
// See comments in src/librustc/lib.rs
#[doc(hidden)]
pub fn __noop_fix_for_27438() {}
| {
(self.0)();
} |
tcp_accept.rs | #![warn(rust_2018_idioms)]
#![cfg(feature = "full")]
use tokio::net::{TcpListener, TcpStream};
use tokio::sync::{mpsc, oneshot};
use tokio_test::assert_ok;
use std::net::{IpAddr, SocketAddr};
macro_rules! test_accept {
($(($ident:ident, $target:expr),)*) => {
$(
#[tokio::test]
async fn $ident() {
let mut listener = assert_ok!(TcpListener::bind($target).await);
let addr = listener.local_addr().unwrap();
let (tx, rx) = oneshot::channel();
tokio::spawn(async move {
let (socket, _) = assert_ok!(listener.accept().await);
assert_ok!(tx.send(socket));
});
let cli = assert_ok!(TcpStream::connect(&addr).await);
let srv = assert_ok!(rx.await);
assert_eq!(cli.local_addr().unwrap(), srv.peer_addr().unwrap());
}
)*
}
}
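// For reference (editorial note, not in the original file): each
// `(ident, target)` pair below expands to its own #[tokio::test] that binds
// the given target, accepts one connection, and checks that the client's
// local address matches the server's peer address.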
test_accept! {
(ip_str, "127.0.0.1:0"),
(host_str, "localhost:0"),
(socket_addr, "127.0.0.1:0".parse::<SocketAddr>().unwrap()),
(str_port_tuple, ("127.0.0.1", 0)),
(ip_port_tuple, ("127.0.0.1".parse::<IpAddr>().unwrap(), 0)),
}
use std::pin::Pin;
use std::sync::{
atomic::{AtomicUsize, Ordering::SeqCst},
Arc,
};
use std::task::{Context, Poll};
use tokio::stream::{Stream, StreamExt};
struct TrackPolls<S> {
npolls: Arc<AtomicUsize>,
s: S,
}
impl<S> Stream for TrackPolls<S>
where
S: Stream,
{
type Item = S::Item;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> |
}
#[tokio::test]
async fn no_extra_poll() {
let mut listener = assert_ok!(TcpListener::bind("127.0.0.1:0").await);
let addr = listener.local_addr().unwrap();
let (tx, rx) = oneshot::channel();
let (accepted_tx, mut accepted_rx) = mpsc::unbounded_channel();
tokio::spawn(async move {
let mut incoming = TrackPolls {
npolls: Arc::new(AtomicUsize::new(0)),
s: listener.incoming(),
};
assert_ok!(tx.send(Arc::clone(&incoming.npolls)));
while incoming.next().await.is_some() {
accepted_tx.send(()).unwrap();
}
});
let npolls = assert_ok!(rx.await);
tokio::task::yield_now().await;
// should have been polled exactly once: the initial poll
assert_eq!(npolls.load(SeqCst), 1);
let _ = assert_ok!(TcpStream::connect(&addr).await);
accepted_rx.next().await.unwrap();
// should have been polled twice more: once to yield Some(), then once to yield Pending
assert_eq!(npolls.load(SeqCst), 1 + 2);
}
| {
// safety: we do not move s
let this = unsafe { self.get_unchecked_mut() };
this.npolls.fetch_add(1, SeqCst);
// safety: we are pinned, and so is s
unsafe { Pin::new_unchecked(&mut this.s) }.poll_next(cx)
} |
httpbasics.py | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Hashtagcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test rpc http basics
#
from test_framework.test_framework import HashtagcoinTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (HashtagcoinTestFramework):
def | (self):
super().__init__()
self.num_nodes = 3
self.setup_clean_chain = False
def setup_network(self):
self.nodes = self.setup_nodes()
def run_test(self):
#################################################
# lowlevel check for http persistent connection #
#################################################
url = urllib.parse.urlparse(self.nodes[0].url)
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
        assert(b'"error":null' in out1) #must also respond with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
        #the same should hold if we add keep-alive explicitly, since that is the standard behaviour
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
        assert(b'"error":null' in out1) #must also respond with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#now do the same with "Connection: close"
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock==None) #now the connection must be closed after the response
        #node1 (2nd node) is running with the keep-alive option disabled
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
authpair = urlNode1.username + ':' + urlNode1.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
authpair = urlNode2.username + ':' + urlNode2.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
        assert(conn.sock!=None) #connection must still be open because hashtagcoind should use keep-alive by default
# Check excessive request size
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*1000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*10000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
    HTTPBasicsTest().main()
| __init__ |
json_deser.rs | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub fn parse_http_generic_error(
response: &http::Response<bytes::Bytes>,
) -> Result<aws_smithy_types::Error, aws_smithy_json::deserialize::Error> {
crate::json_errors::parse_generic_error(response.body(), response.headers())
}
pub fn deser_structure_crate_error_access_denied_exception_json_err(
value: &[u8],
mut builder: crate::error::access_denied_exception::Builder,
) -> Result<crate::error::access_denied_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
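// Editorial note (not part of the generated file): every deserializer in this
// module follows the same token-stream pattern: expect a start object, loop
// over object keys, copy recognized fields into the builder, skip unknown
// keys, and fail if any tokens remain after the closing brace.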
pub fn deser_structure_crate_error_access_denied_for_dependency_exception_json_err(
value: &[u8],
mut builder: crate::error::access_denied_for_dependency_exception::Builder,
) -> Result<
crate::error::access_denied_for_dependency_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Reason" => {
builder = builder.set_reason(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::AccessDeniedForDependencyExceptionReason::from(
u.as_ref(),
)
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_aws_organizations_not_in_use_exception_json_err(
value: &[u8],
mut builder: crate::error::aws_organizations_not_in_use_exception::Builder,
) -> Result<
crate::error::aws_organizations_not_in_use_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_concurrent_modification_exception_json_err(
value: &[u8],
mut builder: crate::error::concurrent_modification_exception::Builder,
) -> Result<
crate::error::concurrent_modification_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_handshake_already_in_state_exception_json_err(
value: &[u8],
mut builder: crate::error::handshake_already_in_state_exception::Builder,
) -> Result<
crate::error::handshake_already_in_state_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_handshake_constraint_violation_exception_json_err(
value: &[u8],
mut builder: crate::error::handshake_constraint_violation_exception::Builder,
) -> Result<
crate::error::handshake_constraint_violation_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Reason" => {
builder = builder.set_reason(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::HandshakeConstraintViolationExceptionReason::from(
u.as_ref(),
)
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_handshake_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::handshake_not_found_exception::Builder,
) -> Result<crate::error::handshake_not_found_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_invalid_handshake_transition_exception_json_err(
value: &[u8],
mut builder: crate::error::invalid_handshake_transition_exception::Builder,
) -> Result<
crate::error::invalid_handshake_transition_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_invalid_input_exception_json_err(
value: &[u8],
mut builder: crate::error::invalid_input_exception::Builder,
) -> Result<crate::error::invalid_input_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Reason" => {
builder = builder.set_reason(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::InvalidInputExceptionReason::from(u.as_ref())
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_service_exception_json_err(
value: &[u8],
mut builder: crate::error::service_exception::Builder,
) -> Result<crate::error::service_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_too_many_requests_exception_json_err(
value: &[u8],
mut builder: crate::error::too_many_requests_exception::Builder,
) -> Result<crate::error::too_many_requests_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Type" => {
builder = builder.set_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_accept_handshake(
value: &[u8],
mut builder: crate::output::accept_handshake_output::Builder,
) -> Result<crate::output::accept_handshake_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Handshake" => {
builder = builder.set_handshake(
crate::json_deser::deser_structure_crate_model_handshake(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_constraint_violation_exception_json_err(
value: &[u8],
mut builder: crate::error::constraint_violation_exception::Builder,
) -> Result<
crate::error::constraint_violation_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Reason" => {
builder = builder.set_reason(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::ConstraintViolationExceptionReason::from(
u.as_ref(),
)
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_duplicate_policy_attachment_exception_json_err(
value: &[u8],
mut builder: crate::error::duplicate_policy_attachment_exception::Builder,
) -> Result<
crate::error::duplicate_policy_attachment_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_policy_changes_in_progress_exception_json_err(
value: &[u8],
mut builder: crate::error::policy_changes_in_progress_exception::Builder,
) -> Result<
crate::error::policy_changes_in_progress_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_policy_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::policy_not_found_exception::Builder,
) -> Result<crate::error::policy_not_found_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_policy_type_not_enabled_exception_json_err(
value: &[u8],
mut builder: crate::error::policy_type_not_enabled_exception::Builder,
) -> Result<
crate::error::policy_type_not_enabled_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_target_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::target_not_found_exception::Builder,
) -> Result<crate::error::target_not_found_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_unsupported_api_endpoint_exception_json_err(
value: &[u8],
mut builder: crate::error::unsupported_api_endpoint_exception::Builder,
) -> Result<
crate::error::unsupported_api_endpoint_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_cancel_handshake(
value: &[u8],
mut builder: crate::output::cancel_handshake_output::Builder,
) -> Result<crate::output::cancel_handshake_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Handshake" => {
builder = builder.set_handshake(
crate::json_deser::deser_structure_crate_model_handshake(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_finalizing_organization_exception_json_err(
value: &[u8],
mut builder: crate::error::finalizing_organization_exception::Builder,
) -> Result<
crate::error::finalizing_organization_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
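/// Deserializes a `CreateAccount` response body. Account creation is
/// asynchronous, so the payload carries a `CreateAccountStatus` structure
/// describing the in-progress request rather than the account itself.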
pub fn deser_operation_crate_operation_create_account(
value: &[u8],
mut builder: crate::output::create_account_output::Builder,
) -> Result<crate::output::create_account_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"CreateAccountStatus" => {
builder = builder.set_create_account_status(
crate::json_deser::deser_structure_crate_model_create_account_status(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_create_gov_cloud_account(
value: &[u8],
mut builder: crate::output::create_gov_cloud_account_output::Builder,
) -> Result<
crate::output::create_gov_cloud_account_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"CreateAccountStatus" => {
builder = builder.set_create_account_status(
crate::json_deser::deser_structure_crate_model_create_account_status(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_already_in_organization_exception_json_err(
value: &[u8],
mut builder: crate::error::already_in_organization_exception::Builder,
) -> Result<
crate::error::already_in_organization_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
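/// Deserializes a `CreateOrganization` response body, extracting the
/// `Organization` structure for the newly created organization.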
pub fn deser_operation_crate_operation_create_organization(
value: &[u8],
mut builder: crate::output::create_organization_output::Builder,
) -> Result<crate::output::create_organization_output::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Organization" => {
builder = builder.set_organization(
crate::json_deser::deser_structure_crate_model_organization(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_duplicate_organizational_unit_exception_json_err(
value: &[u8],
mut builder: crate::error::duplicate_organizational_unit_exception::Builder,
) -> Result<
crate::error::duplicate_organizational_unit_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_parent_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::parent_not_found_exception::Builder,
) -> Result<crate::error::parent_not_found_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
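/// Deserializes a `CreateOrganizationalUnit` response body, extracting the
/// `OrganizationalUnit` structure for the newly created OU.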
pub fn deser_operation_crate_operation_create_organizational_unit(
value: &[u8],
mut builder: crate::output::create_organizational_unit_output::Builder,
) -> Result<
crate::output::create_organizational_unit_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"OrganizationalUnit" => {
builder = builder.set_organizational_unit(
crate::json_deser::deser_structure_crate_model_organizational_unit(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_duplicate_policy_exception_json_err(
value: &[u8],
mut builder: crate::error::duplicate_policy_exception::Builder,
) -> Result<crate::error::duplicate_policy_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_malformed_policy_document_exception_json_err(
value: &[u8],
mut builder: crate::error::malformed_policy_document_exception::Builder,
) -> Result<
crate::error::malformed_policy_document_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_policy_type_not_available_for_organization_exception_json_err(
value: &[u8],
mut builder: crate::error::policy_type_not_available_for_organization_exception::Builder,
) -> Result<
crate::error::policy_type_not_available_for_organization_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
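/// Deserializes a `CreatePolicy` response body, extracting the `Policy`
/// structure for the newly created policy.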
pub fn deser_operation_crate_operation_create_policy(
value: &[u8],
mut builder: crate::output::create_policy_output::Builder,
) -> Result<crate::output::create_policy_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Policy" => {
builder = builder.set_policy(
crate::json_deser::deser_structure_crate_model_policy(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_decline_handshake(
value: &[u8],
mut builder: crate::output::decline_handshake_output::Builder,
) -> Result<crate::output::decline_handshake_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Handshake" => {
builder = builder.set_handshake(
crate::json_deser::deser_structure_crate_model_handshake(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_organization_not_empty_exception_json_err(
value: &[u8],
mut builder: crate::error::organization_not_empty_exception::Builder,
) -> Result<
crate::error::organization_not_empty_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_organizational_unit_not_empty_exception_json_err(
value: &[u8],
mut builder: crate::error::organizational_unit_not_empty_exception::Builder,
) -> Result<
crate::error::organizational_unit_not_empty_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_organizational_unit_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::organizational_unit_not_found_exception::Builder,
) -> Result<
crate::error::organizational_unit_not_found_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_policy_in_use_exception_json_err(
value: &[u8],
mut builder: crate::error::policy_in_use_exception::Builder,
) -> Result<crate::error::policy_in_use_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_account_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::account_not_found_exception::Builder,
) -> Result<crate::error::account_not_found_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_account_not_registered_exception_json_err(
value: &[u8],
mut builder: crate::error::account_not_registered_exception::Builder,
) -> Result<
crate::error::account_not_registered_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
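/// Deserializes a `DescribeAccount` response body, extracting the `Account`
/// structure for the requested account.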
pub fn deser_operation_crate_operation_describe_account(
value: &[u8],
mut builder: crate::output::describe_account_output::Builder,
) -> Result<crate::output::describe_account_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Account" => {
builder = builder.set_account(
crate::json_deser::deser_structure_crate_model_account(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_create_account_status_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::create_account_status_not_found_exception::Builder,
) -> Result<
crate::error::create_account_status_not_found_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_describe_create_account_status(
value: &[u8],
mut builder: crate::output::describe_create_account_status_output::Builder,
) -> Result<
crate::output::describe_create_account_status_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"CreateAccountStatus" => {
builder = builder.set_create_account_status(
crate::json_deser::deser_structure_crate_model_create_account_status(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_effective_policy_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::effective_policy_not_found_exception::Builder,
) -> Result<
crate::error::effective_policy_not_found_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
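/// Deserializes a `DescribeEffectivePolicy` response body, extracting the
/// `EffectivePolicy` structure that results from policy inheritance for the
/// target.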
pub fn deser_operation_crate_operation_describe_effective_policy(
value: &[u8],
mut builder: crate::output::describe_effective_policy_output::Builder,
) -> Result<
crate::output::describe_effective_policy_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"EffectivePolicy" => {
builder = builder.set_effective_policy(
crate::json_deser::deser_structure_crate_model_effective_policy(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_describe_handshake(
value: &[u8],
mut builder: crate::output::describe_handshake_output::Builder,
) -> Result<crate::output::describe_handshake_output::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Handshake" => {
builder = builder.set_handshake(
crate::json_deser::deser_structure_crate_model_handshake(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_describe_organization(
value: &[u8],
mut builder: crate::output::describe_organization_output::Builder,
) -> Result<crate::output::describe_organization_output::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Organization" => {
builder = builder.set_organization(
crate::json_deser::deser_structure_crate_model_organization(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_describe_organizational_unit(
value: &[u8],
mut builder: crate::output::describe_organizational_unit_output::Builder,
) -> Result<
crate::output::describe_organizational_unit_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"OrganizationalUnit" => {
builder = builder.set_organizational_unit(
crate::json_deser::deser_structure_crate_model_organizational_unit(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_describe_policy(
value: &[u8],
mut builder: crate::output::describe_policy_output::Builder,
) -> Result<crate::output::describe_policy_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Policy" => {
builder = builder.set_policy(
crate::json_deser::deser_structure_crate_model_policy(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_policy_not_attached_exception_json_err(
value: &[u8],
mut builder: crate::error::policy_not_attached_exception::Builder,
) -> Result<crate::error::policy_not_attached_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_root_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::root_not_found_exception::Builder,
) -> Result<crate::error::root_not_found_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
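/// Deserializes a `DisablePolicyType` response body, extracting the updated
/// `Root` structure whose enabled policy types reflect the change.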
pub fn deser_operation_crate_operation_disable_policy_type(
value: &[u8],
mut builder: crate::output::disable_policy_type_output::Builder,
) -> Result<crate::output::disable_policy_type_output::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Root" => {
builder = builder
.set_root(crate::json_deser::deser_structure_crate_model_root(tokens)?);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_enable_all_features(
value: &[u8],
mut builder: crate::output::enable_all_features_output::Builder,
) -> Result<crate::output::enable_all_features_output::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Handshake" => {
builder = builder.set_handshake(
crate::json_deser::deser_structure_crate_model_handshake(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_policy_type_already_enabled_exception_json_err(
value: &[u8],
mut builder: crate::error::policy_type_already_enabled_exception::Builder,
) -> Result<
crate::error::policy_type_already_enabled_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_enable_policy_type(
value: &[u8],
mut builder: crate::output::enable_policy_type_output::Builder,
) -> Result<crate::output::enable_policy_type_output::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Root" => {
builder = builder
.set_root(crate::json_deser::deser_structure_crate_model_root(tokens)?);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_account_owner_not_verified_exception_json_err(
value: &[u8],
mut builder: crate::error::account_owner_not_verified_exception::Builder,
) -> Result<
crate::error::account_owner_not_verified_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_duplicate_handshake_exception_json_err(
value: &[u8],
mut builder: crate::error::duplicate_handshake_exception::Builder,
) -> Result<crate::error::duplicate_handshake_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_invite_account_to_organization(
value: &[u8],
mut builder: crate::output::invite_account_to_organization_output::Builder,
) -> Result<
crate::output::invite_account_to_organization_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Handshake" => {
builder = builder.set_handshake(
crate::json_deser::deser_structure_crate_model_handshake(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_master_cannot_leave_organization_exception_json_err(
value: &[u8],
mut builder: crate::error::master_cannot_leave_organization_exception::Builder,
) -> Result<
crate::error::master_cannot_leave_organization_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
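/// Deserializes a `ListAccounts` response body. Like the other paginated
/// `List*` deserializers below, it extracts both the result list (`Accounts`)
/// and the optional `NextToken` used to request the next page.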
pub fn deser_operation_crate_operation_list_accounts(
value: &[u8],
mut builder: crate::output::list_accounts_output::Builder,
) -> Result<crate::output::list_accounts_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Accounts" => {
builder = builder.set_accounts(
crate::json_deser::deser_list_com_amazonaws_organizations_accounts(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_accounts_for_parent(
value: &[u8],
mut builder: crate::output::list_accounts_for_parent_output::Builder,
) -> Result<
crate::output::list_accounts_for_parent_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Accounts" => {
builder = builder.set_accounts(
crate::json_deser::deser_list_com_amazonaws_organizations_accounts(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
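/// Deserializes a `ListAWSServiceAccessForOrganization` response body,
/// extracting the `EnabledServicePrincipals` list and the pagination token.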
pub fn deser_operation_crate_operation_list_aws_service_access_for_organization(
value: &[u8],
mut builder: crate::output::list_aws_service_access_for_organization_output::Builder,
) -> Result<
crate::output::list_aws_service_access_for_organization_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"EnabledServicePrincipals" => {
builder = builder.set_enabled_service_principals(
crate::json_deser::deser_list_com_amazonaws_organizations_enabled_service_principals(tokens)?
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_children(
value: &[u8],
mut builder: crate::output::list_children_output::Builder,
) -> Result<crate::output::list_children_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Children" => {
builder = builder.set_children(
crate::json_deser::deser_list_com_amazonaws_organizations_children(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_create_account_status(
value: &[u8],
mut builder: crate::output::list_create_account_status_output::Builder,
) -> Result<
crate::output::list_create_account_status_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"CreateAccountStatuses" => {
builder = builder.set_create_account_statuses(
crate::json_deser::deser_list_com_amazonaws_organizations_create_account_statuses(tokens)?
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_delegated_administrators(
value: &[u8],
mut builder: crate::output::list_delegated_administrators_output::Builder,
) -> Result<
crate::output::list_delegated_administrators_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"DelegatedAdministrators" => {
builder = builder.set_delegated_administrators(
crate::json_deser::deser_list_com_amazonaws_organizations_delegated_administrators(tokens)?
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_delegated_services_for_account(
value: &[u8],
mut builder: crate::output::list_delegated_services_for_account_output::Builder,
) -> Result<
crate::output::list_delegated_services_for_account_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"DelegatedServices" => {
builder = builder.set_delegated_services(
crate::json_deser::deser_list_com_amazonaws_organizations_delegated_services(tokens)?
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_handshakes_for_account(
value: &[u8],
mut builder: crate::output::list_handshakes_for_account_output::Builder,
) -> Result<
crate::output::list_handshakes_for_account_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Handshakes" => {
builder = builder.set_handshakes(
crate::json_deser::deser_list_com_amazonaws_organizations_handshakes(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_handshakes_for_organization(
value: &[u8],
mut builder: crate::output::list_handshakes_for_organization_output::Builder,
) -> Result<
crate::output::list_handshakes_for_organization_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Handshakes" => {
builder = builder.set_handshakes(
crate::json_deser::deser_list_com_amazonaws_organizations_handshakes(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_organizational_units_for_parent(
value: &[u8],
mut builder: crate::output::list_organizational_units_for_parent_output::Builder,
) -> Result<
crate::output::list_organizational_units_for_parent_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"OrganizationalUnits" => {
builder = builder.set_organizational_units(
crate::json_deser::deser_list_com_amazonaws_organizations_organizational_units(tokens)?
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
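/// Deserializes the modeled `ChildNotFoundException` error shape. The error
/// deserializers in this module extract only the `Message` field and skip the rest.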
pub fn deser_structure_crate_error_child_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::child_not_found_exception::Builder,
) -> Result<crate::error::child_not_found_exception::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_parents(
value: &[u8],
mut builder: crate::output::list_parents_output::Builder,
) -> Result<crate::output::list_parents_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Parents" => {
builder = builder.set_parents(
crate::json_deser::deser_list_com_amazonaws_organizations_parents(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_policies(
value: &[u8],
mut builder: crate::output::list_policies_output::Builder,
) -> Result<crate::output::list_policies_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Policies" => {
builder = builder.set_policies(
crate::json_deser::deser_list_com_amazonaws_organizations_policies(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_policies_for_target(
value: &[u8],
mut builder: crate::output::list_policies_for_target_output::Builder,
) -> Result<
crate::output::list_policies_for_target_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Policies" => {
builder = builder.set_policies(
crate::json_deser::deser_list_com_amazonaws_organizations_policies(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_roots(
value: &[u8],
mut builder: crate::output::list_roots_output::Builder,
) -> Result<crate::output::list_roots_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Roots" => {
builder = builder.set_roots(
crate::json_deser::deser_list_com_amazonaws_organizations_roots(
tokens,
)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_tags_for_resource(
value: &[u8],
mut builder: crate::output::list_tags_for_resource_output::Builder,
) -> Result<
crate::output::list_tags_for_resource_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Tags" => {
builder = builder.set_tags(
crate::json_deser::deser_list_com_amazonaws_organizations_tags(tokens)?,
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_list_targets_for_policy(
value: &[u8],
mut builder: crate::output::list_targets_for_policy_output::Builder,
) -> Result<
crate::output::list_targets_for_policy_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Targets" => {
builder = builder.set_targets(
crate::json_deser::deser_list_com_amazonaws_organizations_policy_targets(tokens)?
);
}
"NextToken" => {
builder = builder.set_next_token(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_destination_parent_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::destination_parent_not_found_exception::Builder,
) -> Result<
crate::error::destination_parent_not_found_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_duplicate_account_exception_json_err(
value: &[u8],
mut builder: crate::error::duplicate_account_exception::Builder,
) -> Result<crate::error::duplicate_account_exception::Builder, aws_smithy_json::deserialize::Error>
{
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_source_parent_not_found_exception_json_err(
value: &[u8],
mut builder: crate::error::source_parent_not_found_exception::Builder,
) -> Result<
crate::error::source_parent_not_found_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_structure_crate_error_account_already_registered_exception_json_err(
value: &[u8],
mut builder: crate::error::account_already_registered_exception::Builder,
) -> Result<
crate::error::account_already_registered_exception::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Message" => {
builder = builder.set_message(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
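/// Deserializes the `UpdateOrganizationalUnit` response body. Non-paginated outputs
/// like this one carry a single nested structure instead of a list plus `NextToken`.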
pub fn deser_operation_crate_operation_update_organizational_unit(
value: &[u8],
mut builder: crate::output::update_organizational_unit_output::Builder,
) -> Result<
crate::output::update_organizational_unit_output::Builder,
aws_smithy_json::deserialize::Error,
> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"OrganizationalUnit" => {
builder = builder.set_organizational_unit(
crate::json_deser::deser_structure_crate_model_organizational_unit(
tokens,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
pub fn deser_operation_crate_operation_update_policy(
value: &[u8],
mut builder: crate::output::update_policy_output::Builder,
) -> Result<crate::output::update_policy_output::Builder, aws_smithy_json::deserialize::Error> {
let mut tokens_owned =
aws_smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(value))
.peekable();
let tokens = &mut tokens_owned;
aws_smithy_json::deserialize::token::expect_start_object(tokens.next())?;
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Policy" => {
builder = builder.set_policy(
crate::json_deser::deser_structure_crate_model_policy(tokens)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
if tokens.next().is_some() {
return Err(aws_smithy_json::deserialize::Error::custom(
"found more JSON tokens after completing parsing",
));
}
Ok(builder)
}
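/// Substitutes an empty JSON document for an empty response body so the token
/// iterator always starts from a well-formed object.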
pub fn or_empty_doc(data: &[u8]) -> &[u8] {
if data.is_empty() {
b"{}"
} else {
data
}
}
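/// Deserializes a `Handshake` model structure. The structure deserializers below
/// return `Ok(None)` for JSON `null`, and otherwise populate the model's builder
/// field by field, consuming tokens through the matching end-object.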
pub fn deser_structure_crate_model_handshake<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Handshake>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Handshake::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Parties" => {
builder = builder.set_parties(
crate::json_deser::deser_list_com_amazonaws_organizations_handshake_parties(tokens)?
);
}
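                            // Enum-valued fields arrive as JSON strings and are
                            // converted via `From<&str>`; this keeps unrecognized
                            // values from aborting deserialization.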
"State" => {
builder = builder.set_state(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::HandshakeState::from(u.as_ref()))
})
.transpose()?,
);
}
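                            // Timestamps are wire-encoded as epoch seconds.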
"RequestedTimestamp" => {
builder = builder.set_requested_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"ExpirationTimestamp" => {
builder = builder.set_expiration_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"Action" => {
builder = builder.set_action(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::ActionType::from(u.as_ref()))
})
.transpose()?,
);
}
"Resources" => {
builder = builder.set_resources(
crate::json_deser::deser_list_com_amazonaws_organizations_handshake_resources(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_create_account_status<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::CreateAccountStatus>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::CreateAccountStatus::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"AccountName" => {
builder = builder.set_account_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"State" => {
builder = builder.set_state(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::CreateAccountState::from(u.as_ref())
})
})
.transpose()?,
);
}
"RequestedTimestamp" => {
builder = builder.set_requested_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"CompletedTimestamp" => {
builder = builder.set_completed_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"AccountId" => {
builder = builder.set_account_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"GovCloudAccountId" => {
builder = builder.set_gov_cloud_account_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"FailureReason" => {
builder = builder.set_failure_reason(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::CreateAccountFailureReason::from(
u.as_ref(),
)
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_organization<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Organization>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Organization::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"FeatureSet" => {
builder = builder.set_feature_set(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::OrganizationFeatureSet::from(u.as_ref())
})
})
.transpose()?,
);
}
"MasterAccountArn" => {
builder = builder.set_master_account_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"MasterAccountId" => {
builder = builder.set_master_account_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"MasterAccountEmail" => {
builder = builder.set_master_account_email(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"AvailablePolicyTypes" => {
builder = builder.set_available_policy_types(
crate::json_deser::deser_list_com_amazonaws_organizations_policy_types(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_organizational_unit<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::OrganizationalUnit>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::OrganizationalUnit::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Name" => {
builder = builder.set_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_policy<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Policy>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Policy::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"PolicySummary" => {
builder = builder.set_policy_summary(
crate::json_deser::deser_structure_crate_model_policy_summary(
tokens,
)?,
);
}
"Content" => {
builder = builder.set_content(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_account<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Account>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Account::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Email" => {
builder = builder.set_email(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Name" => {
builder = builder.set_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Status" => {
builder = builder.set_status(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::AccountStatus::from(u.as_ref()))
})
.transpose()?,
);
}
"JoinedMethod" => {
builder = builder.set_joined_method(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::AccountJoinedMethod::from(u.as_ref())
})
})
.transpose()?,
);
}
"JoinedTimestamp" => {
builder = builder.set_joined_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_effective_policy<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::EffectivePolicy>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::EffectivePolicy::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"PolicyContent" => {
builder = builder.set_policy_content(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"LastUpdatedTimestamp" => {
builder = builder.set_last_updated_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"TargetId" => {
builder = builder.set_target_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"PolicyType" => {
builder = builder.set_policy_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::EffectivePolicyType::from(u.as_ref())
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_root<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Root>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Root::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Name" => {
builder = builder.set_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"PolicyTypes" => {
builder = builder.set_policy_types(
crate::json_deser::deser_list_com_amazonaws_organizations_policy_types(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
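/// Deserializes a JSON array of `Account` structures. The list deserializers that
/// follow share this shape: accept `null` or an array, peek for the closing
/// `EndArray`, and delegate each element to its structure deserializer, dropping
/// `null` elements.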
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_accounts<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Account>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_crate_model_account(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_enabled_service_principals<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::EnabledServicePrincipal>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_enabled_service_principal(tokens)?
;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_children<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Child>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_crate_model_child(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_create_account_statuses<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::CreateAccountStatus>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_create_account_status(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_delegated_administrators<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::DelegatedAdministrator>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_delegated_administrator(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_delegated_services<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::DelegatedService>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_delegated_service(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_handshakes<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Handshake>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_handshake(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_organizational_units<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::OrganizationalUnit>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_organizational_unit(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_parents<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Parent>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_crate_model_parent(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_policies<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::PolicySummary>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_policy_summary(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_roots<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Root>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_crate_model_root(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_tags<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Tag>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value = crate::json_deser::deser_structure_crate_model_tag(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_policy_targets<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::PolicyTargetSummary>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_policy_target_summary(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
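/// Deserializes the `Parties` array of a `Handshake` into `HandshakeParty` values.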
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_handshake_parties<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::HandshakeParty>>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_handshake_party(tokens)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_handshake_resources<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::HandshakeResource>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_handshake_resource(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_com_amazonaws_organizations_policy_types<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::vec::Vec<crate::model::PolicyTypeSummary>>,
aws_smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartArray { .. }) => {
let mut items = Vec::new();
loop {
match tokens.peek() {
Some(Ok(aws_smithy_json::deserialize::Token::EndArray { .. })) => {
tokens.next().transpose().unwrap();
break;
}
_ => {
let value =
crate::json_deser::deser_structure_crate_model_policy_type_summary(
tokens,
)?;
if let Some(value) = value {
items.push(value);
}
}
}
}
Ok(Some(items))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
pub fn deser_structure_crate_model_policy_summary<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PolicySummary>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::PolicySummary::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Name" => {
builder = builder.set_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Description" => {
builder = builder.set_description(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Type" => {
builder = builder.set_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::PolicyType::from(u.as_ref()))
})
.transpose()?,
);
}
"AwsManaged" => {
builder = builder.set_aws_managed(
aws_smithy_json::deserialize::token::expect_bool_or_null(
tokens.next(),
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_enabled_service_principal<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::EnabledServicePrincipal>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::EnabledServicePrincipal::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"ServicePrincipal" => {
builder = builder.set_service_principal(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"DateEnabled" => {
builder = builder.set_date_enabled(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_child<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Child>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Child::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Type" => {
builder = builder.set_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::ChildType::from(u.as_ref()))
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_delegated_administrator<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DelegatedAdministrator>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::DelegatedAdministrator::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Email" => {
builder = builder.set_email(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Name" => {
builder = builder.set_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Status" => {
builder = builder.set_status(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::AccountStatus::from(u.as_ref()))
})
.transpose()?,
);
}
"JoinedMethod" => {
builder = builder.set_joined_method(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::AccountJoinedMethod::from(u.as_ref())
})
})
.transpose()?,
);
}
"JoinedTimestamp" => {
builder = builder.set_joined_timestamp(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
"DelegationEnabledDate" => {
builder = builder.set_delegation_enabled_date(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_delegated_service<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DelegatedService>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::DelegatedService::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"ServicePrincipal" => {
builder = builder.set_service_principal(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"DelegationEnabledDate" => {
builder = builder.set_delegation_enabled_date(
aws_smithy_json::deserialize::token::expect_timestamp_or_null(
tokens.next(),
aws_smithy_types::instant::Format::EpochSeconds,
)?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_parent<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Parent>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Parent::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Type" => {
builder = builder.set_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::ParentType::from(u.as_ref()))
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_tag<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Tag>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Tag::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Key" => {
builder = builder.set_key(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Value" => {
builder = builder.set_value(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_policy_target_summary<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PolicyTargetSummary>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::PolicyTargetSummary::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"TargetId" => {
builder = builder.set_target_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Arn" => {
builder = builder.set_arn(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Name" => {
builder = builder.set_name(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Type" => {
builder = builder.set_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::TargetType::from(u.as_ref()))
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_handshake_party<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::HandshakeParty>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::HandshakeParty::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Type" => {
builder = builder.set_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::HandshakePartyType::from(u.as_ref())
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_handshake_resource<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::HandshakeResource>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::HandshakeResource::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Value" => {
builder = builder.set_value(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Type" => {
builder = builder.set_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::HandshakeResourceType::from(u.as_ref())
})
})
.transpose()?,
);
}
"Resources" => {
builder = builder.set_resources(
crate::json_deser::deser_list_com_amazonaws_organizations_handshake_resources(tokens)?
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_crate_model_policy_type_summary<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PolicyTypeSummary>, aws_smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<aws_smithy_json::deserialize::Token<'a>, aws_smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(aws_smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::PolicyTypeSummary::builder();
loop {
match tokens.next().transpose()? {
Some(aws_smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(aws_smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Type" => {
builder = builder.set_type(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped()
.map(|u| crate::model::PolicyType::from(u.as_ref()))
})
.transpose()?,
);
}
"Status" => {
builder = builder.set_status(
aws_smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::PolicyTypeStatus::from(u.as_ref())
})
})
.transpose()?,
);
}
_ => aws_smithy_json::deserialize::token::skip_value(tokens)?,
}
}
other => {
return Err(aws_smithy_json::deserialize::Error::custom(format!(
"expected object key or end object, found: {:?}",
other
)))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(aws_smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
| deser_list_com_amazonaws_organizations_handshake_parties |
files.ts | import { connection } from '@configs/database';
import { isLoggedIn } from '@shared/passport.utils';
import { Request, Response, Router } from 'express';
import { existsSync, unlink } from 'fs';
import { ErrorResponse, ShortFileListResponse, SuccessResponse } from '../models/response.model';
import { ShortFile } from '../models/short-file.model';
// Init shared
const router = Router();
router.get('/list', isLoggedIn(), async (req: Request, res: Response) => {
connection.query('SELECT * FROM files;', (err, result) => {
if (err) {
res.json(new ErrorResponse(err.message));
} else { | res.json(
new ShortFileListResponse(result.filter((item: any) => existsSync(item.path)).map((dbEntry: any) => ShortFile.getShortFileFromDbEntry(dbEntry)))
);
}
});
});
router.post('/create', isLoggedIn(), async (req: Request, res: Response) => {
const file = req.files?.file;
const { description, scope } = req.body;
if (file) {
connection.query(
'INSERT INTO `igs`.`files` (`name`, `author_id`, `author_name`, `mimetype`, `creationDate`, `description`, `scope`) VALUES (?, ?, ?, ?, ?, ?, ?);',
[file.name, res.locals.user.id, res.locals.user.name, file.mimetype, new Date().getTime(), description, scope],
(err, result) => {
if (err) {
res.json(new ErrorResponse(err.message));
} else {
const path = '/usr/src/app/files/' + result.insertId + '.' + file.name.split('.')[file.name.split('.').length - 1];
file.mv(path);
connection.query('UPDATE `igs`.`files` SET `path` = ? WHERE (`id` = ?);', [path, result.insertId], err => {
res.json(err ? new ErrorResponse(err.message) : new SuccessResponse());
});
}
}
);
} else {
res.json(new ErrorResponse('No file attached.'));
}
});
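// Illustrative client call for the /create route above (the file name and the
// field values are placeholders, and the mount path depends on how this router
// is registered):
//   curl -X POST -F "file=@report.pdf" -F "description=Example file" -F "scope=public" .../create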
router.post('/remove', isLoggedIn(), async (req: Request, res: Response) => {
const { id } = req.body;
if (id) {
connection.query('SELECT path, author_id FROM files WHERE (id = ?);', [id], (err, result) => {
if (err) {
res.json(new ErrorResponse(err.message));
} else if (!result.length || !result[0].path) {
res.json(new ErrorResponse('No file found.'));
} else {
				/** Checks if the user is the author of the file or has wildcard access */
if (Number(result[0]['author_id']) === Number(res.locals.user.id) || res.locals.user.isSuperUser) {
unlink(result[0].path, err => {
if (err) {
res.json(new ErrorResponse(err.message));
} else {
connection.query('DELETE FROM `igs`.`files` WHERE (`id` = ?);', [id], err => {
res.json(err ? new ErrorResponse(err.message) : new SuccessResponse());
});
}
});
} else {
					res.json(new ErrorResponse('You are not the author of this file.'));
}
}
});
} else {
res.json(new ErrorResponse('No file specified.'));
}
});
router.get('/get/:id', isLoggedIn(), async (req: Request, res: Response) => {
const { id } = req.params;
if (id) {
connection.query('SELECT path FROM files WHERE (id = ?);', [id], (err, result) => {
if (err) {
res.json(new ErrorResponse(err.message));
} else if (!result.length || !result[0].path) {
res.json(new ErrorResponse('No file found.'));
} else {
res.download(result[0].path, err => {
if (err) res.json(new ErrorResponse(err.message));
});
}
});
} else {
res.json(new ErrorResponse('No file specified.'));
}
});
router.get('/satzung', async (req: Request, res: Response) => {
const path = './src/static/Satzung.pdf';
if (existsSync(path)) {
res.download(path);
} else {
res.status(404).send(new ErrorResponse('"Satzung" was not found.'));
}
});
export default router; | |
httpserver.go | // +build !js
package main
import (
"flag"
"fmt"
"io"
"log"
"net/http"
"os"
"os/exec"
"path/filepath"
"strings"
"time"
"github.com/fsnotify/fsnotify"
"github.com/gobwas/glob"
)
// arrayFlags is an array of string flags.
type arrayFlags []string
func (i *arrayFlags) String() string {
return "ArrayFlags"
}
func (i *arrayFlags) Set(value string) error {
*i = append(*i, value)
return nil
}
var (
cmd string
filter string
resourceDirectory string
port int
globFilter glob.Glob
dirs arrayFlags
args arrayFlags
current *exec.Cmd
lastTime time.Time
watcher *fsnotify.Watcher
client *wsclient
)
func watchFiles() |
func main() {
//Parse the flag
flag.StringVar(&cmd, "cmd", "go build", "The command that will be executed when a change has been discovered")
flag.StringVar(&filter, "filter", "*.go", "Filters the files that are modified")
flag.StringVar(&resourceDirectory, "resources", "./resources/", "Resource Directory")
flag.IntVar(&port, "port", 8090, "Port to host the webserver on")
flag.Var(&args, "args", "Arguments for the command")
	flag.Var(&dirs, "dir", "Folders to watch for changes.")
flag.Parse()
//Setup the file watcher
globFilter = glob.MustCompile(filter)
fileWatcher, err := fsnotify.NewWatcher()
if err != nil {
log.Fatal(err)
}
defer fileWatcher.Close()
watcher = fileWatcher
for _, f := range dirs {
log.Println("Watching: ", f)
err = watcher.Add(f)
if err != nil {
log.Fatal(err)
}
}
go watchFiles()
	// Serve the files
baseFileServe := http.FileServer(http.Dir("./"))
http.Handle("/", http.StripPrefix("/", baseFileServe))
resourceFileServe := http.FileServer(http.Dir(resourceDirectory))
http.Handle("/resources/", http.StripPrefix("/resources/", resourceFileServe))
	// Listen for websocket clients
client = &wsclient{}
log.Println("Serving on: ", port)
http.HandleFunc("/listen", client.handle)
http.ListenAndServe(fmt.Sprintf(":%d", port), nil)
}
// runCommand runs the command with the given name and arguments. It streams
// the command's stdout and stderr to the parent's standard output and error.
func runCommand(name string, args ...string) (*exec.Cmd, error) {
cmd := exec.Command(name, args...)
stderr, err := cmd.StderrPipe()
if err != nil {
return cmd, err
}
stdout, err := cmd.StdoutPipe()
if err != nil {
return cmd, err
}
if err := cmd.Start(); err != nil {
return cmd, err
}
go io.Copy(os.Stdout, stdout)
go io.Copy(os.Stderr, stderr)
return cmd, nil
}
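// Illustrative usage of runCommand (the command and arguments are placeholders):
//
//	if cmd, err := runCommand("go", "build", "./..."); err == nil {
//		cmd.Wait()
//	}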
| {
for {
select {
case event, ok := <-watcher.Events:
if !ok {
return
}
now := time.Now()
diff := now.Sub(lastTime)
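			// debounce: ignore events within 5 seconds of the last successful build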
if diff.Milliseconds() >= 5000 {
if event.Op&fsnotify.Write == fsnotify.Write {
absFile, _ := filepath.Abs(event.Name)
absFile = strings.Replace(absFile, "\\", "/", -1)
if globFilter.Match(absFile) {
log.Println("Starting Build", absFile)
cmd, err := runCommand(cmd, args...)
if err == nil {
lastTime = now
current = cmd
current.Wait()
client.Broadcast(payloadEvent{Event: "BuildSuccess", Asset: absFile})
log.Println("Build Complete")
} else {
client.Broadcast(payloadEvent{Event: "BuildFailure", Asset: absFile})
log.Println("Build Failed", err)
}
					// Clear the current command
current = nil
} else {
client.Broadcast(payloadEvent{Event: "AssetUpdated", Asset: absFile})
}
}
}
case err, ok := <-watcher.Errors:
if !ok {
return
}
log.Println("error:", err)
}
}
} |
C.go | package C
import (
"bufio"
"io"
"os"
"strconv"
"strings"
"fmt"
)
var (
nstr func() string
stdout *bufio.Writer
)
func init() {
nstr = newReadString(os.Stdin)
stdout = bufio.NewWriter(os.Stdout)
}
func main() {
defer stdout.Flush()
r := nint()
c := nint()
sx := nint()
sy := nint() | gy := nint()
start := yx{sx-1,sy-1,}
goal := yx{gx-1, gy-1,}
nodes := make(map[yx]string)
for i:=0;i < r ;i++ {
line := strings.Split(nstr(), "")
for j:=0;j <c ;j++ {
tmp := yx{i, j,}
nodes[tmp] = line[j]
}
}
fmt.Println(BFS(start, goal, nodes))
}
type yx struct{
y int
x int
}
func BFS(start yx, goal yx, nodes map[yx]string) int{
next := []yx{start}
visited := make(map[yx]bool)
route := make(map[yx]int)
route[start] = 0
visited[start] = true
for len(next) > 0 {
node := next[0]
next = next[1:]
if node.x == goal.x && node.y == goal.y {
return route[node]
}
up := yx{node.y -1, node.x }
down := yx{node.y + 1 , node.x}
left := yx{node.y , node.x-1}
right := yx{node.y , node.x+1}
if !visited[up] && nodes[up] == "." {
next = append(next, up)
route[up] = route[node] + 1
visited[up] = true
}
if !visited[down] && nodes[down] == "." {
next = append(next, down)
route[down] = route[node]+ 1
visited[down] = true
}
if !visited[right] && nodes[right] == "." {
next = append(next, right)
route[right] = route[node] + 1
visited[right] = true
}
if !visited[left] && nodes[left] == "." {
next = append(next, left)
route[left] = route[node] +1
visited[left] = true
}
}
return route[goal]
}
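// Illustrative run of BFS (input values assumed): with r=3, c=3,
// start=(1,1), goal=(3,3) and the grid
//
//	...
//	.#.
//	...
//
// the call returns 4, the minimum number of moves around the wall.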
func newReadString(ior io.Reader) func() string {
r := bufio.NewScanner(ior)
r.Buffer(make([]byte, 1024), int(1e+11))
r.Split(bufio.ScanWords)
return func() string {
if !r.Scan() {
panic("Scan failed")
}
return r.Text()
}
}
func nint64() int64 {
i, err := strconv.ParseInt(nstr(), 0, 64)
if err != nil {
panic(err.Error())
}
return i
}
func nint() int {
return int(nint64())
}
func getIntSlice(n int) []int {
b := make([]int, n)
for i := 0; i < n; i++ {
b[i] = nint()
}
return b
}
func getStringSlice(n int) []string {
b := make([]string, n)
for i := 0; i < n; i++ {
b[i] = nstr()
}
return b
}
func gcd(x, y int) int {
if y == 0 {
return x
}
return gcd(y, x%y)
}
func lcm(x, y int) int {
return x * y / gcd(x, y)
}
func contains(s []int, e int) bool {
for _, a := range s {
if a == e {
return true
}
}
return false
}
func bin(i int, prefix bool) string {
i64 := int64(i)
if prefix {
return "0b" + strconv.FormatInt(i64, 2)
} else {
return strconv.FormatInt(i64, 2)
}
}
func strReverse(s string) string {
runes := []rune(s)
for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 {
runes[i], runes[j] = runes[j], runes[i]
}
return string(runes)
} | gx := nint() |
trans_trans.py | import torch
import torch.nn as nn
from torch.autograd import Variable
from torch.optim.lr_scheduler import StepLR
from torchvision import datasets
import torchvision.transforms as transforms
from self_attention_cv import TransformerEncoder
import argparse
import math
import numpy as np
from torchvision import datasets, models
import os
from cifar_generator import CIFAR10
parser = argparse.ArgumentParser()
parser.add_argument("-test","--isTest",type = bool, default=False)
args = parser.parse_args()
torch.manual_seed(0)
isTest = args.isTest
CHECKTEACHER = False
EPOCH = 1
BATCH_SIZE = 1
DIM = 28
DIM2 = 6
HIDDEN = False
studentPth = './trans_learnt_student.pth'
teacherPth = './trans_teacher_test.pth'
lFunc = nn.CrossEntropyLoss()
tokenSize = 8
cropIs = [tokenSize * i for i in range(1, DIM // tokenSize + 1)]
class Classifier(nn.Module):
def __init__(self):
super(Classifier, self).__init__()
self.hidden = nn.Linear(12 * 192, 100)
self.out = nn.Linear(100, 10)
def forward(self, x):
x = x.reshape(len(x), -1)
x = self.hidden(x)
x = self.out(x)
return x
def getCrops(inputs):
batch = np.zeros((len(inputs), (DIM ** 2) // (tokenSize ** 2), 3, tokenSize, tokenSize))
for batchI, input in enumerate(inputs):
tokenI = 0
for i in cropIs:
for j in cropIs:
token = input[:, i - tokenSize:i, j - tokenSize:j]
batch[batchI, tokenI, :, :, :] = token
tokenI += 1
batch = torch.from_numpy(batch)
batch = torch.flatten(batch, start_dim = -3)
return batch
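# Shape sketch for getCrops (constants as defined above): a batch of
# (B, 3, 28, 28) inputs becomes a (B, 12, 192) tensor of flattened 8x8x3
# patches; note that with DIM=28 and tokenSize=8 only the first 9 of the 12
# allocated token slots are actually populated.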
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Linear') != -1:
m.weight.data.normal_(0, 0.01)
m.bias.data = torch.ones(m.bias.data.size())
def get_loss(out, target):
loss = torch.square(out - target)
return loss
def train(trainloader, student, teacher, optimizer, scheduler, device):
print("Training...")
student.train()
for i in range(EPOCH):
epoch_loss = 0
count = 0
for inputs, _ in trainloader:
inputs = getCrops(inputs).float()
sample_features = student(Variable(inputs).to(device))
baseline_features = teacher(Variable(inputs).to(device)) # 16 * 32 * 7 * 7
optimizer.zero_grad()
loss = get_loss(sample_features, baseline_features)
loss.backward(torch.ones_like(sample_features))
optimizer.step()
epoch_loss += torch.sum(torch.sum(loss)).item()
if count % 1000 == 0:
print(count, epoch_loss / (count + 1))
count += 1
scheduler.step()
torch.save(student.state_dict(), studentPth)
def trainClassifier(trainloader, student, classifier, optimizer, device):
student.train()
count = 0
for inputs, label in trainloader:
count += 1
if count % 100 == 0:
print(count)
inputs = getCrops(inputs).float()
sample_features = student(Variable(inputs).to(device))
# print(sample_features.shape)
y = classifier(sample_features)
optimizer.zero_grad()
label = Variable(label).to(device)
loss = lFunc(y, label)
loss.backward()
optimizer.step()
def test(testloader, model, classifier, device):
print("Testing...")
model.eval()
accuracy = 0
count = 0
for inputs, labels in testloader:
inputs = getCrops(inputs).float()
sample_features = model(Variable(inputs).to(device))
y = classifier(sample_features)
pred_y = torch.max(y, 1)[1].data.squeeze()
labels = Variable(labels).to(device)
accuracy += (pred_y == labels).sum().item()
count += 1
if count % 1000 == 0:
print(count)
print('Test Accuracy of the model on the 10000 test images:', accuracy / 10000 * 100)
return accuracy
def | ():
device = torch.device("cuda")
assert os.path.exists(teacherPth)
teacher = TransformerEncoder(dim=tokenSize ** 2 * 3,blocks=2,heads=8)
for param in teacher.parameters():
param.requires_grad = False
teacher.to(device)
student = TransformerEncoder(dim=tokenSize ** 2 * 3,blocks=6,heads=8)
student.to(device)
classifier = Classifier()
classifier.apply(weights_init)
classifier.to(device)
optimizer = torch.optim.Adam([
#{"params": student.hidden.parameters(), "lr": 0.001}, ##train classifier
{"params": student.parameters(), "lr": 0.00001},
])
scheduler = StepLR(optimizer,step_size=10000,gamma=1.1)
transform = transforms.Compose(
[#transforms.Resize((DIM, DIM)),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
train_data = CIFAR10(
root = 'data',
train = True,
transform = transform,
download = False,
)
trainloader = torch.utils.data.DataLoader(train_data,
batch_size=BATCH_SIZE,
shuffle=True,
num_workers=1)
student.load_state_dict(torch.load(teacherPth))
student.to(device)
# train(trainloader, student, teacher, optimizer, scheduler, device)
test_data = datasets.CIFAR10(
root = 'data',
train = False,
transform = transforms.Compose([transforms.Resize((56, 56)), transforms.ToTensor()]),
download = True,
)
testloader = torch.utils.data.DataLoader(test_data,
batch_size=50,
shuffle=True,
num_workers=1)
optimizer = torch.optim.Adam([
#{"params": student.hidden.parameters(), "lr": 0.001}, ##train classifier
{"params": student.parameters(), "lr": 0.001},
{"params": classifier.hidden.parameters(), "lr": 0.01},
{"params": classifier.out.parameters(), "lr": 0.005},
])
trainloader = torch.utils.data.DataLoader(train_data,
batch_size=100,
shuffle=True,
num_workers=1)
for i in range(3):
trainClassifier(trainloader, student, classifier, optimizer, device) ##try freezing encoder
test(testloader, student, classifier, device)
print('Done.')
if __name__ == '__main__':
main() | main |
test_pytest_aoc.py | import datetime
import pytest
import pytest_aoc
def test_get_cookie(testdir):
|
def test_create_input_dir_1(testdir):
testdir.makepyfile(test_download_inputs='''
import pytest_aoc
def test_create_input_dir_1(mocker):
makedirs = mocker.patch('os.makedirs')
pytest_aoc.create_input_dir('input')
makedirs.assert_called_once_with('input')
''')
testdir.runpytest().assert_outcomes(passed=1)
def test_create_input_dir_2(testdir):
testdir.mkdir('input')
testdir.makepyfile(test_download_inputs='''
import pytest_aoc
def test_create_input_dir_1(mocker):
makedirs = mocker.patch('os.makedirs')
pytest_aoc.create_input_dir('input')
makedirs.assert_not_called()
''')
testdir.runpytest().assert_outcomes(passed=1)
@pytest.mark.freeze_time('2018-12-01 04:59:59.999')
def test_get_available_days_before():
assert pytest_aoc.get_available_days(2018, datetime.datetime.utcnow()) == []
@pytest.mark.freeze_time('2014-12-18 16:16')
def test_get_available_days_during():
assert pytest_aoc.get_available_days(2014, datetime.datetime.utcnow()) == [*range(1, 18+1)]
@pytest.mark.freeze_time('2018-08-14 19:26')
def test_get_available_days_after():
assert pytest_aoc.get_available_days(2017, datetime.datetime.utcnow()) == [*range(1, 25+1)]
def test_download_inputs_1(testdir):
testdir.makepyfile(test_download_inputs='''
import pytest_aoc
def test_download_inputs(responses):
responses.add(responses.GET, 'https://adventofcode.com/2018/day/1/input', body='spam')
pytest_aoc.download_inputs(2018, [1], '.', 'spam', '.cookie')
with open('day01.txt', 'r') as f:
assert f.read() == 'spam'
''')
testdir.runpytest().assert_outcomes(passed=1)
def test_download_inputs_2(testdir):
testdir.makepyfile(test_download_inputs='''
import os.path
import pytest_aoc
def test_download_inputs(responses):
responses.add(responses.GET, 'https://adventofcode.com/2018/day/1/input', status=400)
pytest_aoc.download_inputs(2018, [1], '.', 'spam', '.cookie')
assert not os.path.exists('day01.txt')
''')
testdir.runpytest().assert_outcomes(passed=1)
@pytest.mark.freeze_time('2018-12-01 05:00:00')
@pytest.mark.parametrize('name,text,value', [
('text', 'spam ', '"spam"'),
('raw', 'spam ', '"spam "'),
('lines', 'spam\neggs\n', '["spam", "eggs"]'),
('numbers', '529\n127\n', '[529, 127]'),
('number', '529', '529'),
('grid', 'a b\nc d\n', '[["a", "b"], ["c", "d"]]'),
('number_grid', '1 2\n3 4\n', '[[1, 2], [3, 4]]')
])
def test_fixture(testdir, name, text, value):
with open('day01.txt', 'w') as f:
f.write(text)
testdir.makepyfile(test_fixture=f'def test_{name}(day01_{name}): assert day01_{name} == {value}')
testdir.runpytest('--aoc-year=2018', '--aoc-input-dir=.').assert_outcomes(passed=1)
@pytest.mark.freeze_time('2021-12-01 05:00:00')
def test_fixture_parsed_raw(testdir):
with open('day01.txt', 'w') as f:
f.write('albatross')
testdir.makepyfile(test_fixture='''
def parse(text):
return text[::-1]
def test_parsed_raw(day01_parsed_raw):
assert day01_parsed_raw == 'ssortabla'
''')
testdir.runpytest('--aoc-year=2021', '--aoc-input-dir=.').assert_outcomes(passed=1)
@pytest.mark.freeze_time('2021-12-01 05:00:00')
def test_fixture_parsed_text(testdir):
with open('day01.txt', 'w') as f:
f.write(' albatross ')
testdir.makepyfile(test_fixture='''
def parse(text):
return text[::-1]
def test_parsed_text(day01_parsed_text):
assert day01_parsed_text == 'ssortabla'
''')
testdir.runpytest('--aoc-year=2021', '--aoc-input-dir=.').assert_outcomes(passed=1)
@pytest.mark.freeze_time('2021-12-01 05:00:00')
def test_fixture_parsed_lines(testdir):
with open('day01.txt', 'w') as f:
f.write(' albatross \nspam \n eggs\nbacon\n')
testdir.makepyfile(test_fixture='''
def parse(text):
return text[::-1]
def test_parsed_text(day01_parsed_lines):
assert day01_parsed_lines == ['ssortabla', 'maps', 'sgge', 'nocab']
''')
testdir.runpytest('--aoc-year=2021', '--aoc-input-dir=.').assert_outcomes(passed=1)
def test_module_reload(): # this probably breaks pytest assertion rewriting...
from importlib import reload
reload(pytest_aoc)
| testdir.maketxtfile(cookie='spam')
testdir.makepyfile(test_get_cookie='''
import pytest_aoc
def test_get_cookie_from_session_id():
assert pytest_aoc.get_cookie('eggs', 'cookie.txt') == 'eggs'
def test_get_cookie_from_session_file():
assert pytest_aoc.get_cookie(None, 'cookie.txt') == 'spam'
''')
testdir.runpytest().assert_outcomes(passed=2) |
QATransform.py | # coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2019 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import csv
import json
import numpy as np
import pandas as pd
def QA_util_to_json_from_pandas(data):
"""
explanation:
        Convert pandas data into JSON format
params:
* data ->:
            meaning: pandas data
type: null
optional: [null]
return:
        list of dict
demonstrate:
Not described
output:
Not described
"""
"""需要对于datetime 和date 进行转换, 以免直接被变成了时间戳"""
if 'datetime' in data.columns:
data.datetime = data.datetime.apply(str)
if 'date' in data.columns:
data.date = data.date.apply(str)
return json.loads(data.to_json(orient='records'))
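# Illustrative usage (data values assumed):
#   df = pd.DataFrame({'code': ['000001'], 'close': [10.5]})
#   QA_util_to_json_from_pandas(df)  # -> [{'code': '000001', 'close': 10.5}]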
def QA_util_to_json_from_numpy(data):
pass
def QA_util_to_json_from_list(data):
pass
def QA_util_to_list_from_pandas(data):
"""
ex | ation:
        Convert pandas data into a list
params:
* data ->:
            meaning: pandas data
type: null
optional: [null]
return:
list
demonstrate:
Not described
output:
Not described
"""
return np.asarray(data).tolist()
def QA_util_to_list_from_numpy(data):
"""
explanation:
        Convert numpy data into a list
params:
* data ->:
            meaning: numpy data
type: null
optional: [null]
return:
        list
demonstrate:
Not described
output:
Not described
"""
return data.tolist()
def QA_util_to_pandas_from_json(data):
"""
explanation:
        Load JSON data into a pandas DataFrame
params:
* data ->:
            meaning: JSON data
type: null
optional: [null]
return:
DataFrame
demonstrate:
Not described
output:
Not described
"""
if isinstance(data, dict):
return pd.DataFrame(data=[data, ])
else:
return pd.DataFrame(data=[{'value': data}])
def QA_util_to_pandas_from_list(data):
"""
explanation:
        Convert list data into a pandas DataFrame
params:
* data ->:
            meaning: list data
type: list
optional: [null]
return:
DataFrame
demonstrate:
Not described
output:
Not described
"""
if isinstance(data, list):
return pd.DataFrame(data=data)
| plan |
team.rs | use crate::{
add_team_to_crate,
builders::{CrateBuilder, PublishBuilder},
new_team,
record::GhUser,
OwnerTeamsResponse, RequestHelper, TestApp,
};
use cargo_registry::models::{Crate, NewUser};
use std::sync::Once;
use conduit::StatusCode;
use diesel::*;
impl crate::util::MockAnonymousUser {
/// List the team owners of the specified crate.
fn crate_owner_teams(&self, krate_name: &str) -> crate::util::Response<OwnerTeamsResponse> |
}
// Users: `crates-tester-1` and `crates-tester-2`
// Passwords: ask acrichto or gankro
// Teams: `crates-test-org:core`, `crates-test-org:just-for-crates-2`
// tester-1 is on core only, tester-2 is on both
static GH_USER_1: GhUser = GhUser {
login: "crates-tester-1",
init: Once::new(),
};
static GH_USER_2: GhUser = GhUser {
login: "crates-tester-2",
init: Once::new(),
};
fn mock_user_on_only_one_team() -> NewUser<'static> {
GH_USER_1.user()
}
fn mock_user_on_both_teams() -> NewUser<'static> {
GH_USER_2.user()
}
// Test adding team without `github:`
#[test]
fn not_github() {
let (app, _, user, token) = TestApp::init().with_token();
app.db(|conn| {
CrateBuilder::new("foo_not_github", user.as_model().id).expect_build(conn);
});
let json = token
.add_named_owner("foo_not_github", "dropbox:foo:foo")
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0].detail.contains("unknown organization"),
"{:?}",
json.errors
);
}
#[test]
fn weird_name() {
let (app, _, user, token) = TestApp::init().with_token();
app.db(|conn| {
CrateBuilder::new("foo_weird_name", user.as_model().id).expect_build(conn);
});
let json = token
.add_named_owner("foo_weird_name", "github:foo/../bar:wut")
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0]
.detail
.contains("organization cannot contain"),
"{:?}",
json.errors
);
}
// Test adding team without second `:`
#[test]
fn one_colon() {
let (app, _, user, token) = TestApp::init().with_token();
app.db(|conn| {
CrateBuilder::new("foo_one_colon", user.as_model().id).expect_build(conn);
});
let json = token
.add_named_owner("foo_one_colon", "github:foo")
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0].detail.contains("missing github team"),
"{:?}",
json.errors
);
}
#[test]
fn nonexistent_team() {
let (app, _, user, token) = TestApp::with_proxy().with_token();
app.db(|conn| {
CrateBuilder::new("foo_nonexistent", user.as_model().id).expect_build(conn);
});
let json = token
.add_named_owner(
"foo_nonexistent",
"github:crates-test-org:this-does-not-exist",
)
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0]
.detail
.contains("could not find the github team crates-test-org/this-does-not-exist"),
"{:?}",
json.errors
);
}
// Test adding team names with mixed case, when on the team
#[test]
fn add_team_mixed_case() {
let (app, anon) = TestApp::with_proxy().empty();
let user = app.db_new_user(mock_user_on_both_teams().gh_login);
let token = user.db_new_token("arbitrary token name");
app.db(|conn| {
CrateBuilder::new("foo_mixed_case", user.as_model().id).expect_build(conn);
});
token
.add_named_owner("foo_mixed_case", "github:Crates-Test-Org:Core")
.good();
app.db(|conn| {
let krate = Crate::by_name("foo_mixed_case")
.first::<Crate>(conn)
.unwrap();
let owners = krate.owners(conn).unwrap();
assert_eq!(owners.len(), 2);
let owner = &owners[1];
assert_eq!(owner.login(), owner.login().to_lowercase());
});
let json = anon.crate_owner_teams("foo_mixed_case").good();
assert_eq!(json.teams.len(), 1);
assert_eq!(json.teams[0].login, "github:crates-test-org:core");
}
// Test adding team as owner when not on it
#[test]
fn add_team_as_non_member() {
let (app, _) = TestApp::with_proxy().empty();
let user = app.db_new_user(mock_user_on_only_one_team().gh_login);
let token = user.db_new_token("arbitrary token name");
app.db(|conn| {
CrateBuilder::new("foo_team_non_member", user.as_model().id).expect_build(conn);
});
let json = token
.add_named_owner(
"foo_team_non_member",
"github:crates-test-org:just-for-crates-2",
)
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0]
.detail
.contains("only members of a team can add it as an owner"),
"{:?}",
json.errors
);
}
#[test]
fn remove_team_as_named_owner() {
let (app, _) = TestApp::with_proxy().empty();
let username = mock_user_on_both_teams().gh_login;
let user_on_both_teams = app.db_new_user(username);
let token_on_both_teams = user_on_both_teams.db_new_token("arbitrary token name");
app.db(|conn| {
CrateBuilder::new("foo_remove_team", user_on_both_teams.as_model().id).expect_build(conn);
});
token_on_both_teams
.add_named_owner("foo_remove_team", "github:crates-test-org:core")
.good();
    // Removing the individual owner is not allowed, since a crate cannot be
    // left with only team owners
let json = token_on_both_teams
.remove_named_owner("foo_remove_team", username)
.bad_with_status(StatusCode::OK);
assert!(json.errors[0]
.detail
.contains("cannot remove all individual owners of a crate"));
token_on_both_teams
.remove_named_owner("foo_remove_team", "github:crates-test-org:core")
.good();
let user_on_one_team = app.db_new_user(mock_user_on_only_one_team().gh_login);
let crate_to_publish = PublishBuilder::new("foo_remove_team").version("2.0.0");
let json = user_on_one_team
.enqueue_publish(crate_to_publish)
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0]
.detail
.contains("this crate exists but you don't seem to be an owner.",),
"{:?}",
json.errors
);
}
#[test]
fn remove_team_as_team_owner() {
let (app, _) = TestApp::with_proxy().empty();
let user_on_both_teams = app.db_new_user(mock_user_on_both_teams().gh_login);
let token_on_both_teams = user_on_both_teams.db_new_token("arbitrary token name");
app.db(|conn| {
CrateBuilder::new("foo_remove_team_owner", user_on_both_teams.as_model().id)
.expect_build(conn);
});
token_on_both_teams
.add_named_owner("foo_remove_team_owner", "github:crates-test-org:core")
.good();
let user_on_one_team = app.db_new_user(mock_user_on_only_one_team().gh_login);
let token_on_one_team = user_on_one_team.db_new_token("arbitrary token name");
let json = token_on_one_team
.remove_named_owner("foo_remove_team_owner", "github:crates-test-org:core")
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0]
.detail
.contains("team members don't have permission to modify owners",),
"{:?}",
json.errors
);
}
// Test trying to publish a crate we don't own
#[test]
fn publish_not_owned() {
let (app, _) = TestApp::with_proxy().empty();
let user_on_both_teams = app.db_new_user(mock_user_on_both_teams().gh_login);
let token_on_both_teams = user_on_both_teams.db_new_token("arbitrary token name");
app.db(|conn| {
CrateBuilder::new("foo_not_owned", user_on_both_teams.as_model().id).expect_build(conn);
});
token_on_both_teams
.add_named_owner("foo_not_owned", "github:crates-test-org:just-for-crates-2")
.good();
let user_on_one_team = app.db_new_user(mock_user_on_only_one_team().gh_login);
let crate_to_publish = PublishBuilder::new("foo_not_owned").version("2.0.0");
let json = user_on_one_team
.enqueue_publish(crate_to_publish)
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0]
.detail
.contains("this crate exists but you don't seem to be an owner.",),
"{:?}",
json.errors
);
}
// Test trying to publish a krate we do own (but only because of teams)
#[test]
fn publish_owned() {
let (app, _) = TestApp::full().empty();
let user_on_both_teams = app.db_new_user(mock_user_on_both_teams().gh_login);
let token_on_both_teams = user_on_both_teams.db_new_token("arbitrary token name");
app.db(|conn| {
CrateBuilder::new("foo_team_owned", user_on_both_teams.as_model().id).expect_build(conn);
});
token_on_both_teams
.add_named_owner("foo_team_owned", "github:crates-test-org:core")
.good();
let user_on_one_team = app.db_new_user(mock_user_on_only_one_team().gh_login);
let crate_to_publish = PublishBuilder::new("foo_team_owned").version("2.0.0");
user_on_one_team.enqueue_publish(crate_to_publish).good();
}
// Test trying to change owners (when only on an owning team)
#[test]
fn add_owners_as_team_owner() {
let (app, _) = TestApp::with_proxy().empty();
let user_on_both_teams = app.db_new_user(mock_user_on_both_teams().gh_login);
let token_on_both_teams = user_on_both_teams.db_new_token("arbitrary token name");
app.db(|conn| {
CrateBuilder::new("foo_add_owner", user_on_both_teams.as_model().id).expect_build(conn);
});
token_on_both_teams
.add_named_owner("foo_add_owner", "github:crates-test-org:core")
.good();
let user_on_one_team = app.db_new_user(mock_user_on_only_one_team().gh_login);
let token_on_one_team = user_on_one_team.db_new_token("arbitrary token name");
let json = token_on_one_team
.add_named_owner("foo_add_owner", "arbitrary_username")
.bad_with_status(StatusCode::OK);
assert!(
json.errors[0]
.detail
.contains("team members don't have permission to modify owners",),
"{:?}",
json.errors
);
}
#[test]
fn crates_by_team_id() {
let (app, anon, user) = TestApp::init().with_user();
let user = user.as_model();
let team = app.db(|conn| {
let t = new_team("github:crates-test-org:team_foo")
.create_or_update(conn)
.unwrap();
let krate = CrateBuilder::new("foo", user.id).expect_build(conn);
add_team_to_crate(&t, &krate, user, conn).unwrap();
t
});
let json = anon.search(&format!("team_id={}", team.id));
assert_eq!(json.crates.len(), 1);
}
#[test]
fn crates_by_team_id_not_including_deleted_owners() {
    // This needs to use the proxy because removing a team checks with github that you're on the
// team before you're allowed to remove it from the crate
let (app, anon) = TestApp::with_proxy().empty();
let user = app.db_new_user(mock_user_on_both_teams().gh_login);
let user = user.as_model();
let team = app.db(|conn| {
let t = new_team("github:crates-test-org:core")
.create_or_update(conn)
.unwrap();
let krate = CrateBuilder::new("foo", user.id).expect_build(conn);
add_team_to_crate(&t, &krate, user, conn).unwrap();
krate
.owner_remove(app.as_inner(), conn, user, &t.login)
.unwrap();
t
});
let json = anon.search(&format!("team_id={}", team.id));
assert_eq!(json.crates.len(), 0);
}
| {
let url = format!("/api/v1/crates/{}/owner_team", krate_name);
self.get(&url)
} |
83_e_deleteDuplicates.go | package main
func | (head *ListNode) *ListNode {
cursor := head
if head == nil {
return nil
}
if head.Next == nil {
return head
}
	// walk the list, comparing each node with its successor
	for cursor.Next != nil {
		// if the current value matches the next value, unlink the duplicate
		if cursor.Next.Val == cursor.Val {
			cursor.Next = cursor.Next.Next
		} else {
			// otherwise advance to the next distinct node
			cursor = cursor.Next
		}
}
return head
}
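// Example (illustrative): 1 -> 1 -> 2 -> 3 -> 3 becomes 1 -> 2 -> 3.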
// 83/100
| deleteDuplicates |
client.rs | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[derive(Debug)]
pub(crate) struct Handle {
pub(crate) client: aws_smithy_client::Client<
aws_smithy_client::erase::DynConnector,
aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,
>,
pub(crate) conf: crate::Config,
}
/// Client for Amazon Personalize Events
///
/// Client for invoking operations on Amazon Personalize Events. Each operation on Amazon Personalize Events is a method on
/// this struct. `.send()` MUST be invoked on the generated operations to dispatch the request to the service.
///
/// # Examples
/// **Constructing a client and invoking an operation**
/// ```rust,no_run
/// # async fn docs() {
/// // create a shared configuration. This can be used & shared between multiple service clients.
/// let shared_config = aws_config::load_from_env().await;
/// let client = aws_sdk_personalizeevents::Client::new(&shared_config);
/// // invoke an operation
/// /* let rsp = client
/// .<operation_name>().
/// .<param>("some value")
/// .send().await; */
/// # }
/// ```
/// **Constructing a client with custom configuration**
/// ```rust,no_run
/// use aws_config::RetryConfig;
/// # async fn docs() {
/// let shared_config = aws_config::load_from_env().await;
/// let config = aws_sdk_personalizeevents::config::Builder::from(&shared_config)
/// .retry_config(RetryConfig::disabled())
/// .build();
/// let client = aws_sdk_personalizeevents::Client::from_conf(config);
/// # }
/// ```
#[derive(std::fmt::Debug)]
pub struct Client {
handle: std::sync::Arc<Handle>,
}
impl std::clone::Clone for Client {
fn clone(&self) -> Self {
Self {
handle: self.handle.clone(),
}
}
}
#[doc(inline)]
pub use aws_smithy_client::Builder;
impl
From<
aws_smithy_client::Client<
aws_smithy_client::erase::DynConnector,
aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,
>,
> for Client
{
fn from(
client: aws_smithy_client::Client<
aws_smithy_client::erase::DynConnector,
aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,
>,
) -> Self {
Self::with_config(client, crate::Config::builder().build())
}
}
impl Client {
/// Creates a client with the given service configuration.
pub fn with_config(
client: aws_smithy_client::Client<
aws_smithy_client::erase::DynConnector,
aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,
>,
conf: crate::Config,
) -> Self {
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
/// Returns the client's configuration.
pub fn conf(&self) -> &crate::Config {
&self.handle.conf
}
}
impl Client {
/// Constructs a fluent builder for the [`PutEvents`](crate::client::fluent_builders::PutEvents) operation.
///
/// - The fluent builder is configurable:
/// - [`tracking_id(impl Into<String>)`](crate::client::fluent_builders::PutEvents::tracking_id) / [`set_tracking_id(Option<String>)`](crate::client::fluent_builders::PutEvents::set_tracking_id): <p>The tracking ID for the event. The ID is generated by a call to the <a href="https://docs.aws.amazon.com/personalize/latest/dg/API_CreateEventTracker.html">CreateEventTracker</a> API.</p>
/// - [`user_id(impl Into<String>)`](crate::client::fluent_builders::PutEvents::user_id) / [`set_user_id(Option<String>)`](crate::client::fluent_builders::PutEvents::set_user_id): <p>The user associated with the event.</p>
/// - [`session_id(impl Into<String>)`](crate::client::fluent_builders::PutEvents::session_id) / [`set_session_id(Option<String>)`](crate::client::fluent_builders::PutEvents::set_session_id): <p>The session ID associated with the user's visit. Your application generates the sessionId when a user first visits your website or uses your application. Amazon Personalize uses the sessionId to associate events with the user before they log in. For more information, see <a href="https://docs.aws.amazon.com/personalize/latest/dg/recording-events.html">Recording Events</a>.</p>
/// - [`event_list(Vec<Event>)`](crate::client::fluent_builders::PutEvents::event_list) / [`set_event_list(Option<Vec<Event>>)`](crate::client::fluent_builders::PutEvents::set_event_list): <p>A list of event data from the session.</p>
/// - On success, responds with [`PutEventsOutput`](crate::output::PutEventsOutput)
/// - On failure, responds with [`SdkError<PutEventsError>`](crate::error::PutEventsError)
pub fn put_events(&self) -> fluent_builders::PutEvents {
fluent_builders::PutEvents::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`PutItems`](crate::client::fluent_builders::PutItems) operation.
///
/// - The fluent builder is configurable:
/// - [`dataset_arn(impl Into<String>)`](crate::client::fluent_builders::PutItems::dataset_arn) / [`set_dataset_arn(Option<String>)`](crate::client::fluent_builders::PutItems::set_dataset_arn): <p>The Amazon Resource Name (ARN) of the Items dataset you are adding the item or items to.</p>
/// - [`items(Vec<Item>)`](crate::client::fluent_builders::PutItems::items) / [`set_items(Option<Vec<Item>>)`](crate::client::fluent_builders::PutItems::set_items): <p>A list of item data.</p>
/// - On success, responds with [`PutItemsOutput`](crate::output::PutItemsOutput)
/// - On failure, responds with [`SdkError<PutItemsError>`](crate::error::PutItemsError)
pub fn put_items(&self) -> fluent_builders::PutItems {
fluent_builders::PutItems::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`PutUsers`](crate::client::fluent_builders::PutUsers) operation.
///
/// - The fluent builder is configurable:
/// - [`dataset_arn(impl Into<String>)`](crate::client::fluent_builders::PutUsers::dataset_arn) / [`set_dataset_arn(Option<String>)`](crate::client::fluent_builders::PutUsers::set_dataset_arn): <p>The Amazon Resource Name (ARN) of the Users dataset you are adding the user or users to.</p>
/// - [`users(Vec<User>)`](crate::client::fluent_builders::PutUsers::users) / [`set_users(Option<Vec<User>>)`](crate::client::fluent_builders::PutUsers::set_users): <p>A list of user data.</p>
/// - On success, responds with [`PutUsersOutput`](crate::output::PutUsersOutput)
/// - On failure, responds with [`SdkError<PutUsersError>`](crate::error::PutUsersError)
pub fn put_users(&self) -> fluent_builders::PutUsers {
fluent_builders::PutUsers::new(self.handle.clone())
}
}
pub mod fluent_builders {
//!
//! Utilities to ergonomically construct a request to the service.
//!
//! Fluent builders are created through the [`Client`](crate::client::Client) by calling
//! one of its operation methods. After parameters are set using the builder methods,
//! the `send` method can be called to initiate the request.
//!
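//!
//! A minimal sketch of the flow (the identifiers and values below are
//! placeholders, not values from a real event tracker):
//!
//! ```rust,no_run
//! # async fn docs(client: aws_sdk_personalizeevents::Client) {
//! let _rsp = client
//!     .put_events()
//!     .tracking_id("example-tracking-id") // placeholder
//!     .session_id("example-session")      // placeholder
//!     .user_id("example-user")            // placeholder
//!     .send()
//!     .await;
//! # }
//! ```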
/// Fluent builder constructing a request to `PutEvents`.
///
/// <p>Records user interaction event data. For more information see <a href="https://docs.aws.amazon.com/personalize/latest/dg/recording-events.html">Recording Events</a>.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct PutEvents {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::put_events_input::Builder,
}
impl PutEvents {
/// Creates a new `PutEvents`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::PutEventsOutput,
aws_smithy_http::result::SdkError<crate::error::PutEventsError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The tracking ID for the event. The ID is generated by a call to the <a href="https://docs.aws.amazon.com/personalize/latest/dg/API_CreateEventTracker.html">CreateEventTracker</a> API.</p>
pub fn tracking_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.tracking_id(input.into());
self
}
/// <p>The tracking ID for the event. The ID is generated by a call to the <a href="https://docs.aws.amazon.com/personalize/latest/dg/API_CreateEventTracker.html">CreateEventTracker</a> API.</p>
pub fn set_tracking_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_tracking_id(input);
self
}
/// <p>The user associated with the event.</p>
pub fn user_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.user_id(input.into());
self
}
/// <p>The user associated with the event.</p>
pub fn set_user_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_user_id(input);
self
}
/// <p>The session ID associated with the user's visit. Your application generates the sessionId when a user first visits your website or uses your application. Amazon Personalize uses the sessionId to associate events with the user before they log in. For more information, see <a href="https://docs.aws.amazon.com/personalize/latest/dg/recording-events.html">Recording Events</a>.</p>
pub fn session_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.session_id(input.into());
self
}
/// <p>The session ID associated with the user's visit. Your application generates the sessionId when a user first visits your website or uses your application. Amazon Personalize uses the sessionId to associate events with the user before they log in. For more information, see <a href="https://docs.aws.amazon.com/personalize/latest/dg/recording-events.html">Recording Events</a>.</p>
pub fn set_session_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_session_id(input);
self
}
/// Appends an item to `eventList`.
///
/// To override the contents of this collection use [`set_event_list`](Self::set_event_list).
///
/// <p>A list of event data from the session.</p>
pub fn event_list(mut self, input: crate::model::Event) -> Self {
self.inner = self.inner.event_list(input);
self
}
/// <p>A list of event data from the session.</p>
pub fn set_event_list(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::Event>>,
) -> Self {
self.inner = self.inner.set_event_list(input);
self
}
}
/// Fluent builder constructing a request to `PutItems`.
///
/// <p>Adds one or more items to an Items dataset. For more information see <a href="https://docs.aws.amazon.com/personalize/latest/dg/importing-items.html">Importing Items Incrementally</a>. </p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct PutItems {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::put_items_input::Builder,
}
impl PutItems {
/// Creates a new `PutItems`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::PutItemsOutput,
aws_smithy_http::result::SdkError<crate::error::PutItemsError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The Amazon Resource Name (ARN) of the Items dataset you are adding the item or items to.</p>
pub fn dataset_arn(mut self, input: impl Into<std::string::String>) -> Self |
/// <p>The Amazon Resource Name (ARN) of the Items dataset you are adding the item or items to.</p>
pub fn set_dataset_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_dataset_arn(input);
self
}
/// Appends an item to `items`.
///
/// To override the contents of this collection use [`set_items`](Self::set_items).
///
/// <p>A list of item data.</p>
pub fn items(mut self, input: crate::model::Item) -> Self {
self.inner = self.inner.items(input);
self
}
/// <p>A list of item data.</p>
pub fn set_items(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::Item>>,
) -> Self {
self.inner = self.inner.set_items(input);
self
}
}
/// Fluent builder constructing a request to `PutUsers`.
///
/// <p>Adds one or more users to a Users dataset. For more information see <a href="https://docs.aws.amazon.com/personalize/latest/dg/importing-users.html">Importing Users Incrementally</a>.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct PutUsers {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::put_users_input::Builder,
}
impl PutUsers {
/// Creates a new `PutUsers`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::PutUsersOutput,
aws_smithy_http::result::SdkError<crate::error::PutUsersError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The Amazon Resource Name (ARN) of the Users dataset you are adding the user or users to.</p>
pub fn dataset_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.dataset_arn(input.into());
self
}
/// <p>The Amazon Resource Name (ARN) of the Users dataset you are adding the user or users to.</p>
pub fn set_dataset_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_dataset_arn(input);
self
}
/// Appends an item to `users`.
///
/// To override the contents of this collection use [`set_users`](Self::set_users).
///
/// <p>A list of user data.</p>
pub fn users(mut self, input: crate::model::User) -> Self {
self.inner = self.inner.users(input);
self
}
/// <p>A list of user data.</p>
pub fn set_users(
mut self,
input: std::option::Option<std::vec::Vec<crate::model::User>>,
) -> Self {
self.inner = self.inner.set_users(input);
self
}
}
}
impl Client {
/// Creates a client with the given service config and connector override.
pub fn from_conf_conn<C, E>(conf: crate::Config, conn: C) -> Self
where
C: aws_smithy_client::bounds::SmithyConnector<Error = E> + Send + 'static,
E: Into<aws_smithy_http::result::ConnectorError>,
{
let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default();
let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default();
let sleep_impl = conf.sleep_impl.clone();
let mut builder = aws_smithy_client::Builder::new()
.connector(aws_smithy_client::erase::DynConnector::new(conn))
.middleware(aws_smithy_client::erase::DynMiddleware::new(
crate::middleware::DefaultMiddleware::new(),
));
builder.set_retry_config(retry_config.into());
builder.set_timeout_config(timeout_config);
if let Some(sleep_impl) = sleep_impl {
builder.set_sleep_impl(Some(sleep_impl));
}
let client = builder.build();
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
/// Creates a new client from a shared config.
#[cfg(any(feature = "rustls", feature = "native-tls"))]
pub fn new(sdk_config: &aws_types::sdk_config::SdkConfig) -> Self {
Self::from_conf(sdk_config.into())
}
/// Creates a new client from the service [`Config`](crate::Config).
#[cfg(any(feature = "rustls", feature = "native-tls"))]
pub fn from_conf(conf: crate::Config) -> Self {
let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default();
let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default();
let sleep_impl = conf.sleep_impl.clone();
let mut builder = aws_smithy_client::Builder::dyn_https().middleware(
aws_smithy_client::erase::DynMiddleware::new(
crate::middleware::DefaultMiddleware::new(),
),
);
builder.set_retry_config(retry_config.into());
builder.set_timeout_config(timeout_config);
// the builder maintains a try-state. To avoid suppressing the warning when sleep is unset,
// only set it if we actually have a sleep impl.
if let Some(sleep_impl) = sleep_impl {
builder.set_sleep_impl(Some(sleep_impl));
}
let client = builder.build();
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
}
| {
self.inner = self.inner.dataset_arn(input.into());
self
} |
test_resend_stashed_msgs.py | from copy import copy
import pytest
from stp_core.loop.eventually import eventually
from plenum.common.constants import OP_FIELD_NAME, BATCH
from plenum.common.messages.node_messages import Batch
from plenum.common.stacks import nodeStackClass
from plenum.common.types import f
from stp_core.network.auth_mode import AuthMode
from stp_core.network.port_dispenser import genHa
from stp_core.test.helper import Printer, connectStack
from stp_zmq.test.helper import genKeys
@pytest.fixture()
def registry():
return {
'Alpha': genHa(),
'Beta': genHa(),
'Gamma': genHa(),
'Delta': genHa()
}
@pytest.fixture(scope="function")
def func_create_stacks(tdir, registry):
def create_stack(count):
genKeys(tdir, registry.keys())
stacks = []
for name, ha in registry.items():
printer = Printer(name)
stackParams = dict(name=name, ha=ha, basedirpath=tdir,
auth_mode=AuthMode.RESTRICTED.value)
reg = copy(registry)
reg.pop(name)
stack = nodeStackClass(stackParams, printer.print, reg)
stack.start()
stacks.append(stack)
if len(stacks) == count:
break
return stacks
yield create_stack
@pytest.mark.skip("INDY-2253: we don't stasj ping/pongs anymore")
def test_use_send_from_zstack_on_resend(func_create_stacks, looper):
| aStack, bStack = func_create_stacks(2)
connectStack(aStack, bStack)
"""
Sending some pi msgs for creating a batch on flashOutBox
This function just put 'pi ' message into outBoxes queue, not send
"""
aStack.sendPingPong(bStack.name)
aStack.sendPingPong(bStack.name)
"""
Emulate batch creation and sending. Batch should be added into _stashed_to_disconnected queue
"""
aStack.flushOutBoxes()
assert len(aStack._stashed_to_disconnected[bStack.name]) == 1
batch_to_disconnected = aStack.deserializeMsg(aStack._stashed_to_disconnected[bStack.name][0])
assert OP_FIELD_NAME in batch_to_disconnected and batch_to_disconnected[OP_FIELD_NAME] == BATCH
"""
This method call connect method for bStack and put 'pi' message into outBoxes queue
"""
connectStack(bStack, aStack)
"""
Wait for socket's connecting routines
"""
looper.runFor(1)
"""
This instruction get 'pi' message from outBoxes queue, create a batch if needed and send it to aStack
"""
bStack.flushOutBoxes()
"""
It needs for getting 'pi' message from bStack. It process 'pi' message and put 'po' message to outBoxes queue
"""
looper.run(aStack.service())
"""
Send 'po' message to bStack
"""
aStack.flushOutBoxes()
"""
Processing previous sending batch (zmq feature) and 'po'
"""
looper.run(bStack.service())
"""
For sending 'po' message to aStack
"""
bStack.flushOutBoxes()
"""
Append 'pi' msg for checking that batch into batch will not be included
"""
aStack._stashed_to_disconnected[bStack.name].append('pi')
"""
Emulate that aStack got 'po' message from bStack and it must run _resend_to_disconnected
"""
looper.run(aStack.service())
"""
Emulate batch creating and sending
"""
aStack.flushOutBoxes()
looper.run(bStack._serviceStack(bStack.age, None))
"""
rxMsgs queue should contains only one 'pi' message from step 3 and batch
which was failed to sending to disconnected stack from step 2
"""
got_pi = False
got_batch = False
def rxMsgsNotEmpty():
assert bStack.rxMsgs
looper.run(eventually(rxMsgsNotEmpty))
while bStack.rxMsgs:
m, frm = bStack.rxMsgs.popleft()
if m.encode() not in bStack.healthMessages:
msg = bStack.deserializeMsg(m)
else:
got_pi = True
continue
if OP_FIELD_NAME in msg and msg[OP_FIELD_NAME] == BATCH:
if msg == batch_to_disconnected:
"""
Exactly the same batch which should be sent to disconnected node
"""
got_batch = True
continue
else:
"""Check that there is no batches with batch as message"""
batch = Batch(messages=msg[f.MSGS.nm],
signature=msg[f.SIG.nm])
for m in batch.messages:
assert OP_FIELD_NAME not in m and BATCH not in m
assert got_pi and got_batch |
|
process_data_log_old.py | import pandas as pd
import os
from configparser import ConfigParser
from datetime import datetime
import numpy as np
def analyze_process_data_log(configini,chosenlist):
| dateTime = datetime.now().strftime('%Y%m%d%H%M%S')
config = ConfigParser()
configFile = str(configini)
config.read(configFile)
csv_dir = config.get('General settings', 'csv_path')
csv_dir_in = os.path.join(csv_dir, 'machine_results')
no_targets = config.getint('SML settings', 'No_targets')
boutEnd = 0
boutEnd_list = [0]
boutStart_list = []
filesFound = []
target_names = []
vidInfPath = config.get('General settings', 'project_path')
vidInfPath = os.path.join(vidInfPath, 'logs')
vidInfPath = os.path.join(vidInfPath, 'video_info.csv')
vidinfDf = pd.read_csv(vidInfPath)
loop = 0
loopy = 0
########### FIND CSV FILES ###########
for i in os.listdir(csv_dir_in):
if i.endswith(".csv"):
file = os.path.join(csv_dir_in, i)
filesFound.append(file)
########### GET TARGET COLUMN NAMES ###########
for ff in range(no_targets):
currentModelNames = 'target_name_' + str(ff+1)
currentModelNames = config.get('SML settings', currentModelNames)
target_names.append(currentModelNames)
print('Analyzing ' + str(len(target_names)) + ' classifier result(s) in ' + str(len(filesFound)) + ' video file(s).')
########### logfile path ###########
log_fn = 'sklearn_' + str(dateTime) + '.csv'
log_path = config.get('General settings', 'project_path')
log_path = os.path.join(log_path, 'logs')
log_fn = os.path.join(log_path, log_fn)
if not os.path.exists(log_path):
os.makedirs(log_path)
headers = ['Video']
for i in target_names:
head1 = str(i) + ' events'
head2 = str(i) + ' sum duration (s)'
head3 = str(i) + ' mean duration (s)'
head4 = str(i) + ' median duration (s)'
head5 = str(i) + ' first occurrence (s)'
head6 = str(i) + ' mean interval (s)'
head7 = str(i) + ' median interval (s)'
headers.extend([head1, head2, head3, head4, head5, head6, head7])
log_df = pd.DataFrame(columns=headers)
for i in filesFound:
boutsDf = pd.DataFrame(columns=['Event', 'Start_frame', 'End_frame'])
currentFile = i
currVidName = os.path.basename(currentFile)
currVidName = currVidName.replace('.csv', '')
fps = vidinfDf.loc[vidinfDf['Video'] == currVidName]
try:
fps = int(fps['fps'])
except TypeError:
print('Error: make sure all the videos that are going to be analyzed are represented in the project_folder/logs/video_info.csv file')
loopy+=1
print('Analyzing video ' + str(loopy) + '/' + str(len(filesFound)) + '...')
dataDf = pd.read_csv(currentFile)
dataDf['frames'] = np.arange(len(dataDf))
folderNm = os.path.basename(currentFile)
logFolderNm = str(folderNm.split('.')[0])
for bb in target_names:
currTarget = bb
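# Walk the per-frame classifier column: the first frame scoring 1 opens a
# bout, the next frame scoring 0 closes it; scanning resumes from the close.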
for indexes, rows in dataDf[dataDf['frames'] >= boutEnd].iterrows():
if rows[currTarget] == 1:
boutStart = rows['frames']
for index, row in dataDf[dataDf['frames'] >= boutStart].iterrows():
if row[currTarget] == 0:
boutEnd = row['frames']
if boutEnd_list[-1] != boutEnd:
boutStart_list.append(boutStart)
boutEnd_list.append(boutEnd)
values = [currTarget, boutStart, boutEnd]
boutsDf.loc[(len(boutsDf))] = values
break
break
boutStart_list = [0]
boutEnd_list = [0]
boutEnd = 0
# Convert frame indices to times (seconds)
boutsDf['Start_time'] = boutsDf['Start_frame'] / fps
boutsDf['End_time'] = boutsDf['End_frame'] / fps
boutsDf['Bout_time'] = boutsDf['End_time'] - boutsDf['Start_time']
# Record a log row for this video
log_list = []
log_list.append(logFolderNm)
for i in target_names:
currDf = boutsDf.loc[boutsDf['Event'] == i]
try:
firstOccur = round(currDf['Start_time'].iloc[0], 4)
except IndexError:
firstOccur = 0
eventNOs = len(currDf)
TotEventDur = round(currDf['Bout_time'].sum(), 4)
try:
MeanEventDur = round(TotEventDur / eventNOs, 4)
except ZeroDivisionError:
MeanEventDur = 0
try:
MedianEventDur = round(currDf['Bout_time'].median(), 10)
except ZeroDivisionError:
MedianEventDur = 0
currDf_shifted = currDf.shift(periods=-1)
currDf_shifted = currDf_shifted.drop(columns=['Event', 'Start_frame', 'End_frame', 'End_time', 'Bout_time'])
currDf_shifted = currDf_shifted.rename(columns={'Start_time': 'Start_time_shifted'})
currDf_combined = pd.concat([currDf, currDf_shifted], axis=1, join='inner')
currDf_combined['Event_interval'] = currDf_combined['Start_time_shifted'] - currDf_combined['End_time']
meanEventInterval = currDf_combined["Event_interval"].mean()
medianEventInterval = currDf_combined['Event_interval'].median()
log_list.append(eventNOs)
log_list.append(TotEventDur)
log_list.append(MeanEventDur)
log_list.append(MedianEventDur)
log_list.append(firstOccur)
log_list.append(meanEventInterval)
log_list.append(medianEventInterval)
log_df.loc[loop] = log_list
loop += 1
print('File # processed for machine predictions: ' + str(loop) + '/' + str(len(filesFound)))
log_df.fillna(0, inplace=True)
# drop columns not chosen
for i in chosenlist:
log_df = log_df[log_df.columns.drop(list(log_df.filter(regex=str(i))))]
log_df.to_csv(log_fn, index=False)
print('All files processed for machine predictions: data file saved @' + str(log_fn)) |
|
stat.rs | // Copyright 2019-2020 Twitter, Inc.
// Licensed under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
use core::convert::TryFrom;
use core::str::FromStr;
use rustcommon_metrics::{Source, Statistic};
use serde_derive::{Deserialize, Serialize};
use strum::ParseError;
use strum_macros::{EnumIter, EnumString, IntoStaticStr};
#[derive(
Clone,
Copy,
Debug,
Deserialize,
EnumIter,
EnumString,
Eq,
IntoStaticStr,
PartialEq,
Hash,
Serialize,
)]
#[serde(deny_unknown_fields, try_from = "&str", into = "&str")]
pub enum UdpStatistic {
#[strum(serialize = "udp/receive/datagrams")]
InDatagrams,
#[strum(serialize = "udp/receive/errors")]
InErrors,
#[strum(serialize = "udp/transmit/datagrams")]
OutDatagrams,
} | Self::InDatagrams => Some(("Udp:", "InDatagrams")),
Self::InErrors => Some(("Udp:", "InErrors")),
Self::OutDatagrams => Some(("Udp:", "OutDatagrams")),
}
}
}
impl Statistic for UdpStatistic {
fn name(&self) -> &str {
(*self).into()
}
fn description(&self) -> Option<&str> {
match self {
Self::InDatagrams => Some("udp datagrams received"),
Self::InErrors => Some("udp datagrams that were not delivered to valid port"),
Self::OutDatagrams => Some("udp datagrams transmitted"),
}
}
fn unit(&self) -> Option<&str> {
match self {
Self::InDatagrams | Self::OutDatagrams => Some("datagrams"),
_ => None,
}
}
fn source(&self) -> Source {
Source::Counter
}
}
impl TryFrom<&str> for UdpStatistic {
type Error = ParseError;
fn try_from(s: &str) -> Result<Self, Self::Error> {
UdpStatistic::from_str(s)
}
} |
impl UdpStatistic {
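/// Returns the (section, field) pair that identifies this statistic in
/// kernel counter output (e.g. the "Udp:" lines of `/proc/net/snmp`).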
pub fn keys(self) -> Option<(&'static str, &'static str)> {
match self { |
icon_do_not_disturb.rs |
pub struct IconDoNotDisturb {
props: crate::Props,
}
impl yew::Component for IconDoNotDisturb {
type Properties = crate::Props;
type Message = ();
fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
{
Self { props }
}
fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
|
fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
{
false
}
fn view(&self) -> yew::prelude::Html
{
yew::prelude::html! {
<svg
class=self.props.class.unwrap_or("")
width=self.props.size.unwrap_or(24).to_string()
height=self.props.size.unwrap_or(24).to_string()
viewBox="0 0 24 24"
fill=self.props.fill.unwrap_or("none")
stroke=self.props.color.unwrap_or("currentColor")
stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
>
<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.42 0-8-3.58-8-8 0-1.85.63-3.55 1.69-4.9L16.9 18.31C15.55 19.37 13.85 20 12 20zm6.31-3.1L7.1 5.69C8.45 4.63 10.15 4 12 4c4.42 0 8 3.58 8 8 0 1.85-.63 3.55-1.69 4.9z"/></svg>
</svg>
}
}
}
| {
true
} |
chunks.rs | use crate::position::{GlobalSectorPosition, GlobalChunkPosition};
use crate::mask::sparse::recycle::{Recycler, AllocCache};
use crate::mask::{Mask, BitCube, Scan, ScanClear};
use std::collections::HashMap;
use std::collections::hash_map::{Entry, Keys};
use std::ops::Index;
const FALSE_REF: &bool = &false;
/// A sparse mask for marking entire chunks (16x16x16 cubes).
/// For individual blocks, use BlocksMask.
/// This only supports chunks up to Y=15, a 16 high chunk stack.
/// This mirrors the current Anvil implementation in Minecraft, but
/// does not support true cubic chunks.
/// In this implementation, a BitCube represents the chunks in a Sector.
/// Sectors without a BitCube entry are implicitly all 0s.
/// While it may appear that this is another false world abstraction,
/// it is actually appropriate as a sparse mask.
pub struct ChunksMask {
sectors: HashMap<GlobalSectorPosition, BitCube>,
cache: AllocCache<BitCube>
}
impl ChunksMask {
pub fn new(cache_max_size: usize) -> Self {
ChunksMask {
sectors: HashMap::new(),
cache: AllocCache::new(cache_max_size)
}
}
pub fn | (&self) -> Keys<GlobalSectorPosition, BitCube> {
self.sectors.keys()
}
pub fn sector(&self, coordinates: GlobalSectorPosition) -> Option<&BitCube> {
self.sectors.get(&coordinates)
}
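/// Returns the BitCube for `sector`, allocating one from the AllocCache
/// (or creating it fresh) if the sector is not yet present.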
fn require_sector(&mut self, sector: GlobalSectorPosition) -> &mut BitCube {
let cache = &mut self.cache;
self.sectors.entry(sector).or_insert_with(|| cache.create())
}
/*pub fn set_neighbors(&mut self, coords: GlobalChunkPosition) {
self.set_true((coords.0 + 1, coords.1, coords.2 ));
self.set_true((coords.0 - 1, coords.1, coords.2 ));
self.set_true((coords.0, coords.1, coords.2 + 1));
self.set_true((coords.0, coords.1, coords.2 - 1));
if coords.1 < 255 {
self.set_true((coords.0, coords.1 + 1, coords.2 ));
}
if coords.1 > 0 {
self.set_true((coords.0, coords.1 - 1, coords.2 ));
}
}*/
}
impl Mask<GlobalChunkPosition> for ChunksMask {
/*fn clear(&mut self) {
for (_, (mut value, _)) in self.sectors.drain().take(self.cache.remaining_capacity()) {
value.clear();
self.cache.destroy(value);
}
self.sectors.clear();
}*/
fn set_true(&mut self, chunk: GlobalChunkPosition) {
let (sector, inner) = (chunk.global_sector(), chunk.local_chunk());
self.require_sector(sector).set_true(inner);
}
fn set_false(&mut self, chunk: GlobalChunkPosition) {
let (sector, inner) = (chunk.global_sector(), chunk.local_chunk());
match self.sectors.entry(sector) {
Entry::Occupied(mut entry) => {
{
let mask = entry.get_mut();
mask.set_false(inner);
if !mask.empty() {
return;
}
}
self.cache.destroy(entry.remove_entry().1)
},
Entry::Vacant(_) => return
}
}
fn set_or(&mut self, chunk: GlobalChunkPosition, value: bool) {
let (sector, inner) = (chunk.global_sector(), chunk.local_chunk());
self.require_sector(sector).set_or(inner, value);
// We don't need to check to see if the mask is empty here.
// BitCube::set_or can either (1) not change the mask, or (2) add another bit.
// Since the mask can't lose a bit, we don't need to check.
}
fn scan(&self) -> Scan<Self, GlobalChunkPosition> {
// TODO: Scanning sparse maps has a non deterministic order. Return sectors instead?
unimplemented!("No clear / logical way to scan a sparse map yet...")
}
fn scan_clear(&mut self) -> ScanClear<Self, GlobalChunkPosition> {
// TODO: Scanning sparse maps has a non deterministic order. Return sectors instead?
unimplemented!("No clear / logical way to scan a sparse map yet...")
}
fn count_ones(&self) -> u32 {
self.sectors.values().fold(0, |state, value| state + value.count_ones() as u32)
}
fn count_zeros(&self) -> u32 {
self.sectors.values().fold(0, |state, value| state + value.count_zeros() as u32)
}
}
impl Index<GlobalChunkPosition> for ChunksMask {
type Output = bool;
fn index(&self, chunk: GlobalChunkPosition) -> &bool {
let (sector, inner) = (chunk.global_sector(), chunk.local_chunk());
self.sectors.get(&sector).map(|mask| &mask[inner]).unwrap_or(FALSE_REF)
}
} | sectors |
TestResultsWriter.py | import unittest
import sys
import os
import glob
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(root)
from ResultsWriter import ImageWithBoundingBoxes
class TestResultsWriter(unittest.TestCase):
|
if __name__ == "__main__":
unittest.main() | def test_basic_box_drawing(self):
writer = ImageWithBoundingBoxes()
boxes = []
boxes.append((500, 500, 700, 700))
writer.Write("./samples/test-1.jpg", boxes, "./samples/results/test-1-results.jpg")
def test_multiple_box_drawing(self):
writer = ImageWithBoundingBoxes()
boxes = []
boxes.append((500, 500, 700, 700))
boxes.append((800, 500, 1000, 700))
boxes.append((900, 900, 1100, 1100))
boxes.append((1200, 900, 1400, 1100))
writer.Write("./samples/test-1.jpg", boxes, "./samples/results/test-2-results.jpg")
def tearDown(self):
filesToRemove = glob.glob("./samples/results/*.jpg")
for f in filesToRemove:
os.remove(f) |
ArrowSelector.tsx | /* eslint-disable react/prop-types */
import React, { FunctionComponent } from 'react';
import clsx from 'clsx';
import TRSStepper from './TRSStepper/TRSStepper';
import styles from './arrowSelector.css';
export interface ArrowSelectorProps {
backInactive: boolean,
disabled?: boolean,
forwardInactive: boolean,
innerWidth: number,
label: string,
onNavigateBack: () => void,
onNavigateForward: () => void,
}
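// Presentational selector: left/right TRSStepper arrows around a fixed-width
// label; clicks become no-ops while disabled or the given direction is inactive.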
const ArrowSelector: FunctionComponent<ArrowSelectorProps> = ({
disabled = false,
backInactive,
forwardInactive,
innerWidth,
label, | <TRSStepper
direction="left"
disabled={disabled}
inactive={backInactive}
onClick={(disabled || backInactive) ? () => {} : onNavigateBack}
/>
<div className={styles.contentOuter}>
<div className={styles.contentInner} style={{ width: innerWidth }}>
<div className={clsx(styles.value, { [styles.disabled]: disabled })}>
{label}
</div>
</div>
</div>
<TRSStepper
direction="right"
disabled={disabled}
inactive={forwardInactive}
onClick={(disabled || forwardInactive) ? () => {} : onNavigateForward}
/>
</div>
);
export default ArrowSelector; | onNavigateBack,
onNavigateForward,
}) => (
<div className={styles.arrowSelector}> |
app.js |
const answerUrl = baseUrl+'questions/answers';
document.addEventListener("DOMContentLoaded", function(){
fetch(baseUrl+'questions/recent').then(response => {
return response.json();
}).then(data => {
// Work with JSON data here
console.log(data);
findDom('loading').style.visibility='hidden';
findDom('featured-question-title').innerHTML=data[0]['question_title'];
findDom('featured-question-body').innerHTML=data[0]['question_body'];
findDom('featured-question-id').setAttribute('value', data[0]['id']);
findDom('featured-question-user-id').setAttribute('value', data[0]['user_id']);
console.log(data[0]['id']);
findDom('answer-link').style.visibility='visible';
id = data[0]['id'];
fetchRecentQuestionAnswers(id);
return data;
}).catch(err => {
console.log(err);
// Do something for an error here
});
});
function fetchRecentQuestionAnswers(id){
console.log('recent question Id:'+id);
console.log('lets fetch the answers to this question');
const urlPostFix = 'questions/recent/'+id+'/answers';
const url = baseUrl+urlPostFix;
console.log(url);
fetch(url).then(response => {
return response.json();
}).then(data =>{
console.log('answers retrieved');
console.log(data);
}).catch(err=>{
console.log('Failed to retrieve answers');
});
}
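// Persist the typed answer for the featured question: validate it, then POST
// it urlencoded with the JWT from localStorage in the authorization header.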
function saveAnswer(questionId) {
const answer = findDom('answer-body').value;
/**
 * Check that the answer text is valid before saving
 */
if (invalidAnswer(answer)) {
showAnswerSaveError();
return;
}
questionId = findDom('featured-question-id').value;
const userId = findDom('featured-question-user-id').value;
const queryBody = 'question_id='+questionId+'&user_id='+userId+'&answer='+answer;
const bodyRequest = {
method: 'post',
headers: {
"Content-type": "application/x-www-form-urlencoded; charset=UTF-8",
"authorization": localStorage.Authorization
},
body: queryBody
};
fetch(answerUrl, bodyRequest)
.then(response => response.json())
.then(data => {
data['status'] === 200 ? indicateSuccess() : indicateError();
})
.catch(err => {console.log(err)});
}
function | (answer) {
return !answer || typeof answer !== "string" || answer.trim().length < 5;
}
function showAnswerForm(){
findDom('answer-form').style.visibility='visible';
findDom('answer-again').style.visibility='hidden';
}
function indicateSuccess(){
findDom('answer-body').value = '';
findDom('answer-form').style.visibility='hidden';
findDom('answer-again').style.visibility='visible';
findDom('answer-title').style.visibility='hidden';
const answerStatus = findDom('answer-status');
answerStatus.innerHTML = 'Your answer was saved successfully';
answerStatus.style.color='green';
answerStatus.style.visibility='visible';
}
function indicateError(){
findDom('answer-status').style.color='red';
findDom('answer-body').style.border='1px solid red';
const answerStatus = findDom('answer-status');
answerStatus.innerHTML = 'Failed to save! Login and try again';
answerStatus.style.visibility='visible';
}
function showAnswerSaveError(){
console.log('invalid answer text');
const answerStatus = findDom('answer-status');
answerStatus.innerHTML = 'Please type at least 5 characters';
answerStatus.style.color='orange';
answerStatus.style.visibility='visible';
} | invalidAnswer |
main.rs | fn main() {
cli::start();
}
mod cli {
use clap::{App, Arg, ArgMatches};
use std::cmp::Ordering::Equal;
use tspsolver::solve;
pub fn start() {
let matches = App::new("tsp-solver")
.version("0.1.0")
.author("Kasper Ziemianek <[email protected]>")
.about("Helps salesman find the shortest route!")
.arg(
Arg::with_name("file")
.short("f")
.long("file")
.value_name("FILE")
.help("Sets problem instance file")
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name("duration")
.short("d")
.long("duration")
.value_name("DURATION")
.help("Sets computation duration in seconds")
.takes_value(true)
.default_value(&"5"),
)
.arg(
Arg::with_name("alg")
.short("a")
.long("algorithm")
.value_name("ALGORITHM")
.help("Sets meta-heuristics algorithm")
.takes_value(true)
.default_value(&"hill-climbing"),
)
.arg(
Arg::with_name("runs") | .takes_value(true)
.default_value(&"1"),
)
.arg(
Arg::with_name("parallel")
.short("p")
.long("parallel")
.value_name("PARALLEL")
.takes_value(false)
.help("Sets parallel flag"),
)
.get_matches();
let file = get_file(&matches);
let computation_duration = get_duration(&matches);
let alg = get_alg(&matches);
let runs = get_runs(&matches);
let parallel = get_parallel(&matches);
let result = solve(&file, computation_duration, &alg, runs, parallel);
for (i, result) in result.iter().enumerate() {
match result {
Ok(v) => println!("#{} score {}", i + 1, v.travel_distance),
Err(v) => println!("#{} could not solve problem, error: {}", i + 1, v),
}
}
let mut distances: Vec<f32> = result
.iter()
.filter(|result| result.is_ok())
.map(|result| result.as_ref())
.map(|result| result.unwrap())
.map(|result| result.travel_distance)
.collect();
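// f32 is only PartialOrd; fall back to Equal so a NaN distance cannot
// panic the sort.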
distances.sort_by(|a, b| a.partial_cmp(b).unwrap_or(Equal));
if let Some(best) = distances.first() {
println!("Best score {}", best);
}
}
fn get_file(matches: &ArgMatches) -> String {
get_argument(matches, "file")
}
fn get_duration(matches: &ArgMatches) -> i64 {
get_argument(matches, "duration").parse().unwrap()
}
fn get_alg(matches: &ArgMatches) -> String {
get_argument(matches, "alg")
}
fn get_runs(matches: &ArgMatches) -> i32 {
get_argument(&matches, "runs").parse().unwrap()
}
fn get_parallel(matches: &ArgMatches) -> bool {
matches.is_present("parallel")
}
fn get_argument(matches: &ArgMatches, name: &str) -> String {
matches.value_of(name).unwrap().to_owned()
}
} | .short("r")
.long("runs")
.value_name("RUNS")
.help("Sets number of alrogithm runs") |
util_test.py | from pytest import mark
from pantable.util import convert_texts, convert_texts_fast, eq_panflute_elems
# construct some texts cases
texts_1 = [
'some **markdown** here',
'and ~~some~~ other?'
]
texts_2 = [
'some *very* interesting markdown [example]{#so_fancy}',
'''# Comical
Text
# Totally comical
Text'''
]
textss = [texts_1, texts_2, texts_1 + texts_2]
# reference answers
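# convert_texts serves as the slow reference implementation; the tests below
# check that convert_texts_fast agrees with it in both directions.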
elemss = [convert_texts(texts) for texts in textss]
@mark.parametrize('elems,texts', zip(elemss, textss))
def | (elems, texts):
assert eq_panflute_elems(elems, convert_texts_fast(texts))
@mark.parametrize('elems,texts', zip(elemss, textss))
def test_convert_texts_panflute_to_markdown(elems, texts):
assert texts == convert_texts_fast(elems, input_format='panflute', output_format='markdown')
| test_convert_texts_markdown_to_panflute |
device_test.py | # pylint: disable=wrong-or-nonexistent-copyright-notice
import pytest
import cirq
def test_qubit_set():
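# Each device below exercises one lookup path of the default qubit_set():
# no qubits at all, a public/private field, and a public/private method.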
class RawDevice(cirq.Device):
pass
assert RawDevice().qubit_set() is None
class QubitFieldDevice(cirq.Device):
def __init__(self):
self.qubits = cirq.LineQubit.range(3)
assert QubitFieldDevice().qubit_set() == frozenset(cirq.LineQubit.range(3))
class PrivateQubitFieldDevice(cirq.Device):
def __init__(self):
self._qubits = cirq.LineQubit.range(4)
assert PrivateQubitFieldDevice().qubit_set() == frozenset(cirq.LineQubit.range(4))
class QubitMethodDevice(cirq.Device):
def qubits(self):
return cirq.LineQubit.range(5)
assert QubitMethodDevice().qubit_set() == frozenset(cirq.LineQubit.range(5))
class PrivateQubitMethodDevice(cirq.Device):
def _qubits(self):
return cirq.LineQubit.range(6)
assert PrivateQubitMethodDevice().qubit_set() == frozenset(cirq.LineQubit.range(6))
def test_qid_pairs():
class RawDevice(cirq.Device):
pass
assert RawDevice().qid_pairs() is None
class QubitFieldDevice(cirq.Device):
def __init__(self, qubits):
self.qubits = qubits
assert len(QubitFieldDevice(cirq.LineQubit.range(10)).qid_pairs()) == 9
assert len(QubitFieldDevice(cirq.GridQubit.rect(10, 10)).qid_pairs()) == 180
assert len(QubitFieldDevice([cirq.NamedQubit(str(s)) for s in range(10)]).qid_pairs()) == 45
def test_qid_pair(): | assert e1 == e2
assert e2 != e3
assert repr(e1) == "cirq.QidPair(cirq.LineQubit(0), cirq.LineQubit(1))"
assert len(e1) == 2
a, b = e1
assert (a, b) == (q0, q1)
a, b = e2
assert (a, b) == (q0, q1)
assert q0 in e1
assert q1 in e1
assert q2 not in e1
set1 = frozenset([e1, e2])
set2 = frozenset([e2, e3])
assert len(set1) == 1
assert len(set2) == 2
with pytest.raises(ValueError, match='A QidPair cannot have identical qids.'):
cirq.SymmetricalQidPair(q0, q0) | q0, q1, q2, q3 = cirq.LineQubit.range(4)
e1 = cirq.SymmetricalQidPair(q0, q1)
e2 = cirq.SymmetricalQidPair(q1, q0)
e3 = cirq.SymmetricalQidPair(q2, q3) |
_nn.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=no-else-return, invalid-name, unused-argument, too-many-arguments, consider-using-in
"""Backend compiler related feature registration"""
from __future__ import absolute_import
from tvm import topi
from tvm.topi.utils import get_const_tuple
from tvm.runtime import convert
from tvm.te.hybrid import script
from .. import op as reg
from .. import strategy
from ..op import OpPattern
from .._tensor import elemwise_shape_func
from ..strategy.generic import is_depthwise_conv2d
from ...transform import LayoutConfig
# relu
reg.register_broadcast_schedule("nn.relu")
reg.register_pattern("nn.relu", OpPattern.ELEMWISE)
# softmax
reg.register_strategy("nn.softmax", strategy.softmax_strategy)
reg.register_pattern("nn.softmax", OpPattern.OPAQUE)
# log_softmax
reg.register_schedule("nn.log_softmax", strategy.schedule_log_softmax)
reg.register_pattern("nn.log_softmax", OpPattern.OPAQUE)
# dense
reg.register_strategy("nn.dense", strategy.dense_strategy)
reg.register_pattern("nn.dense", reg.OpPattern.OUT_ELEMWISE_FUSABLE)
# fifo_buffer
@reg.register_compute("nn.fifo_buffer")
def compute_fifo_buffer(attrs, inputs, out_type):
return [topi.nn.fifo_buffer(inputs[0], inputs[1], axis=attrs.get_int("axis"))]
reg.register_injective_schedule("nn.fifo_buffer")
reg.register_pattern("nn.fifo_buffer", OpPattern.OPAQUE)
# batch_matmul
reg.register_strategy("nn.batch_matmul", strategy.batch_matmul_strategy)
reg.register_pattern("nn.batch_matmul", reg.OpPattern.OUT_ELEMWISE_FUSABLE)
# sparse_dense
@reg.register_compute("nn.sparse_dense")
def compute_sparse_dense(attrs, inputs, out_type):
"""Compute definition of sparse_dense"""
return [topi.nn.sparse_dense(inputs[0], inputs[1], inputs[2], inputs[3])]
reg.register_strategy("nn.sparse_dense", strategy.sparse_dense_strategy)
reg.register_pattern("nn.sparse_dense", reg.OpPattern.OUT_ELEMWISE_FUSABLE)
@reg.register_alter_op_layout("nn.sparse_dense")
def alter_op_layout_sparse_dense(attrs, inputs, tinfos, out_type):
"""Alternate the layout of sparse_dense"""
return topi.nn.sparse_dense_alter_layout(attrs, inputs, tinfos, out_type)
@reg.register_compute("nn.internal.sparse_dense_padded")
def compute_sparse_dense_padded(attrs, inputs, out_type):
"""Compute definition of sparse_dense_padded"""
raise NotImplementedError("nn.internal.sparse_dense_padded is only available on cuda")
reg.register_strategy("nn.internal.sparse_dense_padded", strategy.sparse_dense_padded_strategy)
reg.register_pattern("nn.internal.sparse_dense_padded", reg.OpPattern.OUT_ELEMWISE_FUSABLE)
# sparse_transpose
@reg.register_compute("nn.sparse_transpose")
def compute_sparse_transpose(attrs, inputs, out_type):
"""Compute definition of sparse_transpose"""
return topi.nn.sparse_transpose(inputs[0], inputs[1], inputs[2])
reg.register_schedule("nn.sparse_transpose", strategy.schedule_sparse_transpose)
reg.register_pattern("nn.sparse_transpose", reg.OpPattern.OUT_ELEMWISE_FUSABLE)
# conv1d
reg.register_strategy("nn.conv1d", strategy.conv1d_strategy)
reg.register_pattern("nn.conv1d", OpPattern.OUT_ELEMWISE_FUSABLE)
# conv2d
reg.register_strategy("nn.conv2d", strategy.conv2d_strategy)
reg.register_pattern("nn.conv2d", OpPattern.OUT_ELEMWISE_FUSABLE)
@reg.register_alter_op_layout("nn.conv2d")
def alter_op_layout_conv2d(attrs, inputs, tinfos, out_type):
"""Alternate the layout of conv2d"""
return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type)
@reg.register_legalize("nn.conv2d")
def legalize_conv2d(attrs, inputs, types):
"""Legalize conv2d op.
Parameters
----------
attrs : tvm.ir.Attrs
Attributes of current convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
types : list of types
List of input and output types
Returns
-------
result : tvm.relay.Expr
The legalized expr
"""
return topi.nn.conv2d_legalize(attrs, inputs, types)
@reg.register_convert_op_layout("nn.conv2d")
def convert_conv2d(attrs, inputs, tinfos, desired_layouts):
"""Convert Layout pass registration for conv2d op.
Parameters
----------
attrs : tvm.ir.Attrs
Attributes of current convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
tinfos : list of types
List of input and output types
desired_layouts : list of layout strings
List of layouts defining our desired
layout for the data and kernel inputs respectively.
Returns
-------
result : tvm.relay.Expr
The transformed expr
"""
# pylint: disable=import-outside-toplevel
from tvm import relay
data, weight = inputs
# First check if there is a LayoutConfig scope, and if so, whether
# it indicates we should ignore this layer or not.
layout_config = LayoutConfig.current
if layout_config is not None:
skip_layer = layout_config.check_skip()
if skip_layer:
return relay.nn.conv2d(data, weight, **attrs)
# Prepare new layout.
new_attrs = dict(attrs)
assert len(desired_layouts) == 2, "A desired layout is expected for both of nn.conv2d's inputs"
desired_data_layout, desired_kernel_layout = map(str, desired_layouts)
assert desired_data_layout != "default", "Data layout cannot be default"
new_attrs["data_layout"] = desired_data_layout
if desired_kernel_layout != "default":
new_attrs["kernel_layout"] = desired_kernel_layout
return relay.nn.conv2d(data, weight, **new_attrs)
# Handle default kernel layouts
if desired_data_layout == "NCHW":
new_attrs["kernel_layout"] = "OIHW"
return relay.nn.conv2d(data, weight, **new_attrs)
elif desired_data_layout == "NHWC":
# Check for depthwise convolution.
data_info, weight_info = tinfos
if is_depthwise_conv2d(
data_info.shape,
attrs["data_layout"],
weight_info.shape,
attrs["kernel_layout"],
attrs["groups"],
):
new_attrs["kernel_layout"] = "HWOI"
else:
new_attrs["kernel_layout"] = "HWIO"
return relay.nn.conv2d(data, weight, **new_attrs)
elif desired_data_layout == "HWNC":
new_attrs["kernel_layout"] = "HWOI"
return relay.nn.conv2d(data, weight, **new_attrs)
raise ValueError("Layout %s is not yet supported." % desired_data_layout)
# conv2d_transpose
reg.register_strategy("nn.conv2d_transpose", strategy.conv2d_transpose_strategy)
reg.register_pattern("nn.conv2d_transpose", OpPattern.OUT_ELEMWISE_FUSABLE)
@reg.register_legalize("nn.conv2d_transpose")
def legalize_conv2d_transpose(attrs, inputs, types):
"""Legalize conv2d_transpose op.
Parameters
----------
attrs : tvm.ir.Attrs
Attributes of current Transposed convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
types : list of types
List of input and output types
Returns
-------
result : tvm.relay.Expr
The legalized expr
"""
return topi.nn.conv2d_transpose_legalize(attrs, inputs, types)
@reg.register_convert_op_layout("nn.conv2d_transpose")
def convert_conv2d_transpose(attrs, inputs, tinfos, desired_layouts):
"""Convert Layout pass registration for conv2d_transpose op.
Parameters
----------
attrs : tvm.ir.Attrs
Attributes of current convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
tinfos : list of types
List of input and output types
desired_layouts : list of layout strings
List of layouts defining our desired
layout for the data and kernel inputs respectively.
Returns
-------
result : tvm.relay.Expr
The transformed expr
"""
# pylint: disable=import-outside-toplevel
from tvm import relay
data, weight = inputs
new_attrs = dict(attrs)
assert len(desired_layouts) == 2, "A desired layout is expected for both of nn.conv2d's inputs"
desired_data_layout, desired_kernel_layout = map(str, desired_layouts)
assert desired_data_layout != "default", "Data layout cannot be default"
new_attrs["data_layout"] = desired_data_layout
if desired_kernel_layout != "default":
new_attrs["kernel_layout"] = desired_kernel_layout
return relay.nn.conv2d_transpose(data, weight, **new_attrs)
# Handle default kernel layouts
if desired_data_layout == "NCHW":
new_attrs["kernel_layout"] = "OIHW"
return relay.nn.conv2d_transpose(data, weight, **new_attrs)
elif desired_data_layout == "NHWC":
new_attrs["kernel_layout"] = "HWIO"
return relay.nn.conv2d_transpose(data, weight, **new_attrs)
raise ValueError("Layout %s is not yet supported." % desired_data_layout)
# conv3d_transpose
reg.register_strategy("nn.conv3d_transpose", strategy.conv3d_transpose_strategy)
reg.register_pattern("nn.conv3d_transpose", OpPattern.OUT_ELEMWISE_FUSABLE)
@reg.register_legalize("nn.conv3d_transpose")
def legalize_conv3d_transpose(attrs, inputs, types):
"""Legalize conv3d_transpose op.
Parameters
----------
attrs : tvm.ir.Attrs
Attributes of current Transposed convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
types : list of types
List of input and output types
Returns
-------
result : tvm.relay.Expr
The legalized expr
"""
return topi.nn.conv3d_transpose_legalize(attrs, inputs, types)
# conv3d
reg.register_strategy("nn.conv3d", strategy.conv3d_strategy)
reg.register_pattern("nn.conv3d", OpPattern.OUT_ELEMWISE_FUSABLE)
@reg.register_alter_op_layout("nn.conv3d")
def alter_op_layout_conv3d(attrs, inputs, tinfos, out_type):
"""Alternate the layout of conv3d"""
return topi.nn.conv3d_alter_layout(attrs, inputs, tinfos, out_type)
@reg.register_convert_op_layout("nn.conv3d")
def convert_conv3d(attrs, inputs, tinfos, desired_layouts):
"""Convert Layout pass registration for conv3d op.
Parameters
----------
attrs : tvm.ir.Attrs
Attributes of current convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
tinfos : list of types
List of input and output types
desired_layouts : list of layout strings
List of layouts defining our desired
layout for the data and kernel inputs respectively.
Returns
-------
result : tvm.relay.Expr
The transformed expr
"""
# pylint: disable=import-outside-toplevel
from tvm import relay
data, weight = inputs
new_attrs = dict(attrs)
assert len(desired_layouts) == 2, "A desired layout is expected for both of nn.conv3d's inputs"
desired_data_layout, desired_kernel_layout = map(str, desired_layouts)
assert desired_data_layout != "default", "Data layout cannot be default"
new_attrs["data_layout"] = desired_data_layout
if desired_kernel_layout != "default":
new_attrs["kernel_layout"] = desired_kernel_layout
return relay.nn.conv3d(data, weight, **new_attrs)
# Handle default kernel layouts
if desired_data_layout == "NCDHW":
new_attrs["kernel_layout"] = "OIDHW"
return relay.nn.conv3d(data, weight, **new_attrs)
elif desired_data_layout == "NDHWC":
new_attrs["kernel_layout"] = "DHWIO"
return relay.nn.conv3d(data, weight, **new_attrs)
raise ValueError("Layout %s is not yet supported" % desired_data_layout)
# conv3d_winograd related operators
reg.register_strategy(
"nn.contrib_conv3d_winograd_without_weight_transform",
strategy.conv3d_winograd_without_weight_transfrom_strategy,
)
reg.register_pattern(
"nn.contrib_conv3d_winograd_without_weight_transform", OpPattern.OUT_ELEMWISE_FUSABLE
)
@reg.register_compute("nn.contrib_conv3d_winograd_weight_transform")
def compute_contrib_conv3d_winograd_weight_transform(attrs, inputs, out_dtype):
"""Compute definition of contrib_conv3d_winograd_weight_transform"""
out = topi.nn.conv3d_winograd_weight_transform(inputs[0], attrs.get_int("tile_size"))
return [out]
reg.register_schedule(
"nn.contrib_conv3d_winograd_weight_transform",
strategy.schedule_conv3d_winograd_weight_transform,
)
reg.register_pattern("nn.contrib_conv3d_winograd_weight_transform", OpPattern.OUT_ELEMWISE_FUSABLE)
# conv1d_transpose
reg.register_strategy("nn.conv1d_transpose", strategy.conv1d_transpose_strategy)
reg.register_pattern("nn.conv1d_transpose", OpPattern.OUT_ELEMWISE_FUSABLE)
# bias_add
reg.register_injective_schedule("nn.bias_add")
reg.register_pattern("nn.bias_add", OpPattern.BROADCAST)
# max_pool1d
reg.register_schedule("nn.max_pool1d", strategy.schedule_pool)
reg.register_pattern("nn.max_pool1d", OpPattern.OUT_ELEMWISE_FUSABLE)
# max_pool2d
reg.register_schedule("nn.max_pool2d", strategy.schedule_pool)
reg.register_pattern("nn.max_pool2d", OpPattern.OUT_ELEMWISE_FUSABLE)
# max_pool3d
reg.register_schedule("nn.max_pool3d", strategy.schedule_pool)
reg.register_pattern("nn.max_pool3d", OpPattern.OUT_ELEMWISE_FUSABLE)
# avg_pool1d
reg.register_schedule("nn.avg_pool1d", strategy.schedule_pool)
reg.register_pattern("nn.avg_pool1d", OpPattern.OUT_ELEMWISE_FUSABLE)
# avg_pool2d
reg.register_schedule("nn.avg_pool2d", strategy.schedule_pool)
reg.register_pattern("nn.avg_pool2d", OpPattern.OUT_ELEMWISE_FUSABLE)
# avg_pool3d
reg.register_schedule("nn.avg_pool3d", strategy.schedule_pool)
reg.register_pattern("nn.avg_pool3d", OpPattern.OUT_ELEMWISE_FUSABLE)
# max_pool2d_grad
reg.register_schedule("nn.max_pool2d_grad", strategy.schedule_pool_grad)
reg.register_pattern("nn.max_pool2d_grad", OpPattern.OUT_ELEMWISE_FUSABLE)
# avg_pool2d_grad
reg.register_schedule("nn.avg_pool2d_grad", strategy.schedule_pool_grad)
reg.register_pattern("nn.avg_pool2d_grad", OpPattern.OUT_ELEMWISE_FUSABLE)
# global_max_pool2d
reg.register_schedule("nn.global_max_pool2d", strategy.schedule_adaptive_pool)
reg.register_pattern("nn.global_max_pool2d", OpPattern.OUT_ELEMWISE_FUSABLE)
# global_avg_pool2d
reg.register_schedule("nn.global_avg_pool2d", strategy.schedule_adaptive_pool)
reg.register_pattern("nn.global_avg_pool2d", OpPattern.OUT_ELEMWISE_FUSABLE)
# adaptive_max_pool2d
reg.register_schedule("nn.adaptive_max_pool2d", strategy.schedule_adaptive_pool)
reg.register_pattern("nn.adaptive_max_pool2d", OpPattern.OUT_ELEMWISE_FUSABLE)
# adaptive_avg_pool2d
reg.register_schedule("nn.adaptive_avg_pool2d", strategy.schedule_adaptive_pool)
reg.register_pattern("nn.adaptive_avg_pool2d", OpPattern.OUT_ELEMWISE_FUSABLE)
# adaptive_max_pool3d
reg.register_schedule("nn.adaptive_max_pool3d", strategy.schedule_adaptive_pool)
reg.register_pattern("nn.adaptive_max_pool3d", OpPattern.OUT_ELEMWISE_FUSABLE)
# adaptive_avg_pool3d
reg.register_schedule("nn.adaptive_avg_pool3d", strategy.schedule_adaptive_pool)
reg.register_pattern("nn.adaptive_avg_pool3d", OpPattern.OUT_ELEMWISE_FUSABLE)
# leaky_relu
reg.register_broadcast_schedule("nn.leaky_relu")
reg.register_pattern("nn.leaky_relu", OpPattern.ELEMWISE)
# prelu
reg.register_broadcast_schedule("nn.prelu")
reg.register_pattern("nn.prelu", OpPattern.BROADCAST)
# flatten
reg.register_broadcast_schedule("nn.batch_flatten")
reg.register_pattern("nn.batch_flatten", OpPattern.INJECTIVE)
# lrn
@reg.register_compute("nn.lrn")
def compute_lrn(attrs, inputs, out_dtype):
"""Compute definition of lrn"""
assert len(inputs) == 1
return [topi.nn.lrn(inputs[0], attrs.size, attrs.axis, attrs.alpha, attrs.beta, attrs.bias)]
reg.register_schedule("nn.lrn", strategy.schedule_lrn)
reg.register_pattern("nn.lrn", OpPattern.OPAQUE)
# upsampling
@reg.register_compute("nn.upsampling")
def compute_upsampling(attrs, inputs, out_dtype):
scale_h = attrs.scale_h
scale_w = attrs.scale_w
layout = attrs.layout
method = attrs.method
align_corners = attrs.align_corners
return [topi.nn.upsampling(inputs[0], scale_h, scale_w, layout, method, align_corners)]
reg.register_injective_schedule("nn.upsampling")
# upsampling3d
@reg.register_compute("nn.upsampling3d")
def compute_upsampling3d(attrs, inputs, out_dtype):
scale_d = attrs.scale_d
scale_h = attrs.scale_h
scale_w = attrs.scale_w
layout = attrs.layout
method = attrs.method
coordinate_transformation_mode = attrs.coordinate_transformation_mode
return [
topi.nn.upsampling3d(
inputs[0], scale_d, scale_h, scale_w, layout, method, coordinate_transformation_mode
)
]
reg.register_injective_schedule("nn.upsampling3d")
# pad
reg.register_broadcast_schedule("nn.pad")
# mirror_pad
@reg.register_compute("nn.mirror_pad")
def compute_mirror_pad(attrs, inputs, out_dtype):
pad_before, pad_after = list(zip(*attrs.pad_width))
mode = attrs.mode
out = topi.nn.mirror_pad(inputs[0], pad_before=pad_before, pad_after=pad_after, mode=mode)
return [out]
reg.register_broadcast_schedule("nn.mirror_pad")
@script
def _mirror_pad_func(data_shape, pad_width):
out = output_tensor((data_shape.shape[0],), "int64")
for i in const_range(data_shape.shape[0]):
out[i] = data_shape[i] + int64(pad_width[i][0]) + int64(pad_width[i][1])
return out
@reg.register_shape_func("nn.mirror_pad", False)
def mirror_pad_func(attrs, inputs, _):
pad_width_tuple = [get_const_tuple(p) for p in attrs.pad_width]
return [_mirror_pad_func(inputs[0], convert(pad_width_tuple))]
# conv2d_winograd related operators
reg.register_strategy(
"nn.contrib_conv2d_winograd_without_weight_transform",
strategy.conv2d_winograd_without_weight_transfrom_strategy,
)
reg.register_pattern(
"nn.contrib_conv2d_winograd_without_weight_transform", OpPattern.OUT_ELEMWISE_FUSABLE
)
# conv2d_gemm related operators
reg.register_strategy(
"nn.contrib_conv2d_gemm_without_weight_transform",
strategy.conv2d_gemm_without_weight_transform_strategy,
)
reg.register_pattern(
"nn.contrib_conv2d_gemm_without_weight_transform", OpPattern.OUT_ELEMWISE_FUSABLE
)
@reg.register_compute("nn.contrib_conv2d_gemm_weight_transform")
def compute_contrib_conv2d_gemm_weight_transform(attrs, inputs, out_dtype):
"""Compute definition of contrib_conv2d_gemm_weight_transform"""
out = topi.nn.conv2d_gemm_weight_transform(inputs[0], attrs.tile_rows, attrs.tile_cols)
return [out]
reg.register_schedule(
"nn.contrib_conv2d_gemm_weight_transform", strategy.schedule_conv2d_gemm_weight_transform
)
reg.register_pattern("nn.contrib_conv2d_gemm_weight_transform", OpPattern.OUT_ELEMWISE_FUSABLE)
@reg.register_compute("nn.contrib_conv2d_winograd_weight_transform")
def compute_contrib_conv2d_winograd_weight_transform(attrs, inputs, out_dtype):
"""Compute definition of contrib_conv2d_winograd_weight_transform"""
out = topi.nn.conv2d_winograd_weight_transform(inputs[0], attrs.get_int("tile_size"))
return [out]
reg.register_schedule(
"nn.contrib_conv2d_winograd_weight_transform",
strategy.schedule_conv2d_winograd_weight_transform,
)
reg.register_pattern("nn.contrib_conv2d_winograd_weight_transform", OpPattern.OUT_ELEMWISE_FUSABLE)
@reg.register_compute("nn.contrib_conv2d_winograd_nnpack_weight_transform")
def compute_contrib_conv2d_winograd_nnpack_weight_transform(attrs, inputs, out_dtype):
|
reg.register_schedule(
"nn.contrib_conv2d_winograd_nnpack_weight_transform",
strategy.schedule_conv2d_winograd_nnpack_weight_transform,
)
reg.register_pattern("nn.contrib_conv2d_winograd_nnpack_weight_transform", OpPattern.OPAQUE)
# conv2d_NCHWc
reg.register_strategy("nn.contrib_conv2d_NCHWc", strategy.conv2d_NCHWc_strategy)
reg.register_pattern("nn.contrib_conv2d_NCHWc", OpPattern.OUT_ELEMWISE_FUSABLE)
# depthwise_conv2d_NCHWc
reg.register_strategy("nn.contrib_depthwise_conv2d_NCHWc", strategy.depthwise_conv2d_NCHWc_strategy)
reg.register_pattern("nn.contrib_depthwise_conv2d_NCHWc", OpPattern.OUT_ELEMWISE_FUSABLE)
# deformable_conv2d
reg.register_strategy("nn.deformable_conv2d", strategy.deformable_conv2d_strategy)
reg.register_pattern("nn.deformable_conv2d", OpPattern.OUT_ELEMWISE_FUSABLE)
# bitpack
@reg.register_compute("nn.bitpack")
def compute_bitpack(attrs, inputs, out_dtype):
"""Compute definition for bitpack"""
bits = attrs.bits
pack_axis = attrs.pack_axis
bit_axis = attrs.bit_axis
pack_type = attrs.pack_type
name = attrs.name
out = topi.nn.bitpack(inputs[0], bits, pack_axis, bit_axis, pack_type, name)
return [out]
reg.register_schedule("nn.bitpack", strategy.schedule_bitpack)
reg.register_pattern("nn.bitpack", OpPattern.INJECTIVE)
# bitserial_conv2d
reg.register_strategy("nn.bitserial_conv2d", strategy.bitserial_conv2d_strategy)
reg.register_pattern("nn.bitserial_conv2d", OpPattern.OUT_ELEMWISE_FUSABLE)
@reg.register_legalize("nn.bitserial_conv2d")
def legalize_bitserial_conv2d(attrs, inputs, types):
"""Legalize bitserial_conv2d op.
Parameters
----------
attrs : tvm.ir.Attrs
Attributes of current convolution
inputs : list of tvm.relay.Expr
The args of the Relay expr to be legalized
types : list of types
List of input and output types
Returns
-------
result : tvm.relay.Expr
The legalized expr
"""
return topi.nn.bitserial_conv2d_legalize(attrs, inputs, types)
# bitserial_dense
reg.register_strategy("nn.bitserial_dense", strategy.bitserial_dense_strategy)
reg.register_pattern("nn.bitserial_dense", reg.OpPattern.OUT_ELEMWISE_FUSABLE)
# cross_entropy
@reg.register_compute("nn.cross_entropy")
def compute_cross_entropy(attrs, inputs, out_dtype):
    """Compute definition of cross_entropy"""
x, y = inputs
return [-topi.sum(topi.log(x) * y) / x.shape[0]]
reg.register_reduce_schedule("nn.cross_entropy")
reg.register_pattern("nn.cross_entropy", OpPattern.OPAQUE)
# dilate
@reg.register_compute("nn.dilate")
def compute_dilate(attrs, inputs, out_dtype):
    """Compute definition of dilate"""
return [topi.nn.dilate(inputs[0], attrs.strides, attrs.dilation_value)]
reg.register_broadcast_schedule("nn.dilate")
reg.register_pattern("nn.dilate", OpPattern.INJECTIVE)
# cross_entropy_with_logits
@reg.register_compute("nn.cross_entropy_with_logits")
def compute_cross_entropy_with_logits(attrs, inputs, out_dtype):
    """Compute definition of cross_entropy_with_logits"""
x, y = inputs
return [-topi.sum(x * y) / x.shape[0]]
reg.register_reduce_schedule("nn.cross_entropy_with_logits")
reg.register_pattern("nn.cross_entropy_with_logits", OpPattern.OPAQUE)
# depth_to_space
@reg.register_compute("nn.depth_to_space")
def compute_depth_to_space(attrs, inputs, out_dtype):
    """Compute definition of depth_to_space"""
block_size = attrs.block_size
layout = attrs.layout
mode = attrs.mode
return [topi.nn.depth_to_space(inputs[0], block_size, layout=layout, mode=mode)]
reg.register_injective_schedule("nn.depth_to_space")
reg.register_pattern("nn.depth_to_space", OpPattern.INJECTIVE)
# space_to_depth
@reg.register_compute("nn.space_to_depth")
def compute_space_to_depth(attrs, inputs, out_dtype):
    """Compute definition of space_to_depth"""
block_size = attrs.block_size
layout = attrs.layout
return [topi.nn.space_to_depth(inputs[0], block_size, layout=layout)]
reg.register_injective_schedule("nn.space_to_depth")
reg.register_pattern("nn.space_to_depth", OpPattern.INJECTIVE)
# correlation
reg.register_strategy("nn.correlation", strategy.correlation_strategy)
reg.register_pattern("nn.correlation", OpPattern.OUT_ELEMWISE_FUSABLE)
# space_to_batch_nd and batch_to_space_nd
reg.register_injective_schedule("nn.space_to_batch_nd")
reg.register_injective_schedule("nn.batch_to_space_nd")
#####################
# Shape functions #
#####################
@script
def _conv_shape_func(dshape, kshape, strides, padding, dilation):
out = output_tensor((dshape.shape[0],), "int64")
out[0] = dshape[0]
out[1] = kshape[0]
for i in const_range(dshape.shape[0] - 2):
dilated_k = (kshape[i + 2] - 1) * dilation[i] + 1
out[i + 2] = (dshape[i + 2] + 2 * padding[i] - dilated_k) // strides[i] + 1
return out
def conv_shape_func(attrs, inputs, _):
"""
    Shape function for conv1d, conv2d and conv3d ops.
"""
strides = get_const_tuple(attrs.strides)
padding = get_const_tuple(attrs.padding)
dilation = get_const_tuple(attrs.dilation)
return [
_conv_shape_func(
inputs[0],
inputs[1],
convert(strides),
convert(padding),
convert(dilation),
)
]
reg.register_shape_func("nn.conv1d", False, conv_shape_func)
reg.register_shape_func("nn.conv2d", False, conv_shape_func)
reg.register_shape_func("nn.conv3d", False, conv_shape_func)
@script
def _conv2d_NCHWc_shape_func(dshape, kshape, strides, padding, dilation, oc_bn):
out = output_tensor((dshape.shape[0],), "int64")
ic_chunk = dshape[1]
height = dshape[2]
width = dshape[3]
ic_bn = dshape[4]
kheight = kshape[2]
kwidth = kshape[3]
dilated_kh = (kheight - 1) * dilation[0] + 1
dilated_kw = (kwidth - 1) * dilation[1] + 1
kflatten = int64(1)
for i in const_range(kshape.shape[0]):
kflatten *= kshape[i]
oc = kflatten // (kheight * kwidth * ic_chunk * ic_bn)
oc_chunk = oc // oc_bn
out_height = (height + 2 * padding[0] - dilated_kh) // strides[0] + 1
out_width = (width + 2 * padding[1] - dilated_kw) // strides[1] + 1
out[0] = dshape[0]
out[1] = oc_chunk
out[2] = out_height
out[3] = out_width
out[4] = int64(oc_bn)
return out
@reg.register_shape_func("nn.contrib_conv2d_NCHWc", False)
def conv2d_NCHWc_shape_func(attrs, inputs, _):
"""
Shape function for contrib_conv2d_NCHWc op.
"""
strides = get_const_tuple(attrs.strides)
padding = get_const_tuple(attrs.padding)
dilation = get_const_tuple(attrs.dilation)
out_layout = attrs.out_layout
oc_bn = int(out_layout[4:-1])
return [
_conv2d_NCHWc_shape_func(
inputs[0],
inputs[1],
convert(strides),
convert(padding),
convert(dilation),
convert(oc_bn),
)
]
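# Illustrative example for the NCHWc shape rule above (assumed values): data
# (1, 1, 56, 56, 16) with a packed OIHW16i16o kernel (2, 1, 3, 3, 16, 16),
# strides (1, 1), padding (1, 1), dilation (1, 1) and out_layout "NCHW16c"
# gives oc = (2 * 1 * 3 * 3 * 16 * 16) // (3 * 3 * 1 * 16) = 32, oc_bn = 16,
# oc_chunk = 2 and out_h = out_w = (56 + 2 - 3) // 1 + 1 = 56, i.e. an output
# shape of (1, 2, 56, 56, 16).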
@script
def _conv2d_transpose_nchw_shape_func(dshape, kshape, strides, padding, dilation, output_padding):
out = output_tensor((dshape.shape[0],), "int64")
kheight = kshape[2]
kwidth = kshape[3]
dilated_kh = (kheight - 1) * dilation[0] + 1
dilated_kw = (kwidth - 1) * dilation[1] + 1
out_height = strides[0] * (dshape[2] - 1) + dilated_kh - 2 * padding[0] + output_padding[0]
out_width = strides[1] * (dshape[3] - 1) + dilated_kw - 2 * padding[1] + output_padding[1]
out[0] = dshape[0]
out[1] = kshape[1]
out[2] = out_height
out[3] = out_width
return out
@reg.register_shape_func("nn.conv2d_transpose", False)
def conv2d_transpose_nchw_shape_func(attrs, inputs, _):
"""
Shape function for conv2d_transpose op.
"""
strides = get_const_tuple(attrs.strides)
padding = get_const_tuple(attrs.padding)
dilation = get_const_tuple(attrs.dilation)
output_padding = get_const_tuple(attrs.output_padding)
return [
_conv2d_transpose_nchw_shape_func(
inputs[0],
inputs[1],
convert(strides),
convert(padding),
convert(dilation),
convert(output_padding),
)
]
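# Illustrative example for the transposed-conv shape rule above (assumed
# values): data (1, 16, 32, 32), IOHW kernel (16, 8, 4, 4), strides (2, 2),
# padding (1, 1), dilation (1, 1), output_padding (0, 0) gives
# out_h = out_w = 2 * (32 - 1) + 4 - 2 * 1 + 0 = 64, i.e. (1, 8, 64, 64).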
@script
def _pool2d_shape_func(data_shape, pool_size, strides, padding, height_axis, width_axis):
out = output_tensor((data_shape.shape[0],), "int64")
for i in const_range(data_shape.shape[0]):
if i == height_axis:
out[i] = (data_shape[i] + padding[0] + padding[2] - pool_size[0]) // strides[0] + 1
elif i == width_axis:
out[i] = (data_shape[i] + padding[1] + padding[3] - pool_size[1]) // strides[1] + 1
else:
out[i] = data_shape[i]
return out
def pool2d_shape_func(attrs, inputs, _):
"""
Shape function for pool2d op.
"""
pool_size = get_const_tuple(attrs.pool_size)
strides = get_const_tuple(attrs.strides)
padding = get_const_tuple(attrs.padding)
layout = attrs.layout
height_axis = layout.index("H")
width_axis = layout.index("W")
    # Normalize padding to 4 values: (pad_top, pad_left, pad_bottom, pad_right).
    if len(padding) == 1:
        padding = [padding[0]] * 4
    elif len(padding) == 2:
        padding = [padding[0], padding[1], padding[0], padding[1]]
return [
_pool2d_shape_func(
inputs[0],
convert(pool_size),
convert(strides),
convert(padding),
convert(height_axis),
convert(width_axis),
)
]
reg.register_shape_func("nn.max_pool2d", False, pool2d_shape_func)
reg.register_shape_func("nn.avg_pool2d", False, pool2d_shape_func)
@script
def _global_pool2d_shape_func(data_shape, height_axis, width_axis):
out = output_tensor((data_shape.shape[0],), "int64")
for i in const_range(out.shape[0]):
if i == height_axis or i == width_axis:
out[i] = int64(1)
else:
out[i] = data_shape[i]
return out
def global_pool2d_shape_func(attrs, inputs, _):
"""
Shape function for global pool2d op.
"""
layout = attrs.layout
height_axis = width_axis = 1
for i, letter in enumerate(layout):
if letter == "H":
height_axis = i
if letter == "W":
width_axis = i
return [_global_pool2d_shape_func(inputs[0], convert(height_axis), convert(width_axis))]
reg.register_shape_func("nn.global_max_pool2d", False, global_pool2d_shape_func)
reg.register_shape_func("nn.global_avg_pool2d", False, global_pool2d_shape_func)
@script
def _batch_flatten_shape_func(data_shape):
out = output_tensor((2,), "int64")
out[0] = data_shape[0]
out[1] = int64(1)
for i in const_range(data_shape.shape[0] - 1):
out[1] *= data_shape[i + 1]
return out
@reg.register_shape_func("nn.batch_flatten", False)
def batch_flatten_shape_func(attrs, inputs, _):
"""
Shape function for batch_flatten op.
"""
return [_batch_flatten_shape_func(inputs[0])]
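# Illustrative example (assumed values): batch_flatten keeps the batch axis
# and collapses the rest, so a (2, 3, 4, 5) input becomes
# (2, 3 * 4 * 5) = (2, 60).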
@script
def _dense_shape_func(data_shape, weight_shape):
out = output_tensor((data_shape.shape[0],), "int64")
for i in const_range(out.shape[0] - 1):
out[i] = data_shape[i]
out[out.shape[0] - 1] = weight_shape[0]
return out
@reg.register_shape_func("nn.dense", False)
def dense_shape_func(attrs, inputs, _):
"""
Shape function for dense op.
"""
ret = [_dense_shape_func(inputs[0], inputs[1])]
return ret
@script
def _batch_matmul_shape_func(data_shape, weight_shape):
out = output_tensor((data_shape.shape[0],), "int64")
for i in const_range(out.shape[0] - 1):
if i == 0:
out[i] = max(data_shape[i], weight_shape[i])
else:
out[i] = data_shape[i]
out[out.shape[0] - 1] = weight_shape[weight_shape.shape[0] - 2]
return out
@reg.register_shape_func("nn.batch_matmul", False)
def batch_matmul_shape_func(attrs, inputs, _):
"""
    Shape function for batch_matmul op.
"""
ret = [_batch_matmul_shape_func(inputs[0], inputs[1])]
return ret
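# Illustrative example (assumed values): the shape rule above broadcasts the
# batch axis and takes the output's last axis from y's second-to-last axis
# (batch_matmul treats y as transposed), so x of shape (1, 16, 32) and y of
# shape (4, 8, 32) give a batch of max(1, 4) = 4 and an output of (4, 16, 8).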
@script
def _pad_shape_func(data_shape, pad_width):
out = output_tensor((data_shape.shape[0],), "int64")
for i in const_range(out.shape[0]):
out[i] = data_shape[i] + pad_width[i][0] + pad_width[i][1]
return out
@reg.register_shape_func("nn.pad", False)
def pad_shape_func(attrs, inputs, _):
"""
Shape function for pad op.
"""
pad_width = []
for pair in attrs.pad_width:
pad_width.append(get_const_tuple(pair))
return [_pad_shape_func(inputs[0], convert(pad_width))]
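# Illustrative example (assumed values): data (1, 3, 32, 32) with
# pad_width ((0, 0), (0, 0), (2, 2), (1, 1)) adds both ends of each pair to
# the matching axis, giving (1, 3, 32 + 4, 32 + 2) = (1, 3, 36, 34).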
@script
def _dilate_shape_func(data_shape, strides):
out = output_tensor((data_shape.shape[0],), "int64")
for i in const_range(out.shape[0]):
out[i] = (data_shape[i] - 1) * strides[i] + 1
return out
@reg.register_shape_func("nn.dilate", False)
def dilate_shape_func(attrs, inputs, _):
"""
Shape function for dilate op.
"""
return [_dilate_shape_func(inputs[0], convert(attrs.strides))]
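# Illustrative example (assumed values): dilating data of shape (1, 3, 5, 5)
# with strides (1, 1, 2, 2) gives out[i] = (d[i] - 1) * s[i] + 1, i.e.
# (1, 3, (5 - 1) * 2 + 1, (5 - 1) * 2 + 1) = (1, 3, 9, 9).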
reg.register_shape_func("nn.bias_add", False, elemwise_shape_func)
reg.register_shape_func("nn.softmax", False, elemwise_shape_func)
reg.register_shape_func("nn.relu", False, elemwise_shape_func)
| """Compute definition of contrib_conv2d_winograd_nnpack_weight_transform"""
convolution_algorithm = attrs.get_int("convolution_algorithm")
out = topi.nn.conv2d_winograd_nnpack_weight_transform(
inputs[0], convolution_algorithm, out_dtype
)
return [out] |
test_volume.py | # coding: utf-8
"""
Seldon Deploy API
API to interact and manage the lifecycle of your machine learning models deployed through Seldon Deploy. # noqa: E501
OpenAPI spec version: v1alpha1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import seldon_deploy_sdk
from seldon_deploy_sdk.models.volume import Volume # noqa: E501
from seldon_deploy_sdk.rest import ApiException
class TestVolume(unittest.TestCase):
"""Volume unit test stubs"""
def setUp(self):
|
def tearDown(self):
pass
def testVolume(self):
"""Test Volume"""
# FIXME: construct object with mandatory attributes with example values
# model = seldon_deploy_sdk.models.volume.Volume() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| pass |
exploration-player-page.constants.ts | // Copyright 2019 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Constants to be used in the learner view.
*/
var oppia = require('AppInit.ts').module;
oppia.constant('CONTENT_FOCUS_LABEL_PREFIX', 'content-focus-label-');
oppia.constant('TWO_CARD_THRESHOLD_PX', 960);
oppia.constant('CONTINUE_BUTTON_FOCUS_LABEL', 'continueButton');
/* Called when a new audio-equippable component is loaded and displayed
to the user, allowing for the automatic playing of audio if necessary. */
oppia.constant('EVENT_AUTOPLAY_AUDIO', 'autoPlayAudio');
// The enforced waiting period before the first hint request.
oppia.constant('WAIT_FOR_FIRST_HINT_MSEC', 60000);
// The enforced waiting period before each of the subsequent hint requests.
oppia.constant('WAIT_FOR_SUBSEQUENT_HINTS_MSEC', 30000);
// The time delay between the learner clicking the hint button
// and the appearance of the hint.
oppia.constant('DELAY_FOR_HINT_FEEDBACK_MSEC', 100);
// Array of i18n IDs for the possible hint request strings.
oppia.constant(
'HINT_REQUEST_STRING_I18N_IDS', [
'I18N_PLAYER_HINT_REQUEST_STRING_1',
'I18N_PLAYER_HINT_REQUEST_STRING_2', | /* This should match the CSS class defined in the tutor card directive. */
oppia.constant(
'AUDIO_HIGHLIGHT_CSS_CLASS', 'conversation-skin-audio-highlight');
oppia.constant(
'FLAG_EXPLORATION_URL_TEMPLATE', '/flagexplorationhandler/<exploration_id>');
// TODO(bhenning): Find a better place for these constants.
// NOTE TO DEVELOPERS: These constants must be the same (in name and value) as
// the corresponding classification constants defined in core.domain.exp_domain.
oppia.constant('EXPLICIT_CLASSIFICATION', 'explicit');
oppia.constant('TRAINING_DATA_CLASSIFICATION', 'training_data_match');
oppia.constant('STATISTICAL_CLASSIFICATION', 'statistical_classifier');
oppia.constant('DEFAULT_OUTCOME_CLASSIFICATION', 'default_outcome');
oppia.constant('EXPLORATION_MODE', {
EXPLORATION: 'exploration',
PRETEST: 'pretest',
QUESTION_PLAYER: 'question_player',
STORY_CHAPTER: 'story_chapter',
OTHER: 'other'
});
oppia.constant('STATS_EVENT_TYPES', {
EVENT_TYPE_START_EXPLORATION: 'start',
EVENT_TYPE_ACTUAL_START_EXPLORATION: 'actual_start',
EVENT_TYPE_COMPLETE_EXPLORATION: 'complete',
EVENT_TYPE_STATE_HIT: 'state_hit',
EVENT_TYPE_STATE_COMPLETED: 'state_complete',
EVENT_TYPE_ANSWER_SUBMITTED: 'answer_submitted',
EVENT_TYPE_SOLUTION_HIT: 'solution_hit',
EVENT_TYPE_LEAVE_FOR_REFRESHER_EXP: 'leave_for_refresher_exp',
});
oppia.constant('STATS_REPORTING_URLS', {
ANSWER_SUBMITTED: '/explorehandler/answer_submitted_event/<exploration_id>',
EXPLORATION_COMPLETED: (
'/explorehandler/exploration_complete_event/<exploration_id>'),
EXPLORATION_MAYBE_LEFT: (
'/explorehandler/exploration_maybe_leave_event/<exploration_id>'),
EXPLORATION_STARTED: (
'/explorehandler/exploration_start_event/<exploration_id>'),
STATE_HIT: '/explorehandler/state_hit_event/<exploration_id>',
STATE_COMPLETED: '/explorehandler/state_complete_event/<exploration_id>',
EXPLORATION_ACTUALLY_STARTED: (
'/explorehandler/exploration_actual_start_event/<exploration_id>'),
SOLUTION_HIT: '/explorehandler/solution_hit_event/<exploration_id>',
LEAVE_FOR_REFRESHER_EXP: (
'/explorehandler/leave_for_refresher_exp_event/<exploration_id>'),
STATS_EVENTS: '/explorehandler/stats_events/<exploration_id>'
});
oppia.constant('FEEDBACK_POPOVER_PATH',
'/pages/exploration-player-page/templates/' +
'feedback-popup-container.template.html'); | 'I18N_PLAYER_HINT_REQUEST_STRING_3']);
|
list_users_of_skill_group.go | package ccc
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | //limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// ListUsersOfSkillGroup invokes the ccc.ListUsersOfSkillGroup API synchronously
func (client *Client) ListUsersOfSkillGroup(request *ListUsersOfSkillGroupRequest) (response *ListUsersOfSkillGroupResponse, err error) {
response = CreateListUsersOfSkillGroupResponse()
err = client.DoAction(request, response)
return
}
// ListUsersOfSkillGroupWithChan invokes the ccc.ListUsersOfSkillGroup API asynchronously
func (client *Client) ListUsersOfSkillGroupWithChan(request *ListUsersOfSkillGroupRequest) (<-chan *ListUsersOfSkillGroupResponse, <-chan error) {
responseChan := make(chan *ListUsersOfSkillGroupResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.ListUsersOfSkillGroup(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// ListUsersOfSkillGroupWithCallback invokes the ccc.ListUsersOfSkillGroup API asynchronously
func (client *Client) ListUsersOfSkillGroupWithCallback(request *ListUsersOfSkillGroupRequest, callback func(response *ListUsersOfSkillGroupResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *ListUsersOfSkillGroupResponse
var err error
defer close(result)
response, err = client.ListUsersOfSkillGroup(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// ListUsersOfSkillGroupRequest is the request struct for api ListUsersOfSkillGroup
type ListUsersOfSkillGroupRequest struct {
*requests.RpcRequest
PageNumber requests.Integer `position:"Query" name:"PageNumber"`
InstanceId string `position:"Query" name:"InstanceId"`
SkillGroupId string `position:"Query" name:"SkillGroupId"`
PageSize requests.Integer `position:"Query" name:"PageSize"`
}
// ListUsersOfSkillGroupResponse is the response struct for api ListUsersOfSkillGroup
type ListUsersOfSkillGroupResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
Success bool `json:"Success" xml:"Success"`
Code string `json:"Code" xml:"Code"`
Message string `json:"Message" xml:"Message"`
HttpStatusCode int `json:"HttpStatusCode" xml:"HttpStatusCode"`
Users Users `json:"Users" xml:"Users"`
}
// CreateListUsersOfSkillGroupRequest creates a request to invoke ListUsersOfSkillGroup API
func CreateListUsersOfSkillGroupRequest() (request *ListUsersOfSkillGroupRequest) {
request = &ListUsersOfSkillGroupRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("CCC", "2017-07-05", "ListUsersOfSkillGroup", "", "")
request.Method = requests.POST
return
}
// CreateListUsersOfSkillGroupResponse creates a response to parse from ListUsersOfSkillGroup response
func CreateListUsersOfSkillGroupResponse() (response *ListUsersOfSkillGroupResponse) {
response = &ListUsersOfSkillGroupResponse{
BaseResponse: &responses.BaseResponse{},
}
return
} | //See the License for the specific language governing permissions and |
instanceattachment.go | /*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by lister-gen. DO NOT EDIT.
package v1alpha1
import (
v1alpha1 "kubeform.dev/provider-alicloud-api/apis/cen/v1alpha1"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/client-go/tools/cache"
)
// InstanceAttachmentLister helps list InstanceAttachments.
// All objects returned here must be treated as read-only.
type InstanceAttachmentLister interface {
// List lists all InstanceAttachments in the indexer.
// Objects returned here must be treated as read-only.
List(selector labels.Selector) (ret []*v1alpha1.InstanceAttachment, err error)
// InstanceAttachments returns an object that can list and get InstanceAttachments.
InstanceAttachments(namespace string) InstanceAttachmentNamespaceLister
InstanceAttachmentListerExpansion
}
// instanceAttachmentLister implements the InstanceAttachmentLister interface.
type instanceAttachmentLister struct {
indexer cache.Indexer
}
// NewInstanceAttachmentLister returns a new InstanceAttachmentLister.
func NewInstanceAttachmentLister(indexer cache.Indexer) InstanceAttachmentLister {
return &instanceAttachmentLister{indexer: indexer}
}
// List lists all InstanceAttachments in the indexer.
func (s *instanceAttachmentLister) List(selector labels.Selector) (ret []*v1alpha1.InstanceAttachment, err error) {
err = cache.ListAll(s.indexer, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha1.InstanceAttachment))
})
return ret, err
}
// InstanceAttachments returns an object that can list and get InstanceAttachments.
func (s *instanceAttachmentLister) InstanceAttachments(namespace string) InstanceAttachmentNamespaceLister {
return instanceAttachmentNamespaceLister{indexer: s.indexer, namespace: namespace}
}
// InstanceAttachmentNamespaceLister helps list and get InstanceAttachments.
// All objects returned here must be treated as read-only.
type InstanceAttachmentNamespaceLister interface {
// List lists all InstanceAttachments in the indexer for a given namespace.
// Objects returned here must be treated as read-only.
List(selector labels.Selector) (ret []*v1alpha1.InstanceAttachment, err error)
// Get retrieves the InstanceAttachment from the indexer for a given namespace and name.
// Objects returned here must be treated as read-only.
Get(name string) (*v1alpha1.InstanceAttachment, error)
InstanceAttachmentNamespaceListerExpansion
}
// instanceAttachmentNamespaceLister implements the InstanceAttachmentNamespaceLister
// interface.
type instanceAttachmentNamespaceLister struct {
indexer cache.Indexer
namespace string
}
// List lists all InstanceAttachments in the indexer for a given namespace.
func (s instanceAttachmentNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.InstanceAttachment, err error) {
err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha1.InstanceAttachment))
})
return ret, err
}
// Get retrieves the InstanceAttachment from the indexer for a given namespace and name.
func (s instanceAttachmentNamespaceLister) Get(name string) (*v1alpha1.InstanceAttachment, error) {
obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name)
if err != nil {
return nil, err
}
if !exists |
return obj.(*v1alpha1.InstanceAttachment), nil
}
| {
return nil, errors.NewNotFound(v1alpha1.Resource("instanceattachment"), name)
} |
params_test.go | package kas
import (
"context"
"testing"
. "github.com/onsi/gomega"
"k8s.io/utils/pointer"
hyperv1 "github.com/openshift/hypershift/api/v1alpha1"
"github.com/openshift/hypershift/support/config"
"github.com/openshift/hypershift/support/globalconfig"
)
// TODO (cewong): Add tests for other params
func | (t *testing.T) {
tests := []struct {
name string
advertiseAddress *string
port *int32
expectedAddress string
expectedPort int32
}{
{
name: "not specified",
expectedAddress: config.DefaultAdvertiseAddress,
expectedPort: config.DefaultAPIServerPort,
},
{
name: "address specified",
advertiseAddress: pointer.StringPtr("1.2.3.4"),
expectedAddress: "1.2.3.4",
expectedPort: config.DefaultAPIServerPort,
},
{
name: "port set",
port: pointer.Int32Ptr(6789),
expectedAddress: config.DefaultAdvertiseAddress,
expectedPort: 6789,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
hcp := &hyperv1.HostedControlPlane{}
hcp.Spec.APIAdvertiseAddress = test.advertiseAddress
hcp.Spec.APIPort = test.port
p := NewKubeAPIServerParams(context.Background(), hcp, globalconfig.GlobalConfig{}, map[string]string{}, "", 0, "", 0, false)
g := NewGomegaWithT(t)
g.Expect(p.AdvertiseAddress).To(Equal(test.expectedAddress))
g.Expect(p.APIServerPort).To(Equal(test.expectedPort))
})
}
}
| TestNewAPIServerParamsAPIAdvertiseAddressAndPort |
assessments.go | package security
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// AssessmentsClient is the API spec for Microsoft.Security (Azure Security Center) resource provider
type AssessmentsClient struct {
BaseClient
}
// NewAssessmentsClient creates an instance of the AssessmentsClient client.
func NewAssessmentsClient(subscriptionID string, ascLocation string) AssessmentsClient {
return NewAssessmentsClientWithBaseURI(DefaultBaseURI, subscriptionID, ascLocation)
}
// NewAssessmentsClientWithBaseURI creates an instance of the AssessmentsClient client using a custom endpoint. Use
// this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewAssessmentsClientWithBaseURI(baseURI string, subscriptionID string, ascLocation string) AssessmentsClient |
// CreateOrUpdate create a security assessment on your resource. An assessment metadata that describes this assessment
// must be predefined with the same name before inserting the assessment result
// Parameters:
// resourceID - the identifier of the resource.
// assessmentName - the Assessment Key - Unique key for the assessment type
// assessment - calculated assessment on a pre-defined assessment metadata
func (client AssessmentsClient) CreateOrUpdate(ctx context.Context, resourceID string, assessmentName string, assessment Assessment) (result Assessment, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/AssessmentsClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: assessment,
Constraints: []validation.Constraint{{Target: "assessment.AssessmentProperties", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "assessment.AssessmentProperties.ResourceDetails", Name: validation.Null, Rule: true, Chain: nil},
{Target: "assessment.AssessmentProperties.Status", Name: validation.Null, Rule: true, Chain: nil},
{Target: "assessment.AssessmentProperties.Metadata", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "assessment.AssessmentProperties.Metadata.DisplayName", Name: validation.Null, Rule: true, Chain: nil},
{Target: "assessment.AssessmentProperties.Metadata.PartnerData", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "assessment.AssessmentProperties.Metadata.PartnerData.PartnerName", Name: validation.Null, Rule: true, Chain: nil},
{Target: "assessment.AssessmentProperties.Metadata.PartnerData.Secret", Name: validation.Null, Rule: true, Chain: nil},
}},
}},
{Target: "assessment.AssessmentProperties.PartnersData", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "assessment.AssessmentProperties.PartnersData.PartnerName", Name: validation.Null, Rule: true, Chain: nil},
{Target: "assessment.AssessmentProperties.PartnersData.Secret", Name: validation.Null, Rule: true, Chain: nil},
}},
}}}}}); err != nil {
return result, validation.NewError("security.AssessmentsClient", "CreateOrUpdate", err.Error())
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceID, assessmentName, assessment)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
resp, err := client.CreateOrUpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "CreateOrUpdate", resp, "Failure sending request")
return
}
result, err = client.CreateOrUpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "CreateOrUpdate", resp, "Failure responding to request")
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client AssessmentsClient) CreateOrUpdatePreparer(ctx context.Context, resourceID string, assessmentName string, assessment Assessment) (*http.Request, error) {
pathParameters := map[string]interface{}{
"assessmentName": autorest.Encode("path", assessmentName),
"resourceId": resourceID,
}
const APIVersion = "2020-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/{resourceId}/providers/Microsoft.Security/assessments/{assessmentName}", pathParameters),
autorest.WithJSON(assessment),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client AssessmentsClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client AssessmentsClient) CreateOrUpdateResponder(resp *http.Response) (result Assessment, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete delete a security assessment on your resource. An assessment metadata that describes this assessment must be
// predefined with the same name before inserting the assessment result
// Parameters:
// resourceID - the identifier of the resource.
// assessmentName - the Assessment Key - Unique key for the assessment type
func (client AssessmentsClient) Delete(ctx context.Context, resourceID string, assessmentName string) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/AssessmentsClient.Delete")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.DeletePreparer(ctx, resourceID, assessmentName)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "Delete", nil, "Failure preparing request")
return
}
resp, err := client.DeleteSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "Delete", resp, "Failure sending request")
return
}
result, err = client.DeleteResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "Delete", resp, "Failure responding to request")
}
return
}
// DeletePreparer prepares the Delete request.
func (client AssessmentsClient) DeletePreparer(ctx context.Context, resourceID string, assessmentName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"assessmentName": autorest.Encode("path", assessmentName),
"resourceId": resourceID,
}
const APIVersion = "2020-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/{resourceId}/providers/Microsoft.Security/assessments/{assessmentName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client AssessmentsClient) DeleteSender(req *http.Request) (*http.Response, error) {
return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client AssessmentsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get get a security assessment on your scanned resource
// Parameters:
// resourceID - the identifier of the resource.
// assessmentName - the Assessment Key - Unique key for the assessment type
// expand - oData expand. Optional.
func (client AssessmentsClient) Get(ctx context.Context, resourceID string, assessmentName string, expand ExpandEnum) (result Assessment, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/AssessmentsClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, resourceID, assessmentName, expand)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client AssessmentsClient) GetPreparer(ctx context.Context, resourceID string, assessmentName string, expand ExpandEnum) (*http.Request, error) {
pathParameters := map[string]interface{}{
"assessmentName": autorest.Encode("path", assessmentName),
"resourceId": resourceID,
}
const APIVersion = "2020-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(string(expand)) > 0 {
queryParameters["$expand"] = autorest.Encode("query", expand)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/{resourceId}/providers/Microsoft.Security/assessments/{assessmentName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client AssessmentsClient) GetSender(req *http.Request) (*http.Response, error) {
return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client AssessmentsClient) GetResponder(resp *http.Response) (result Assessment, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// List get security assessments on all your scanned resources inside a scope
// Parameters:
// scope - scope of the query, can be subscription (/subscriptions/0b06d9ea-afe6-4779-bd59-30e5c2d9d13f) or
// management group (/providers/Microsoft.Management/managementGroups/mgName).
func (client AssessmentsClient) List(ctx context.Context, scope string) (result AssessmentListPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/AssessmentsClient.List")
defer func() {
sc := -1
if result.al.Response.Response != nil {
sc = result.al.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx, scope)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.al.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "List", resp, "Failure sending request")
return
}
result.al, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "List", resp, "Failure responding to request")
}
if result.al.hasNextLink() && result.al.IsEmpty() {
err = result.NextWithContext(ctx)
}
return
}
// ListPreparer prepares the List request.
func (client AssessmentsClient) ListPreparer(ctx context.Context, scope string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"scope": scope,
}
const APIVersion = "2020-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/{scope}/providers/Microsoft.Security/assessments", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client AssessmentsClient) ListSender(req *http.Request) (*http.Response, error) {
return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client AssessmentsClient) ListResponder(resp *http.Response) (result AssessmentList, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client AssessmentsClient) listNextResults(ctx context.Context, lastResults AssessmentList) (result AssessmentList, err error) {
req, err := lastResults.assessmentListPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "security.AssessmentsClient", "listNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "security.AssessmentsClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "security.AssessmentsClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client AssessmentsClient) ListComplete(ctx context.Context, scope string) (result AssessmentListIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/AssessmentsClient.List")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.List(ctx, scope)
return
}
| {
return AssessmentsClient{NewWithBaseURI(baseURI, subscriptionID, ascLocation)}
} |
settings-view.js | /**
* Search Console Settings View component
*
* Site Kit by Google, Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* WordPress dependencies
*/
import { __ } from '@wordpress/i18n';
/**
* Internal dependencies
*/
import Data from 'googlesitekit-data';
import DisplaySetting from '../../../components/display-setting';
import { STORE_NAME } from '../datastore/constants';
const { useSelect } = Data;
export default function SettingsView() {
const propertyID = useSelect( ( select ) => select( STORE_NAME ).getPropertyID() );
return (
<div className="mdc-layout-grid__cell mdc-layout-grid__cell--span-12">
<h5 className="googlesitekit-settings-module__meta-item-type">
{ __( 'Connected Property', 'google-site-kit' ) }
</h5>
<p className="googlesitekit-settings-module__meta-item-data">
<DisplaySetting value={ propertyID } />
</p>
</div>
);
} | * https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, |
main.go | // Copyright 2020 Coinbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"context"
"log"
"github.com/keninqiu/rosetta-sdk-go/fetcher"
"github.com/keninqiu/rosetta-sdk-go/types"
)
const (
// serverURL is the URL of a Rosetta Server.
serverURL = "http://localhost:8080" | )
func main() {
ctx := context.Background()
// Step 1: Create a new fetcher
newFetcher := fetcher.New(
serverURL,
)
// Step 2: Initialize the fetcher's asserter
//
// Behind the scenes this makes a call to get the
// network status and uses the response to inform
// the asserter what are valid responses.
primaryNetwork, networkStatus, err := newFetcher.InitializeAsserter(ctx, nil, "")
if err != nil {
log.Fatal(err)
}
// Step 3: Print the primary network and network status
log.Printf("Primary Network: %s\n", types.PrettyPrintStruct(primaryNetwork))
log.Printf("Network Status: %s\n", types.PrettyPrintStruct(networkStatus))
// Step 4: Fetch the current block with retries (automatically
// asserted for correctness)
//
// It is important to note that this assertion only ensures
// required fields are populated and that operations
// in the block only use types and statuses that were
// provided in the networkStatusResponse. To run more
// intensive validation, use the Rosetta Validator. It
// can be found at: https://github.com/coinbase/rosetta-validator
//
// On another note, notice that fetcher.BlockRetry
// automatically fetches all transactions that are
// returned in BlockResponse.OtherTransactions. If you use
// the client directly, you will need to implement a mechanism
// to fully populate the block by fetching all these
// transactions.
block, err := newFetcher.BlockRetry(
ctx,
primaryNetwork,
types.ConstructPartialBlockIdentifier(
networkStatus.CurrentBlockIdentifier,
),
)
if err != nil {
log.Fatal(err)
}
// Step 5: Print the block
log.Printf("Current Block: %s\n", types.PrettyPrintStruct(block))
} | |
gb_holidays_test.go | // (c) Rick Arnold. Licensed under the BSD license (see LICENSE).
package gb
import (
"testing"
"time"
"github.com/devechelon/cal/v2"
)
func d(y, m, d int) time.Time |
func TestHolidays(t *testing.T) {
tests := []struct {
h *cal.Holiday
y int
wantAct time.Time
wantObs time.Time
}{
{NewYear, 2015, d(2015, 1, 1), d(2015, 1, 1)},
{NewYear, 2016, d(2016, 1, 1), d(2016, 1, 1)},
{NewYear, 2017, d(2017, 1, 1), d(2017, 1, 2)},
{NewYear, 2018, d(2018, 1, 1), d(2018, 1, 1)},
{NewYear, 2019, d(2019, 1, 1), d(2019, 1, 1)},
{NewYear, 2020, d(2020, 1, 1), d(2020, 1, 1)},
{NewYear, 2021, d(2021, 1, 1), d(2021, 1, 1)},
{NewYear, 2022, d(2022, 1, 1), d(2022, 1, 3)},
{GoodFriday, 2015, d(2015, 4, 3), d(2015, 4, 3)},
{GoodFriday, 2016, d(2016, 3, 25), d(2016, 3, 25)},
{GoodFriday, 2017, d(2017, 4, 14), d(2017, 4, 14)},
{GoodFriday, 2018, d(2018, 3, 30), d(2018, 3, 30)},
{GoodFriday, 2019, d(2019, 4, 19), d(2019, 4, 19)},
{GoodFriday, 2020, d(2020, 4, 10), d(2020, 4, 10)},
{GoodFriday, 2021, d(2021, 4, 2), d(2021, 4, 2)},
{GoodFriday, 2022, d(2022, 4, 15), d(2022, 4, 15)},
{EasterMonday, 2015, d(2015, 4, 6), d(2015, 4, 6)},
{EasterMonday, 2016, d(2016, 3, 28), d(2016, 3, 28)},
{EasterMonday, 2017, d(2017, 4, 17), d(2017, 4, 17)},
{EasterMonday, 2018, d(2018, 4, 2), d(2018, 4, 2)},
{EasterMonday, 2019, d(2019, 4, 22), d(2019, 4, 22)},
{EasterMonday, 2020, d(2020, 4, 13), d(2020, 4, 13)},
{EasterMonday, 2021, d(2021, 4, 5), d(2021, 4, 5)},
{EasterMonday, 2022, d(2022, 4, 18), d(2022, 4, 18)},
{EarlyMay, 2015, d(2015, 5, 4), d(2015, 5, 4)},
{EarlyMay, 2016, d(2016, 5, 2), d(2016, 5, 2)},
{EarlyMay, 2017, d(2017, 5, 1), d(2017, 5, 1)},
{EarlyMay, 2018, d(2018, 5, 7), d(2018, 5, 7)},
{EarlyMay, 2019, d(2019, 5, 6), d(2019, 5, 6)},
{EarlyMay, 2020, time.Time{}, time.Time{}},
{EarlyMay, 2021, d(2021, 5, 3), d(2021, 5, 3)},
{EarlyMay, 2022, d(2022, 5, 2), d(2022, 5, 2)},
{VEDay, 2015, time.Time{}, time.Time{}},
{VEDay, 2016, time.Time{}, time.Time{}},
{VEDay, 2017, time.Time{}, time.Time{}},
{VEDay, 2018, time.Time{}, time.Time{}},
{VEDay, 2019, time.Time{}, time.Time{}},
{VEDay, 2020, d(2020, 5, 8), d(2020, 5, 8)},
{VEDay, 2021, time.Time{}, time.Time{}},
{VEDay, 2022, time.Time{}, time.Time{}},
{SpringHoliday, 2015, d(2015, 5, 25), d(2015, 5, 25)},
{SpringHoliday, 2016, d(2016, 5, 30), d(2016, 5, 30)},
{SpringHoliday, 2017, d(2017, 5, 29), d(2017, 5, 29)},
{SpringHoliday, 2018, d(2018, 5, 28), d(2018, 5, 28)},
{SpringHoliday, 2019, d(2019, 5, 27), d(2019, 5, 27)},
{SpringHoliday, 2020, d(2020, 5, 25), d(2020, 5, 25)},
{SpringHoliday, 2021, d(2021, 5, 31), d(2021, 5, 31)},
{SpringHoliday, 2022, d(2022, 5, 30), d(2022, 5, 30)},
{SummerHolidayScotland, 2015, d(2015, 8, 3), d(2015, 8, 3)},
{SummerHolidayScotland, 2016, d(2016, 8, 1), d(2016, 8, 1)},
{SummerHolidayScotland, 2017, d(2017, 8, 7), d(2017, 8, 7)},
{SummerHolidayScotland, 2018, d(2018, 8, 6), d(2018, 8, 6)},
{SummerHolidayScotland, 2019, d(2019, 8, 5), d(2019, 8, 5)},
{SummerHolidayScotland, 2020, d(2020, 8, 3), d(2020, 8, 3)},
{SummerHolidayScotland, 2021, d(2021, 8, 2), d(2021, 8, 2)},
{SummerHolidayScotland, 2022, d(2022, 8, 1), d(2022, 8, 1)},
{SummerHoliday, 2015, d(2015, 8, 31), d(2015, 8, 31)},
{SummerHoliday, 2016, d(2016, 8, 29), d(2016, 8, 29)},
{SummerHoliday, 2017, d(2017, 8, 28), d(2017, 8, 28)},
{SummerHoliday, 2018, d(2018, 8, 27), d(2018, 8, 27)},
{SummerHoliday, 2019, d(2019, 8, 26), d(2019, 8, 26)},
{SummerHoliday, 2020, d(2020, 8, 31), d(2020, 8, 31)},
{SummerHoliday, 2021, d(2021, 8, 30), d(2021, 8, 30)},
{SummerHoliday, 2022, d(2022, 8, 29), d(2022, 8, 29)},
{ChristmasDay, 2015, d(2015, 12, 25), d(2015, 12, 25)},
{ChristmasDay, 2016, d(2016, 12, 25), d(2016, 12, 26)},
{ChristmasDay, 2017, d(2017, 12, 25), d(2017, 12, 25)},
{ChristmasDay, 2018, d(2018, 12, 25), d(2018, 12, 25)},
{ChristmasDay, 2019, d(2019, 12, 25), d(2019, 12, 25)},
{ChristmasDay, 2020, d(2020, 12, 25), d(2020, 12, 25)},
{ChristmasDay, 2021, d(2021, 12, 25), d(2021, 12, 27)},
{ChristmasDay, 2022, d(2022, 12, 25), d(2022, 12, 26)},
{BoxingDay, 2015, d(2015, 12, 26), d(2015, 12, 28)},
{BoxingDay, 2016, d(2016, 12, 26), d(2016, 12, 27)},
{BoxingDay, 2017, d(2017, 12, 26), d(2017, 12, 26)},
{BoxingDay, 2018, d(2018, 12, 26), d(2018, 12, 26)},
{BoxingDay, 2019, d(2019, 12, 26), d(2019, 12, 26)},
{BoxingDay, 2020, d(2020, 12, 26), d(2020, 12, 28)},
{BoxingDay, 2021, d(2021, 12, 26), d(2021, 12, 28)},
{BoxingDay, 2022, d(2022, 12, 26), d(2022, 12, 27)},
}
for _, test := range tests {
gotAct, gotObs := test.h.Calc(test.y)
if !gotAct.Equal(test.wantAct) {
t.Errorf("%s %d: got actual: %s, want: %s", test.h.Name, test.y, gotAct.String(), test.wantAct.String())
}
if !gotObs.Equal(test.wantObs) {
t.Errorf("%s %d: got observed: %s, want: %s", test.h.Name, test.y, gotObs.String(), test.wantObs.String())
}
}
}
| {
return time.Date(y, time.Month(m), d, 0, 0, 0, 0, cal.DefaultLoc)
} |
5.0d93b8f7.chunk.js | (window["webpackJsonppurple-react-free"]=window["webpackJsonppurple-react-free"]||[]).push([[5],{158:function(e,a,t){e.exports=t.p+"static/media/circle.1541da91.svg"},328:function(e,a,t){e.exports=t.p+"static/media/img_1.3a82198a.jpg"},329:function(e,a,t){e.exports=t.p+"static/media/img_4.ea8a7a45.jpg"},330:function(e,a,t){e.exports=t.p+"static/media/img_2.1b4a771c.jpg"},331:function(e,a,t){e.exports=t.p+"static/media/img_3.573908af.jpg"},332:function(e,a,t){"use strict";var l=t(1),r=t(3),n=t(5),c=t.n(n),s=t(0),m=t.n(s),i=t(7),d=t(42),o=1e3;function u(e,a,t){var l=(e-a)/(t-a)*100;return Math.round(l*o)/o}function E(e,a){var t,n=e.min,s=e.now,i=e.max,d=e.label,o=e.srOnly,E=e.striped,g=e.animated,b=e.className,p=e.style,h=e.variant,v=e.bsPrefix,N=Object(r.a)(e,["min","now","max","label","srOnly","striped","animated","className","style","variant","bsPrefix"]);return m.a.createElement("div",Object(l.a)({ref:a},N,{role:"progressbar",className:c()(b,v+"-bar",(t={},t["bg-"+h]=h,t[v+"-bar-animated"]=g,t[v+"-bar-striped"]=g||E,t)),style:Object(l.a)({width:u(s,n,i)+"%"},p),"aria-valuenow":s,"aria-valuemin":n,"aria-valuemax":i}),o?m.a.createElement("span",{className:"sr-only"},d):d)}var g=m.a.forwardRef((function(e,a){var t=e.isChild,n=Object(r.a)(e,["isChild"]);if(n.bsPrefix=Object(i.b)(n.bsPrefix,"progress"),t)return E(n,a);var o=n.min,u=n.now,g=n.max,b=n.label,p=n.srOnly,h=n.striped,v=n.animated,N=n.bsPrefix,f=n.variant,C=n.className,y=n.children,w=Object(r.a)(n,["min","now","max","label","srOnly","striped","animated","bsPrefix","variant","className","children"]);return m.a.createElement("div",Object(l.a)({ref:a},w,{className:c()(C,N)}),y?Object(d.b)(y,(function(e){return Object(s.cloneElement)(e,{isChild:!0})})):E({min:o,now:u,max:g,label:b,srOnly:p,striped:h,animated:v,bsPrefix:N,variant:f},a))}));g.displayName="ProgressBar",g.defaultProps={min:0,max:100,animated:!1,isChild:!1,srOnly:!1,striped:!1},a.a=g},339:function(e,a,t){"use strict";t.r(a),t.d(a,"Dashboard",(function(){return v}));var l=t(34),r=t(26),n=t(11),c=t(12),s=t(14),m=t(13),i=t(31),d=t(15),o=t(0),u=t.n(o),E=t(332),g=t(159),b=t(176),p=t.n(b);function h(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);a&&(l=l.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,l)}return t}var v=function(e){function a(e){var t;return Object(n.a)(this,a),(t=Object(s.a)(this,Object(m.a)(a).call(this,e))).handleChange=function(e){t.setState({startDate:e})},t.state={startDate:new Date,visitSaleData:{},visitSaleOptions:{scales:{yAxes:[{ticks:{beginAtZero:!0,display:!1,min:0,stepSize:20,max:80},gridLines:{drawBorder:!1,color:"rgba(235,237,242,1)",zeroLineColor:"rgba(235,237,242,1)"}}],xAxes:[{gridLines:{display:!1,drawBorder:!1,color:"rgba(0,0,0,1)",zeroLineColor:"rgba(235,237,242,1)"},ticks:{padding:20,fontColor:"#9c9fa6",autoSkip:!0},categoryPercentage:.5,barPercentage:.5}]},legend:{display:!1},elements:{point:{radius:0}}},trafficData:{},trafficOptions:{responsive:!0,animation:{animateScale:!0,animateRotate:!0},legend:!1},todos:[{id:1,task:"Pick up kids from school",isCompleted:!1},{id:2,task:"Prepare for presentation",isCompleted:!0},{id:3,task:"Print Statements",isCompleted:!1},{id:4,task:"Create invoice",isCompleted:!1},{id:5,task:"Call John",isCompleted:!0},{id:6,task:"Meeting with 
Alisa",isCompleted:!1}],inputValue:""},t.statusChangedHandler=t.statusChangedHandler.bind(Object(i.a)(t)),t.addTodo=t.addTodo.bind(Object(i.a)(t)),t.removeTodo=t.removeTodo.bind(Object(i.a)(t)),t.inputChangeHandler=t.inputChangeHandler.bind(Object(i.a)(t)),t}return Object(d.a)(a,e),Object(c.a)(a,[{key:"statusChangedHandler",value:function(e,a){var t=function(e){for(var a=1;a<arguments.length;a++){var t=null!=arguments[a]?arguments[a]:{};a%2?h(t,!0).forEach((function(a){Object(r.a)(e,a,t[a])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(t)):h(t).forEach((function(a){Object.defineProperty(e,a,Object.getOwnPropertyDescriptor(t,a))}))}return e}({},this.state.todos[a]);t.isCompleted=e.target.checked;var n=Object(l.a)(this.state.todos);n[a]=t,this.setState({todos:n})}},{key:"addTodo",value:function(e){e.preventDefault();var a=Object(l.a)(this.state.todos);a.unshift({id:a.length?a[a.length-1].id+1:1,task:this.state.inputValue,isCompleted:!1}),this.setState({todos:a,inputValue:""})}},{key:"removeTodo",value:function(e){var a=Object(l.a)(this.state.todos);a.splice(e,1),this.setState({todos:a})}},{key:"inputChangeHandler",value:function(e){this.setState({inputValue:e.target.value})}},{key:"componentDidMount",value:function(){var e=document.getElementById("visitSaleChart").getContext("2d"),a=e.createLinearGradient(0,0,0,181);a.addColorStop(0,"rgba(218, 140, 255, 1)"),a.addColorStop(1,"rgba(154, 85, 255, 1)");var t=e.createLinearGradient(0,0,0,360);t.addColorStop(0,"rgba(54, 215, 232, 1)"),t.addColorStop(1,"rgba(177, 148, 250, 1)");var l=e.createLinearGradient(0,0,0,300);l.addColorStop(0,"rgba(255, 191, 150, 1)"),l.addColorStop(1,"rgba(254, 112, 150, 1)");var r=e.createLinearGradient(0,0,0,181);r.addColorStop(0,"rgba(54, 215, 232, 1)"),r.addColorStop(1,"rgba(177, 148, 250, 1)");var n=e.createLinearGradient(0,0,0,50);n.addColorStop(0,"rgba(6, 185, 157, 1)"),n.addColorStop(1,"rgba(132, 217, 210, 1)");var c=e.createLinearGradient(0,0,0,300);c.addColorStop(0,"rgba(254, 124, 150, 1)"),c.addColorStop(1,"rgba(255, 205, 150, 1)");var s={labels:["JAN","FEB","MAR","APR","MAY","JUN","JUL","AUG"],datasets:[{label:"CHN",borderColor:a,backgroundColor:a,hoverBackgroundColor:a,legendColor:a,pointRadius:0,fill:!1,borderWidth:1,data:[20,40,15,35,25,50,30,20]},{label:"USA",borderColor:t,backgroundColor:t,hoverBackgroundColor:t,legendColor:t,pointRadius:0,fill:!1,borderWidth:1,data:[40,30,20,10,50,15,35,40]},{label:"UK",borderColor:l,backgroundColor:l,hoverBackgroundColor:l,legendColor:l,pointRadius:0,fill:!1,borderWidth:1,data:[70,10,30,40,25,50,15,30]}]},m={datasets:[{data:[30,30,40],backgroundColor:[r,n,c],hoverBackgroundColor:[r,n,c],borderColor:[r,n,c],legendColor:[r,n,c]}],labels:["Search Engines","Direct Click","Bookmarks Click"]};this.setState({visitSaleData:s,trafficData:m})}},{key:"toggleProBanner",value:function(){document.querySelector(".proBanner").classList.toggle("hide")}},{key:"render",value:function(){var e=this;return u.a.createElement("div",null,u.a.createElement("div",{className:"proBanner"},u.a.createElement("div",null,u.a.createElement("span",{className:"d-flex align-items-center purchase-popup"},u.a.createElement("p",null,"Get tons of UI components, Plugins, multiple layouts, 20+ sample pages, and more!"),u.a.createElement("a",{href:"https://www.bootstrapdash.com/product/purple-react/?utm_source=organic&utm_medium=banner&utm_campaign=free-preview",rel:"noopener noreferrer",target:"_blank",className:"btn btn-sm purchase-button ml-auto"},"Check Pro 
Version"),u.a.createElement("i",{className:"mdi mdi-close bannerClose",onClick:this.toggleProBanner})))),u.a.createElement("div",{className:"page-header"},u.a.createElement("h3",{className:"page-title"},u.a.createElement("span",{className:"page-title-icon bg-gradient-primary text-white mr-2"},u.a.createElement("i",{className:"mdi mdi-home"}))," Dashboard "),u.a.createElement("nav",{"aria-label":"breadcrumb"},u.a.createElement("ul",{className:"breadcrumb"},u.a.createElement("li",{className:"breadcrumb-item active","aria-current":"page"},u.a.createElement("span",null),"Overview ",u.a.createElement("i",{className:"mdi mdi-alert-circle-outline icon-sm text-primary align-middle"}))))),u.a.createElement("div",{className:"row"},u.a.createElement("div",{className:"col-md-4 stretch-card grid-margin"},u.a.createElement("div",{className:"card bg-gradient-danger card-img-holder text-white"},u.a.createElement("div",{className:"card-body"},u.a.createElement("img",{src:t(158),className:"card-img-absolute",alt:"circle"}),u.a.createElement("h4",{className:"font-weight-normal mb-3"},"Weekly Sales ",u.a.createElement("i",{className:"mdi mdi-chart-line mdi-24px float-right"})),u.a.createElement("h2",{className:"mb-5"},"$ 15,0000"),u.a.createElement("h6",{className:"card-text"},"Increased by 60%")))),u.a.createElement("div",{className:"col-md-4 stretch-card grid-margin"},u.a.createElement("div",{className:"card bg-gradient-info card-img-holder text-white"},u.a.createElement("div",{className:"card-body"},u.a.createElement("img",{src:t(158),className:"card-img-absolute",alt:"circle"}),u.a.createElement("h4",{className:"font-weight-normal mb-3"},"Weekly Orders ",u.a.createElement("i",{className:"mdi mdi-bookmark-outline mdi-24px float-right"})),u.a.createElement("h2",{className:"mb-5"},"45,6334"),u.a.createElement("h6",{className:"card-text"},"Decreased by 10%")))),u.a.createElement("div",{className:"col-md-4 stretch-card grid-margin"},u.a.createElement("div",{className:"card bg-gradient-success card-img-holder text-white"},u.a.createElement("div",{className:"card-body"},u.a.createElement("img",{src:t(158),className:"card-img-absolute",alt:"circle"}),u.a.createElement("h4",{className:"font-weight-normal mb-3"},"Visitors Online ",u.a.createElement("i",{className:"mdi mdi-diamond mdi-24px float-right"})),u.a.createElement("h2",{className:"mb-5"},"95,5741"),u.a.createElement("h6",{className:"card-text"},"Increased by 5%"))))),u.a.createElement("div",{className:"row"},u.a.createElement("div",{className:"col-md-7 grid-margin stretch-card"},u.a.createElement("div",{className:"card"},u.a.createElement("div",{className:"card-body"},u.a.createElement("div",{className:"clearfix mb-4"},u.a.createElement("h4",{className:"card-title float-left"},"Visit And Sales Statistics"),u.a.createElement("div",{id:"visit-sale-chart-legend",className:"rounded-legend legend-horizontal legend-top-right float-right"},u.a.createElement("ul",null,u.a.createElement("li",null,u.a.createElement("span",{className:"legend-dots bg-primary"}),"CHN"),u.a.createElement("li",null,u.a.createElement("span",{className:"legend-dots bg-danger"}),"USA"),u.a.createElement("li",null,u.a.createElement("span",{className:"legend-dots bg-info"}),"UK")))),u.a.createElement(g.a,{ref:"chart",className:"chartLegendContainer",data:this.state.visitSaleData,options:this.state.visitSaleOptions,id:"visitSaleChart"})))),u.a.createElement("div",{className:"col-md-5 grid-margin 
stretch-card"},u.a.createElement("div",{className:"card"},u.a.createElement("div",{className:"card-body"},u.a.createElement("h4",{className:"card-title"},"Traffic Sources"),u.a.createElement(g.b,{data:this.state.trafficData,options:this.state.trafficOptions}),u.a.createElement("div",{id:"traffic-chart-legend",className:"rounded-legend legend-vertical legend-bottom-left pt-4"},u.a.createElement("ul",null,u.a.createElement("li",null,u.a.createElement("span",{className:"legend-dots bg-info"}),"Search Engines",u.a.createElement("span",{className:"float-right"},"30%")),u.a.createElement("li",null,u.a.createElement("span",{className:"legend-dots bg-success"}),"Direct Click",u.a.createElement("span",{className:"float-right"},"30%")),u.a.createElement("li",null,u.a.createElement("span",{className:"legend-dots bg-danger"}),"Bookmarks Click",u.a.createElement("span",{className:"float-right"},"40%")))))))),u.a.createElement("div",{className:"row"},u.a.createElement("div",{className:"col-12 grid-margin"},u.a.createElement("div",{className:"card"},u.a.createElement("div",{className:"card-body"},u.a.createElement("h4",{className:"card-title"},"Recent Tickets"),u.a.createElement("div",{className:"table-responsive"},u.a.createElement("table",{className:"table"},u.a.createElement("thead",null,u.a.createElement("tr",null,u.a.createElement("th",null," Assignee "),u.a.createElement("th",null," Subject "),u.a.createElement("th",null," Status "),u.a.createElement("th",null," Last Update "),u.a.createElement("th",null," Tracking ID "))),u.a.createElement("tbody",null,u.a.createElement("tr",null,u.a.createElement("td",null,u.a.createElement("img",{src:t(51),className:"mr-2",alt:"face"})," David Grey "),u.a.createElement("td",null," Fund is not recieved "),u.a.createElement("td",null,u.a.createElement("label",{className:"badge badge-gradient-success"},"DONE")),u.a.createElement("td",null," Dec 5, 2017 "),u.a.createElement("td",null," WD-12345 ")),u.a.createElement("tr",null,u.a.createElement("td",null,u.a.createElement("img",{src:t(67),className:"mr-2",alt:"face"})," Stella Johnson "),u.a.createElement("td",null," High loading time "),u.a.createElement("td",null,u.a.createElement("label",{className:"badge badge-gradient-warning"},"PROGRESS")),u.a.createElement("td",null," Dec 12, 2017 "),u.a.createElement("td",null," WD-12346 ")),u.a.createElement("tr",null,u.a.createElement("td",null,u.a.createElement("img",{src:t(64),className:"mr-2",alt:"face"})," Marina Michel "),u.a.createElement("td",null," Website down for one week "),u.a.createElement("td",null,u.a.createElement("label",{className:"badge badge-gradient-info"},"ON HOLD")),u.a.createElement("td",null," Dec 16, 2017 "),u.a.createElement("td",null," WD-12347 ")),u.a.createElement("tr",null,u.a.createElement("td",null,u.a.createElement("img",{src:t(66),className:"mr-2",alt:"face"})," John Doe "),u.a.createElement("td",null," Loosing control on server "),u.a.createElement("td",null,u.a.createElement("label",{className:"badge badge-gradient-danger"},"REJECTED")),u.a.createElement("td",null," Dec 3, 2017 "),u.a.createElement("td",null," WD-12348 "))))))))),u.a.createElement("div",{className:"row"},u.a.createElement("div",{className:"col-lg-5 grid-margin stretch-card"},u.a.createElement("div",{className:"card"},u.a.createElement("div",{className:"card-body p-0 
d-flex"},u.a.createElement("div",{className:"dashboard-custom-date-picker"},u.a.createElement(p.a,{inline:!0,selected:this.state.startDate,onChange:this.handleChange}))))),u.a.createElement("div",{className:"col-lg-7 grid-margin stretch-card"},u.a.createElement("div",{className:"card"},u.a.createElement("div",{className:"card-body"},u.a.createElement("h4",{className:"card-title"},"Recent Updates"),u.a.createElement("div",{className:"d-flex"},u.a.createElement("div",{className:"d-flex align-items-center mr-4 text-muted font-weight-light"},u.a.createElement("i",{className:"mdi mdi-account-outline icon-sm mr-2"}),u.a.createElement("span",null,"jack Menqu")),u.a.createElement("div",{className:"d-flex align-items-center text-muted font-weight-light"},u.a.createElement("i",{className:"mdi mdi-clock icon-sm mr-2"}),u.a.createElement("span",null,"October 3rd, 2018"))),u.a.createElement("div",{className:"row mt-3"},u.a.createElement("div",{className:"col-6 pr-1"},u.a.createElement("img",{src:t(328),className:"mb-2 mw-100 w-100 rounded",alt:"face"}),u.a.createElement("img",{src:t(329),className:"mw-100 w-100 rounded",alt:"face"})),u.a.createElement("div",{className:"col-6 pl-1"},u.a.createElement("img",{src:t(330),className:"mb-2 mw-100 w-100 rounded",alt:"face"}),u.a.createElement("img",{src:t(331),className:"mw-100 w-100 rounded",alt:"face "}))),u.a.createElement("div",{className:"d-flex mt-5 align-items-start"},u.a.createElement("img",{src:t(64),className:"img-sm rounded-circle mr-3",alt:"face"}),u.a.createElement("div",{className:"mb-0 flex-grow"},u.a.createElement("h5",{className:"mr-2 mb-2"},"School Website - Authentication Module."),u.a.createElement("p",{className:"mb-0 font-weight-light"},"It is a long established fact that a reader will be distracted by the readable content of a page.")),u.a.createElement("div",{className:"ml-auto"},u.a.createElement("i",{className:"mdi mdi-heart-outline text-muted"}))))))),u.a.createElement("div",{className:"row"},u.a.createElement("div",{className:"col-xl-7 grid-margin stretch-card"},u.a.createElement("div",{className:"card"},u.a.createElement("div",{className:"card-body"},u.a.createElement("h4",{className:"card-title"},"Project Status"),u.a.createElement("div",{className:"table-responsive"},u.a.createElement("table",{className:"table"},u.a.createElement("thead",null,u.a.createElement("tr",null,u.a.createElement("th",null," # "),u.a.createElement("th",null," Name "),u.a.createElement("th",null," Due Date "),u.a.createElement("th",null," Progress "))),u.a.createElement("tbody",null,u.a.createElement("tr",null,u.a.createElement("td",null," 1 "),u.a.createElement("td",null," Herman Beck "),u.a.createElement("td",null," May 15, 2015 "),u.a.createElement("td",null,u.a.createElement(E.a,{variant:"gradient-success",now:25}))),u.a.createElement("tr",null,u.a.createElement("td",null," 2 "),u.a.createElement("td",null," Messsy Adam "),u.a.createElement("td",null," Jul 01, 2015 "),u.a.createElement("td",null,u.a.createElement(E.a,{variant:"gradient-danger",now:75}))),u.a.createElement("tr",null,u.a.createElement("td",null," 3 "),u.a.createElement("td",null," John Richards "),u.a.createElement("td",null," Apr 12, 2015 "),u.a.createElement("td",null,u.a.createElement(E.a,{variant:"gradient-warning",now:90}))),u.a.createElement("tr",null,u.a.createElement("td",null," 4 "),u.a.createElement("td",null," Peter Meggik "),u.a.createElement("td",null," May 15, 2015 
"),u.a.createElement("td",null,u.a.createElement(E.a,{variant:"gradient-primary",now:50}))),u.a.createElement("tr",null,u.a.createElement("td",null," 5 "),u.a.createElement("td",null," Edward "),u.a.createElement("td",null," May 03, 2015 "),u.a.createElement("td",null,u.a.createElement(E.a,{variant:"gradient-danger",now:50}))),u.a.createElement("tr",null,u.a.createElement("td",null," 5 "),u.a.createElement("td",null," Ronald "),u.a.createElement("td",null," Jun 05, 2015 "),u.a.createElement("td",null,u.a.createElement(E.a,{variant:"gradient-info",now:65}))))))))),u.a.createElement("div",{className:"col-xl-5 grid-margin stretch-card"},u.a.createElement("div",{className:"card"},u.a.createElement("div",{className:"card-body"},u.a.createElement("h4",{className:"card-title text-white"},"Todo"),u.a.createElement("form",{className:"add-items d-flex",onSubmit:this.addTodo},u.a.createElement("input",{type:"text",className:"form-control h-auto",placeholder:"What do you need to do today?",value:this.state.inputValue,onChange:this.inputChangeHandler,required:!0}),u.a.createElement("button",{type:"submit",className:"btn btn-gradient-primary font-weight-bold px-lg-4 px-3"},"Add")),u.a.createElement("div",{className:"list-wrapper"},u.a.createElement("ul",{className:"d-flex flex-column todo-list"},this.state.todos.map((function(a,t){return u.a.createElement(N,{isCompleted:a.isCompleted,changed:function(a){return e.statusChangedHandler(a,t)},key:a.id,remove:function(){return e.removeTodo(t)}},a.task)})))))))))}}]),a}(o.Component),N=function(e){return u.a.createElement("li",{className:e.isCompleted?"completed":null},u.a.createElement("div",{className:"form-check"},u.a.createElement("label",{htmlFor:"",className:"form-check-label"},u.a.createElement("input",{className:"checkbox",type:"checkbox",checked:e.isCompleted,onChange:e.changed})," ",e.children," ",u.a.createElement("i",{className:"input-helper"}))),u.a.createElement("i",{className:"remove mdi mdi-close-circle-outline",onClick:e.remove}))};a.default=v}}]);
//# sourceMappingURL=5.0d93b8f7.chunk.js.map | ||
bench_chain.rs | // Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use benchmarks::chain::ChainBencher;
#[allow(deprecated)]
use criterion::{criterion_group, criterion_main, Benchmark, Criterion};
#[allow(deprecated)]
fn block_apply(c: &mut Criterion) {
::logger::init();
for i in &[10u64, 1000] {
c.bench(
"block_apply",
Benchmark::new(format!("block_apply_{:?}", i), move |b| {
let bencher = ChainBencher::new(Some(*i));
bencher.bench(b)
})
.sample_size(10),
);
}
}
#[allow(deprecated)]
fn query_block(c: &mut Criterion) |
criterion_group!(starcoin_chain_benches, block_apply, query_block);
criterion_main!(starcoin_chain_benches);
| {
::logger::init();
for block_num in &[10u64, 1000u64] {
let bencher = ChainBencher::new(Some(*block_num));
bencher.execute();
for i in &[100u64, 1000, 10000] {
let id = format!("query_block_in({:?})_times({:?})", block_num, i,);
let bencher_local = bencher.clone();
c.bench(
"query_block",
Benchmark::new(id, move |b| bencher_local.query_bench(b, *i)).sample_size(10),
);
}
}
} |
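// Illustrative note (not part of the original file): with this Criterion
// setup, the benchmarks are driven through Cargo, e.g.
//
//     cargo bench --bench bench_chain
//
// and a single group can be filtered by name:
//
//     cargo bench --bench bench_chain -- query_block
//
// The `bench_chain` target name is an assumption about this crate's
// Cargo.toml `[[bench]]` layout.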
lightning_channels.py | import asyncio
from typing import TYPE_CHECKING, Optional, Union
from kivy.lang import Builder
from kivy.factory import Factory
from kivy.uix.popup import Popup
from .fee_dialog import FeeDialog
from electrum.util import bh2u
from electrum.logging import Logger
from electrum.lnutil import LOCAL, REMOTE, format_short_channel_id
from electrum.lnchannel import AbstractChannel, Channel, ChannelState
from electrum.gui.kivy.i18n import _
from .question import Question
from electrum.transaction import PartialTxOutput, Transaction
from electrum.util import NotEnoughFunds, NoDynamicFeeEstimates, format_fee_satoshis, quantize_feerate
from electrum.lnutil import ln_dummy_address
from electrum.gui import messages
from .qr_dialog import QRDialog
from .choice_dialog import ChoiceDialog
if TYPE_CHECKING:
from ...main_window import ElectrumWindow
from electrum import SimpleConfig
Builder.load_string(r'''
<SwapDialog@Popup>
id: popup
title: _('Lightning Swap')
size_hint: 0.8, 0.8
pos_hint: {'top':0.9}
mining_fee_text: ''
fee_rate_text: ''
method: 0
BoxLayout:
orientation: 'vertical'
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Send') + ':'
size_hint: 0.4, 1
Label:
id: send_amount_label
size_hint: 0.6, 1
text: _('0')
background_color: (0,0,0,0)
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Receive') + ':'
size_hint: 0.4, 1
Label:
id: receive_amount_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Server Fee') + ':'
size_hint: 0.4, 1
Label:
id: server_fee_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
id: swap_action_label
text: _('Adds receiving capacity')
background_color: (0,0,0,0)
font_size: '14dp'
Slider:
id: swap_slider
range: 0, 4
step: 1
on_value: root.swap_slider_moved(self.value)
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Mining Fee') + ':'
size_hint: 0.4, 1
Button:
text: root.mining_fee_text + ' (' + root.fee_rate_text + ')'
background_color: (0,0,0,0)
bold: True
on_release:
root.on_fee_button()
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
TopLabel:
id: fee_estimate
text: ''
font_size: '14dp'
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Button:
text: 'Cancel'
size_hint: 0.5, None
height: '48dp'
on_release: root.dismiss()
Button:
id: ok_button
text: 'OK'
size_hint: 0.5, None
height: '48dp'
on_release:
root.on_ok()
root.dismiss()
<LightningChannelItem@CardItem>
details: {}
active: False
short_channel_id: '<channelId not set>'
status: ''
is_backup: False
balances: ''
node_alias: ''
_chan: None
BoxLayout:
size_hint: 0.7, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
color: (.5,.5,.5,1) if not root.active else (1,1,1,1)
text: root.short_channel_id
font_size: '15sp'
Widget
CardLabel:
font_size: '13sp'
shorten: True
text: root.node_alias
Widget
BoxLayout:
size_hint: 0.3, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
text: root.status
font_size: '13sp'
halign: 'right'
Widget
CardLabel:
text: root.balances if not root.is_backup else ''
font_size: '13sp'
halign: 'right'
Widget
<LightningChannelsDialog@Popup>:
name: 'lightning_channels'
title: _('Lightning Network')
has_lightning: False
has_gossip: False
can_send: ''
can_receive: ''
num_channels_text: ''
id: popup
BoxLayout:
id: box
orientation: 'vertical'
spacing: '2dp'
padding: '12dp'
BoxLabel:
text: _('You can send') + ':'
value: root.can_send
BoxLabel:
text: _('You can receive') + ':'
value: root.can_receive
TopLabel:
text: root.num_channels_text
ScrollView:
GridLayout:
cols: 1
id: lightning_channels_container
size_hint: 1, None
height: self.minimum_height
spacing: '2dp'
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Open Channel')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('lightning_open_channel_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Swap')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('swap_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Gossip')
disabled: not root.has_gossip
on_release: popup.app.popup_dialog('lightning')
<ChannelDetailsPopup@Popup>:
id: popuproot
data: []
is_closed: False
is_redeemed: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
local_ctn:0
remote_ctn:0
local_csv:0
remote_csv:0
feerate:''
can_send:''
can_receive:''
is_open:False
warning: ''
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
TopLabel:
text: root.warning
color: .905, .709, .509, 1
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
BoxLabel:
text: _('Can send')
value: root.can_send if root.is_open else 'n/a'
BoxLabel:
text: _('Can receive')
value: root.can_receive if root.is_open else 'n/a'
BoxLabel:
text: _('CSV delay')
value: 'Local: %d\nRemote: %d' % (root.local_csv, root.remote_csv)
BoxLabel:
text: _('CTN')
value: 'Local: %d\nRemote: %d' % (root.local_ctn, root.remote_ctn)
BoxLabel:
text: _('Fee rate')
value: '{} sat/byte'.format(root.feerate)
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Backup')
on_release: root.export_backup()
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Close')
on_release: root.close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Force-close')
on_release: root.force_close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_channel()
disabled: not root.is_redeemed
<ChannelBackupPopup@Popup>:
id: popuproot
data: []
is_funded: False
is_imported: False
node_id:''
short_id:''
initiator:'' | closing_txid:''
state:''
is_open:False
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Request force-close')
on_release: root.request_force_close()
disabled: not root.is_funded
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_backup()
disabled: not root.is_imported
''')
class ChannelBackupPopup(Popup, Logger):
def __init__(self, chan: AbstractChannel, app, **kwargs):
Popup.__init__(self, **kwargs)
Logger.__init__(self)
self.chan = chan
self.is_funded = chan.get_state() == ChannelState.FUNDED
self.is_imported = chan.is_imported
self.funding_txid = chan.funding_outpoint.txid
self.app = app
self.short_id = format_short_channel_id(chan.short_channel_id)
self.capacity = self.app.format_amount_and_units(chan.get_capacity())
self.state = chan.get_state_for_GUI()
self.title = _('Channel Backup')
def request_force_close(self):
msg = _('Request force close?')
Question(msg, self._request_force_close).open()
def _request_force_close(self, b):
if not b:
return
loop = self.app.wallet.network.asyncio_loop
coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id), loop)
try:
coro.result(5)
self.app.show_info(_('Request sent'))
except Exception as e:
self.logger.exception("Could not close channel")
self.app.show_info(_('Could not close channel: ') + repr(e)) # repr because str(Exception()) == ''
def remove_backup(self):
msg = _('Delete backup?')
Question(msg, self._remove_backup).open()
def _remove_backup(self, b):
if not b:
return
self.app.wallet.lnworker.remove_channel_backup(self.chan.channel_id)
self.dismiss()
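# Note on the concurrency pattern above (illustrative, not part of the
# original file): Kivy callbacks run on the GUI thread while Electrum's
# network owns its own asyncio event loop, so coroutines are handed over
# with asyncio.run_coroutine_threadsafe() and awaited with a short timeout:
#
#     future = asyncio.run_coroutine_threadsafe(coro, loop)
#     result = future.result(5)  # raises on error or after a 5s timeout
#
# Blocking with .result() is tolerable here only because the timeout is small.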
class ChannelDetailsPopup(Popup, Logger):
def __init__(self, chan: Channel, app: 'ElectrumWindow', **kwargs):
Popup.__init__(self, **kwargs)
Logger.__init__(self)
self.is_closed = chan.is_closed()
self.is_redeemed = chan.is_redeemed()
self.app = app
self.chan = chan
self.title = _('Channel details')
self.node_id = bh2u(chan.node_id)
self.channel_id = bh2u(chan.channel_id)
self.funding_txid = chan.funding_outpoint.txid
self.short_id = format_short_channel_id(chan.short_channel_id)
self.capacity = self.app.format_amount_and_units(chan.get_capacity())
self.state = chan.get_state_for_GUI()
self.local_ctn = chan.get_latest_ctn(LOCAL)
self.remote_ctn = chan.get_latest_ctn(REMOTE)
self.local_csv = chan.config[LOCAL].to_self_delay
self.remote_csv = chan.config[REMOTE].to_self_delay
self.initiator = 'Local' if chan.constraints.is_initiator else 'Remote'
feerate_kw = chan.get_latest_feerate(LOCAL)
self.feerate = str(quantize_feerate(Transaction.satperbyte_from_satperkw(feerate_kw)))
self.can_send = self.app.format_amount_and_units(chan.available_to_spend(LOCAL) // 1000)
self.can_receive = self.app.format_amount_and_units(chan.available_to_spend(REMOTE) // 1000)
self.is_open = chan.is_open()
closed = chan.get_closing_height()
if closed:
self.closing_txid, closing_height, closing_timestamp = closed
msg = ' '.join([
_("Trampoline routing is enabled, but this channel is with a non-trampoline node."),
_("This channel may still be used for receiving, but it is frozen for sending."),
_("If you want to keep using this channel, you need to disable trampoline routing in your preferences."),
])
self.warning = '' if self.app.wallet.lnworker.channel_db or self.app.wallet.lnworker.is_trampoline_peer(chan.node_id) else _('Warning') + ': ' + msg
def close(self):
dialog = ChoiceDialog(
title=_('Close channel'),
choices={0:_('Cooperative close'), 1:_('Request force-close')}, key=0,
callback=self._close,
description=_(messages.MSG_REQUEST_FORCE_CLOSE),
keep_choice_order=True)
dialog.open()
def _close(self, choice):
loop = self.app.wallet.network.asyncio_loop
if choice == 1:
coro = self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id)
msg = _('Request sent')
else:
coro = self.app.wallet.lnworker.close_channel(self.chan.channel_id)
msg = _('Channel closed')
f = asyncio.run_coroutine_threadsafe(coro, loop)
try:
f.result(5)
self.app.show_info(msg)
except Exception as e:
self.logger.exception("Could not close channel")
self.app.show_info(_('Could not close channel: ') + repr(e)) # repr because str(Exception()) == ''
def remove_channel(self):
msg = _('Are you sure you want to delete this channel? This will purge associated transactions from your wallet history.')
Question(msg, self._remove_channel).open()
def _remove_channel(self, b):
if not b:
return
self.app.wallet.lnworker.remove_channel(self.chan.channel_id)
self.app._trigger_update_history()
self.dismiss()
def export_backup(self):
text = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
# TODO: some messages are duplicated between Kivy and Qt.
help_text = ' '.join([
_("Channel backups can be imported in another instance of the same wallet, by scanning this QR code."),
_("Please note that channel backups cannot be used to restore your channels."),
_("If you lose your wallet file, the only thing you can do with a backup is to request your channel to be closed, so that your funds will be sent on-chain."),
])
self.app.qr_dialog(_("Channel Backup " + self.chan.short_id_for_GUI()), text, help_text=help_text)
def force_close(self):
if self.chan.is_closed():
self.app.show_error(_('Channel already closed'))
return
to_self_delay = self.chan.config[REMOTE].to_self_delay
help_text = ' '.join([
_('If you force-close this channel, the funds you have in it will not be available for {} blocks.').format(to_self_delay),
_('During that time, funds will not be recoverable from your seed, and may be lost if you lose your device.'),
_('To prevent that, please save this channel backup.'),
_('It may be imported in another wallet with the same seed.')
])
title = _('Save backup and force-close')
data = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
popup = QRDialog(
title, data,
show_text=False,
text_for_clipboard=data,
help_text=help_text,
close_button_text=_('Next'),
on_close=self._confirm_force_close)
popup.open()
def _confirm_force_close(self):
Question(
_('Confirm force close?'),
self._do_force_close,
title=_('Force-close channel'),
no_str=_('Cancel'),
yes_str=_('Proceed')).open()
def _do_force_close(self, b):
if not b:
return
loop = self.app.wallet.network.asyncio_loop
coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.force_close_channel(self.chan.channel_id), loop)
try:
coro.result(1)
self.app.show_info(_('Channel closed, you may need to wait at least {} blocks, because of CSV delays').format(self.chan.config[REMOTE].to_self_delay))
except Exception as e:
self.logger.exception("Could not force close channel")
self.app.show_info(_('Could not force close channel: ') + repr(e)) # repr because str(Exception()) == ''
class LightningChannelsDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow'):
super(LightningChannelsDialog, self).__init__()
self.clocks = []
self.app = app
self.has_lightning = app.wallet.has_lightning()
self.has_gossip = self.app.network.channel_db is not None
self.update()
def show_item(self, obj):
chan = obj._chan
if chan.is_backup():
p = ChannelBackupPopup(chan, self.app)
else:
p = ChannelDetailsPopup(chan, self.app)
p.open()
def format_fields(self, chan):
labels = {}
for subject in (REMOTE, LOCAL):
bal_minus_htlcs = chan.balance_minus_outgoing_htlcs(subject)//1000
label = self.app.format_amount(bal_minus_htlcs)
other = subject.inverted()
bal_other = chan.balance(other)//1000
bal_minus_htlcs_other = chan.balance_minus_outgoing_htlcs(other)//1000
if bal_other != bal_minus_htlcs_other:
label += ' (+' + self.app.format_amount(bal_other - bal_minus_htlcs_other) + ')'
labels[subject] = label
closed = chan.is_closed()
return [
'n/a' if closed else labels[LOCAL],
'n/a' if closed else labels[REMOTE],
]
def update_item(self, item):
chan = item._chan
item.status = chan.get_state_for_GUI()
item.short_channel_id = chan.short_id_for_GUI()
l, r = self.format_fields(chan)
item.balances = l + '/' + r
self.update_can_send()
def update(self):
channel_cards = self.ids.lightning_channels_container
channel_cards.clear_widgets()
if not self.app.wallet:
return
lnworker = self.app.wallet.lnworker
channels = list(lnworker.channels.values()) if lnworker else []
backups = list(lnworker.channel_backups.values()) if lnworker else []
for i in channels + backups:
item = Factory.LightningChannelItem()
item.screen = self
item.active = not i.is_closed()
item.is_backup = i.is_backup()
item._chan = i
item.node_alias = lnworker.get_node_alias(i.node_id) or i.node_id.hex()
self.update_item(item)
channel_cards.add_widget(item)
self.update_can_send()
def update_can_send(self):
lnworker = self.app.wallet.lnworker
if not lnworker:
self.can_send = 'n/a'
self.can_receive = 'n/a'
return
self.num_channels_text = _('You have {} channels.').format(len(lnworker.channels))
self.can_send = self.app.format_amount_and_units(lnworker.num_sats_can_send())
self.can_receive = self.app.format_amount_and_units(lnworker.num_sats_can_receive())
# Swaps must confirm within a bounded number of blocks, so we recommend a fee rate that targets a limited block count.
RECOMMEND_BLOCKS_SWAP = 25
class SwapDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow', config: 'SimpleConfig'):
super(SwapDialog, self).__init__()
self.app = app
self.config = config
self.fmt_amt = self.app.format_amount_and_units
self.lnworker = self.app.wallet.lnworker
# swap related
self.swap_manager = self.lnworker.swap_manager
self.send_amount: Optional[int] = None
self.receive_amount: Optional[int] = None
self.tx = None # only for forward swap
self.is_reverse = None
# init swaps and sliders
asyncio.run(self.swap_manager.get_pairs())
self.update_and_init()
def update_and_init(self):
self.update_fee_text()
self.update_swap_slider()
self.swap_slider_moved(0)
def on_fee_button(self):
fee_dialog = FeeDialog(self, self.config, self.after_fee_changed)
fee_dialog.open()
def after_fee_changed(self):
self.update_fee_text()
self.update_swap_slider()
self.swap_slider_moved(self.ids.swap_slider.value)
def update_fee_text(self):
fee_per_kb = self.config.fee_per_kb()
# eta is -1 when block inclusion cannot be estimated for low fees
eta = self.config.fee_to_eta(fee_per_kb)
fee_per_b = format_fee_satoshis(fee_per_kb / 1000)
suggest_fee = self.config.eta_target_to_fee(RECOMMEND_BLOCKS_SWAP)
suggest_fee_per_b = format_fee_satoshis(suggest_fee / 1000)
s = 's' if eta > 1 else ''
if eta > RECOMMEND_BLOCKS_SWAP or eta == -1:
msg = f'Warning: Your fee rate of {fee_per_b} sat/B may be too ' \
f'low for the swap to succeed before its timeout. ' \
f'The recommended fee rate is at least {suggest_fee_per_b} ' \
f'sat/B.'
else:
msg = f'Info: Your swap is estimated to be processed in {eta} ' \
f'block{s} with an onchain fee rate of {fee_per_b} sat/B.'
self.fee_rate_text = f'{fee_per_b} sat/B'
self.ids.fee_estimate.text = msg
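# Worked example for update_fee_text (illustrative, not part of the original
# file): with fee_per_kb = 5000, fee_per_b formats as roughly "5" sat/B; if
# fee_to_eta() then estimates, say, 40 blocks (> RECOMMEND_BLOCKS_SWAP) or
# returns -1, the warning branch fires and suggests
# eta_target_to_fee(RECOMMEND_BLOCKS_SWAP) / 1000 sat/B instead.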
def update_tx(self, onchain_amount: Union[int, str]):
"""Updates the transaction associated with a forward swap."""
if onchain_amount is None:
self.tx = None
self.ids.ok_button.disabled = True
return
outputs = [PartialTxOutput.from_address_and_value(ln_dummy_address(), onchain_amount)]
coins = self.app.wallet.get_spendable_coins(None)
try:
self.tx = self.app.wallet.make_unsigned_transaction(
coins=coins,
outputs=outputs)
except (NotEnoughFunds, NoDynamicFeeEstimates):
self.tx = None
self.ids.ok_button.disabled = True
def update_swap_slider(self):
"""Sets the minimal and maximal amount that can be swapped for the swap
slider."""
# The tx is rebuilt later with the actual send_amount in the normal-swap case;
# here it only estimates the maximal onchain amount spendable into the HTLC.
self.update_tx('!')
try:
max_onchain_spend = self.tx.output_value_for_address(ln_dummy_address())
except AttributeError: # happens if there are no utxos
max_onchain_spend = 0
reverse = int(min(self.lnworker.num_sats_can_send(),
self.swap_manager.get_max_amount()))
forward = int(min(self.lnworker.num_sats_can_receive(),
# maximally supported swap amount by provider
self.swap_manager.get_max_amount(),
max_onchain_spend))
# Assigning `range` is expected to clamp the slider's current value into the
# new bounds, correcting any overflow when the limits shrink.
self.ids.swap_slider.range = (-reverse, forward)
def swap_slider_moved(self, position: float):
position = int(position)
# pay_amount and receive_amount always have fees already included,
# so they reflect the net balance change caused by the swap
if position < 0: # reverse swap
self.ids.swap_action_label.text = "Adds Lightning receiving capacity."
self.is_reverse = True
pay_amount = abs(position)
self.send_amount = pay_amount
self.ids.send_amount_label.text = \
f"{self.fmt_amt(pay_amount)} (offchain)" if pay_amount else ""
receive_amount = self.swap_manager.get_recv_amount(
send_amount=pay_amount, is_reverse=True)
self.receive_amount = receive_amount
self.ids.receive_amount_label.text = \
f"{self.fmt_amt(receive_amount)} (onchain)" if receive_amount else ""
# fee breakdown
self.ids.server_fee_label.text = \
f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.lockup_fee)}"
self.mining_fee_text = \
f"{self.fmt_amt(self.swap_manager.get_claim_fee())}"
else: # forward (normal) swap
self.ids.swap_action_label.text = f"Adds Lightning sending capacity."
self.is_reverse = False
self.send_amount = position
self.update_tx(self.send_amount)
# add lockup fees, but the swap amount is position
pay_amount = position + self.tx.get_fee() if self.tx else 0
self.ids.send_amount_label.text = \
f"{self.fmt_amt(pay_amount)} (onchain)" if self.fmt_amt(pay_amount) else ""
receive_amount = self.swap_manager.get_recv_amount(
send_amount=position, is_reverse=False)
self.receive_amount = receive_amount
self.ids.receive_amount_label.text = \
f"{self.fmt_amt(receive_amount)} (offchain)" if receive_amount else ""
# fee breakdown
self.ids.server_fee_label.text = \
f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.normal_fee)}"
self.mining_fee_text = \
f"{self.fmt_amt(self.tx.get_fee())}" if self.tx else ""
if pay_amount and receive_amount:
self.ids.ok_button.disabled = False
else:
# add more nuanced error reporting?
self.ids.swap_action_label.text = "Swap below minimal swap size, change the slider."
self.ids.ok_button.disabled = True
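# For exposition (not part of the original file): the slider sign encodes the
# swap direction. Negative positions are reverse swaps (pay over Lightning,
# receive onchain, which frees up Lightning receiving capacity); positive
# positions are forward swaps (pay onchain, receive over Lightning, which
# adds sending capacity). Positions below the provider's minimal swap size
# leave the OK button disabled.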
def do_normal_swap(self, lightning_amount, onchain_amount, password):
tx = self.tx
assert tx
if lightning_amount is None or onchain_amount is None:
return
loop = self.app.network.asyncio_loop
coro = self.swap_manager.normal_swap(
lightning_amount_sat=lightning_amount,
expected_onchain_amount_sat=onchain_amount,
password=password,
tx=tx,
)
asyncio.run_coroutine_threadsafe(coro, loop)
def do_reverse_swap(self, lightning_amount, onchain_amount, password):
if lightning_amount is None or onchain_amount is None:
return
loop = self.app.network.asyncio_loop
coro = self.swap_manager.reverse_swap(
lightning_amount_sat=lightning_amount,
expected_onchain_amount_sat=onchain_amount + self.swap_manager.get_claim_fee(),
)
asyncio.run_coroutine_threadsafe(coro, loop)
def on_ok(self):
if not self.app.network:
self.app.show_error(_("You are offline."))
return
if self.is_reverse:
lightning_amount = self.send_amount
onchain_amount = self.receive_amount
self.app.protected(
'Do you want to do a reverse submarine swap?',
self.do_reverse_swap, (lightning_amount, onchain_amount))
else:
lightning_amount = self.receive_amount
onchain_amount = self.send_amount
self.app.protected(
'Do you want to do a submarine swap? '
'You will need to wait for the swap transaction to confirm.',
self.do_normal_swap, (lightning_amount, onchain_amount)) | capacity:''
funding_txid:'' |
movie.js | if ($('#page-movie').length > 0) {
$("button#edit-btn").click(function() {
alert($(this).data("id"));
});
$("button#delete-btn").click(function() {
alert($(this).data("id")); | } | }); |
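// Note (illustrative, not part of the original file): if these buttons are
// injected into the DOM after page load, delegated handlers bind reliably:
//
//   $(document).on('click', 'button#edit-btn', function () {
//     alert($(this).data('id'));
//   });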
pipeline-builder.ts | /*
* Copyright 2020 ZUP IT SERVICOS EM TECNOLOGIA E INOVACAO SA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { SpinnakerPipeline } from './interfaces'
import { ExpectedArtifact, Stage } from './interfaces/spinnaker-pipeline.interface'
import {
getBakeStage,
getDeleteUnusedStage,
getDeploymentsEvaluationStage,
getDeploymentStage,
getFailureWebhookStage,
getHelmTemplateObject,
getHelmValueObject,
getRollbackDeploymentsStage,
getSuccessWebhookStage
} from './templates/deployment'
import { getDestinationRulesStage } from './templates/deployment/destination-rules-stage'
import { getVirtualServiceStage } from './templates/deployment/virtual-service-stage'
import { getProxyEvaluationStage } from './templates/deployment/proxy-evaluation'
import { Component, Deployment } from '../../../api/deployments/interfaces'
import {
getUndeploymentDestinationRulesStage, getUndeploymentEmptyVirtualServiceStage, getUndeploymentFailureWebhookStage,
getUndeploymentProxyEvaluationStage, getUndeploymentsDeleteUnusedStage, getUndeploymentsSuccessWebhookStage,
getUndeploymentVirtualServiceStage
} from './templates/undeployment'
import { DeploymentTemplateUtils } from './utils/deployment-template.utils'
import { UndeploymentTemplateUtils } from './utils/undeployment-template.utils'
import { ConnectorConfiguration } from '../interfaces/connector-configuration.interface'
import { componentsToBeRemoved, DeploymentUtils } from '../utils/deployment.utils'
import { DeploymentComponent } from '../../../api/deployments/interfaces/deployment.interface'
export class | {
private currentStageId = 1
public buildSpinnakerDeploymentPipeline(deployment: Deployment, activeComponents: Component[], configuration: ConnectorConfiguration): SpinnakerPipeline {
return {
application: `app-${deployment.cdConfiguration.id}`,
name: `${deployment.id}`,
expectedArtifacts: this.getExpectedArtifacts(deployment),
stages: this.getSpinnakerDeploymentStages(deployment, activeComponents, configuration)
}
}
public buildSpinnakerUndeploymentPipeline(
deployment: Deployment,
activeComponents: Component[],
configuration: ConnectorConfiguration
): SpinnakerPipeline {
return {
application: `app-${deployment.cdConfiguration.id}`,
name: `${deployment.id}`,
expectedArtifacts: [],
stages: this.getSpinnakerUndeploymentStages(deployment, activeComponents, configuration)
}
}
private getExpectedArtifacts(deployment: Deployment): ExpectedArtifact[] {
const expectedArtifacts: ExpectedArtifact[] = []
deployment.components?.forEach(component => {
expectedArtifacts.push(getHelmTemplateObject(component, deployment.cdConfiguration))
expectedArtifacts.push(getHelmValueObject(component, deployment.cdConfiguration))
})
return expectedArtifacts
}
private getSpinnakerDeploymentStages(deployment: Deployment, activeComponents: Component[], configuration: ConnectorConfiguration): Stage[] {
this.currentStageId = 1
return [
...this.getDeploymentStages(deployment),
...this.getProxyDeploymentStages(deployment, activeComponents),
...this.getDeploymentsEvaluationStage(deployment.components),
...this.getProxyDeploymentsEvaluationStage(deployment.components),
...this.getRollbackDeploymentsStage(deployment, activeComponents),
...this.getUnusedVersions(deployment, activeComponents),
...this.getFailureWebhookStage(deployment, configuration),
...this.getSuccessWebhookStage(deployment, configuration)
]
}
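// For exposition (not part of the original file): for a deployment with
// components [A, B], the list above comes out as
//   bake A, deploy A, bake B, deploy B,
//   destination-rules A, virtual-service A, destination-rules B, virtual-service B,
//   deployments-evaluation, proxy-evaluation,
//   rollback stages (per component, when no identical version is active),
//   delete-unused stages, failure-webhook, success-webhook,
// with stage ids assigned by the incrementing currentStageId counter.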
private getSpinnakerUndeploymentStages(deployment: Deployment, activeComponents: Component[], configuration: ConnectorConfiguration): Stage[] {
this.currentStageId = 1
return [
...this.getProxyUndeploymentStages(deployment, activeComponents),
...this.getProxyUndeploymentsEvaluationStage(deployment.components),
...this.getUndeploymentDeleteUnusedDeploymentsStage(deployment),
...this.getUndeploymentFailureWebhookStage(deployment, configuration),
...this.getUndeploymentSuccessWebhookStage(deployment, configuration)
]
}
private getDeploymentStages(deployment: Deployment): Stage[] {
const deploymentStages: Stage[] = []
deployment.components?.forEach(component => {
deploymentStages.push(getBakeStage(component, deployment.cdConfiguration, this.currentStageId++, deployment.circleId))
deploymentStages.push(getDeploymentStage(component, deployment.cdConfiguration, this.currentStageId++))
})
return deploymentStages
}
private getProxyDeploymentStages(deployment: Deployment, activeComponents: Component[]): Stage[] {
if (!deployment?.components) {
return []
}
const proxyStages: Stage[] = []
const evalStageId: number = DeploymentTemplateUtils.getDeploymentEvalStageId(deployment.components)
deployment.components.forEach(component => {
const activeByName: Component[] = DeploymentUtils.getActiveComponentsByName(activeComponents, component.name)
proxyStages.push(getDestinationRulesStage(component, deployment, activeByName, this.currentStageId++, evalStageId))
proxyStages.push(getVirtualServiceStage(component, deployment, activeByName, this.currentStageId++))
})
return proxyStages
}
private getProxyUndeploymentStages(deployment: Deployment, activeComponents: Component[]): Stage[] {
if (!deployment?.components) {
return []
}
const proxyStages: Stage[] = []
deployment.components.forEach(component => {
const activeByName: Component[] = DeploymentUtils.getActiveComponentsByName(activeComponents, component.name)
proxyStages.push(getUndeploymentDestinationRulesStage(component, deployment, activeByName, this.currentStageId++))
proxyStages.push(activeByName.length > 1 ?
getUndeploymentVirtualServiceStage(component, deployment, activeByName, this.currentStageId++) :
getUndeploymentEmptyVirtualServiceStage(component, deployment, this.currentStageId++)
)
})
return proxyStages
}
private getDeploymentsEvaluationStage(components: DeploymentComponent[] | undefined): Stage[] {
return components && components.length ?
[getDeploymentsEvaluationStage(components, this.currentStageId++)] :
[]
}
private getRollbackDeploymentsStage(deployment: Deployment, activeComponents: Component[]): Stage[] {
if (!deployment?.components) {
return []
}
const stages: Stage[] = []
const evalStageId: number = DeploymentTemplateUtils.getDeploymentEvalStageId(deployment.components)
deployment.components?.forEach(component => {
const activeSameCircleSameTag = DeploymentUtils.getActiveSameCircleTagComponent(activeComponents, component, deployment.circleId)
if (!activeSameCircleSameTag) {
stages.push(getRollbackDeploymentsStage(component, deployment.cdConfiguration, this.currentStageId++, evalStageId, deployment.circleId))
}
})
return stages
}
private getProxyDeploymentsEvaluationStage(components: DeploymentComponent[] | undefined): Stage[] {
return components && components.length ?
[getProxyEvaluationStage(components, this.currentStageId++)] :
[]
}
private getProxyUndeploymentsEvaluationStage(components: DeploymentComponent[] | undefined): Stage[] {
return components && components.length ?
[getUndeploymentProxyEvaluationStage(components, this.currentStageId++)] :
[]
}
private getUnusedVersions(deployment: Deployment, activeComponents: Component[]): Stage[] {
return deployment.defaultCircle ?
this.defaultUnusedVersions(deployment, activeComponents) :
this.circleUnusedVersions(deployment, activeComponents)
}
private defaultUnusedVersions(deployment: Deployment, activeComponents: Component[]): Stage[] {
if (!deployment?.components) {
return []
}
const stages: Stage[] = []
const evalStageId: number = DeploymentTemplateUtils.getProxyEvalStageId(deployment.components)
deployment.components.forEach(component => {
const unusedComponent: Component | undefined = DeploymentUtils.getUnusedComponent(activeComponents, component, deployment.circleId)
if (unusedComponent) {
stages.push(getDeleteUnusedStage(unusedComponent, deployment.cdConfiguration, this.currentStageId++, evalStageId, deployment.circleId))
}
})
return stages
}
private circleUnusedVersions(deployment: Deployment, activeComponents: Component[]): Stage[] {
if (!deployment?.components) {
return []
}
const evalStageId: number = DeploymentTemplateUtils.getProxyEvalStageId(deployment.components)
return componentsToBeRemoved(deployment, activeComponents).map(component => {
return getDeleteUnusedStage(component, deployment.cdConfiguration, this.currentStageId++, evalStageId, deployment.circleId)
})
}
private getUndeploymentDeleteUnusedDeploymentsStage(deployment: Deployment): Stage[] {
if (!deployment?.components) {
return []
}
const stages: Stage[] = []
const evalStageId: number = UndeploymentTemplateUtils.getProxyEvalStageId(deployment.components)
deployment.components.forEach(component => {
stages.push(getUndeploymentsDeleteUnusedStage(component, deployment.cdConfiguration, this.currentStageId++, evalStageId, deployment.circleId))
})
return stages
}
private getFailureWebhookStage(deployment: Deployment, configuration: ConnectorConfiguration): Stage[] {
return [getFailureWebhookStage(deployment, this.currentStageId++, configuration)]
}
private getSuccessWebhookStage(deployment: Deployment, configuration: ConnectorConfiguration): Stage[] {
return [getSuccessWebhookStage(deployment, this.currentStageId++, configuration)]
}
private getUndeploymentFailureWebhookStage(deployment: Deployment, configuration: ConnectorConfiguration): Stage[] {
return [getUndeploymentFailureWebhookStage(deployment, this.currentStageId++, configuration)]
}
private getUndeploymentSuccessWebhookStage(deployment: Deployment, configuration: ConnectorConfiguration): Stage[] {
return [getUndeploymentsSuccessWebhookStage(deployment, this.currentStageId++, configuration)]
}
}
| SpinnakerPipelineBuilder |
gomod.go | package gomod
import (
"strings"
"github.com/sirkon/goproxy/internal/errors"
"github.com/sirkon/goproxy/internal/modfile"
"github.com/sirkon/goproxy/internal/modload"
"github.com/sirkon/goproxy/internal/module"
)
// Replacement is a type-safe hack to deal with the lack of algebraic/variant typing in Go
type Replacement interface {
notYourConcern()
}
var _ Replacement = Dependency{}
// Dependency describes module and its version
type Dependency struct {
Path string
Version string
}
func (d Dependency) notYourConcern() {}
var _ Replacement = RelativePath("") |
// RelativePath describes a relative path replacement
type RelativePath string
func (p RelativePath) notYourConcern() {}
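// Illustrative sketch (not part of the original package): callers can
// recover the concrete replacement kind with a type switch. The helper
// name below is hypothetical.
func describeReplacement(r Replacement) string {
	switch v := r.(type) {
	case Dependency:
		return v.Path + "@" + v.Version
	case RelativePath:
		return string(v)
	default:
		return "unknown replacement"
	}
}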
// Module is a parsed go.mod description
type Module struct {
Name string
GoVersion string
Require map[string]string
Exclude map[string]string
Replace map[string]Replacement
}
// Parse parses the given go.mod file contents
func Parse(fileName string, input []byte) (*Module, error) {
gomod, err := modfile.Parse(fileName, input, fixVersion)
if err != nil {
return nil, err
}
var goVersion string
if gomod.Go != nil {
goVersion = gomod.Go.Version
}
res := &Module{
Name: gomod.Module.Mod.Path,
GoVersion: goVersion,
Require: map[string]string{},
Exclude: map[string]string{},
Replace: map[string]Replacement{},
}
for _, req := range gomod.Require {
res.Require[req.Mod.Path] = req.Mod.Version
}
for _, exc := range gomod.Exclude {
res.Exclude[exc.Mod.Path] = exc.Mod.Version
}
for _, rep := range gomod.Replace {
if len(rep.New.Version) == 0 {
// it is a path replacement
res.Replace[rep.Old.Path] = RelativePath(rep.New.Path)
} else {
res.Replace[rep.Old.Path] = Dependency{
Path: rep.New.Path,
Version: rep.New.Version,
}
}
}
return res, nil
}
func fixVersion(path, vers string) (string, error) {
// Special case: remove the old -gopkgin- hack.
if strings.HasPrefix(path, "gopkg.in/") && strings.Contains(vers, "-gopkgin-") {
vers = vers[strings.Index(vers, "-gopkgin-")+len("-gopkgin-"):]
}
// fixVersion is called speculatively on every
// module, version pair from every go.mod file.
// Avoid the query if it looks OK.
_, pathMajor, ok := module.SplitPathVersion(path)
if !ok {
return "", errors.Newf("malformed module path: %s", path)
}
if vers != "" && module.CanonicalVersion(vers) == vers && module.MatchPathMajor(vers, pathMajor) {
return vers, nil
}
info, err := modload.Query(path, vers, nil)
if err != nil {
return "", err
}
return info.Version, nil
} | |
merge-section.ts | import { SectionedConfig, BitcoinConfig } from './types';
import { BITCOIN_CONFIG_OPTIONS } from './bitcoin-config-options';
import { getChainName } from './get-chain-name';
import { mergeBitcoinConfigs } from './merge-bitcoin-configs';
export function mergeSection(sectionedConfig: SectionedConfig): BitcoinConfig {
const chainName = getChainName(sectionedConfig);
const { sections, ...rest } = sectionedConfig;
if (chainName !== 'main') {
for (const [optionName, option] of Object.entries(BITCOIN_CONFIG_OPTIONS)) {
if (option.onlyAppliesToMain) {
delete rest[optionName as keyof typeof BITCOIN_CONFIG_OPTIONS];
}
}
}
if (!sections) {
return rest;
}
const sectionConfig = sections[chainName];
if (!sectionConfig) {
return rest; | return mergeBitcoinConfigs(rest, sectionConfig);
} | } |
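// Illustrative usage (not part of the original file; the option names are
// assumptions about the SectionedConfig shape):
//
//   const merged = mergeSection({
//     rpcuser: 'alice',
//     sections: { regtest: { rpcport: 18443 } },
//   });
//   // If getChainName() resolves to 'regtest', merged combines the
//   // top-level options with the regtest section:
//   //   { rpcuser: 'alice', rpcport: 18443 }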
queryable.go | package marlow
import "io"
import "fmt"
import "strings"
import "net/url"
import "github.com/gedex/inflector"
import "github.com/090809/marlow/marlow/writing"
import "github.com/090809/marlow/marlow/constants"
type finderSymbols struct {
blueprint string
results string
rowItem string
queryString string
statementResult string
statementError string
queryResult string
queryError string
recordSlice string
limit string
offset string
}
// finder builds a generator that is responsible for creating the FindRecord methods for a given record store.
func finder(record marlowRecord) io.Reader {
pr, pw := io.Pipe()
// Build the method name.
methodName := fmt.Sprintf("%s%s",
record.config.Get(constants.StoreFindMethodPrefixConfigOption),
inflector.Pluralize(record.name()),
)
if len(record.fields) == 0 {
pw.CloseWithError(nil)
return pr
}
blueprintName := record.config.Get(constants.BlueprintNameConfigOption)
symbols := finderSymbols{
blueprint: "_blueprint",
results: "_results",
rowItem: "_row",
statementResult: "_statement",
statementError: "_se",
queryString: "_queryString",
queryResult: "_queryResult",
queryError: "_qe",
limit: "_limit",
offset: "_offset",
recordSlice: fmt.Sprintf("[]*%s", record.name()),
}
go func() {
gosrc := writing.NewGoWriter(pw)
gosrc.Comment("[marlow feature]: finder on table[%s]", record.table())
params := []writing.FuncParam{
{Symbol: symbols.blueprint, Type: fmt.Sprintf("*%s", blueprintName)},
}
returns := []string{symbols.recordSlice, "error"}
fieldList := record.fieldList(nil)
defaultLimit := record.config.Get(constants.DefaultLimitConfigOption)
if defaultLimit == "" {
pw.CloseWithError(fmt.Errorf("invalid defaultLimit for record %s", record.name()))
return
}
e := gosrc.WithMethod(methodName, record.store(), params, returns, func(scope url.Values) error {
logwriter := logWriter{output: gosrc, receiver: scope.Get("receiver")}
// Prepare the array that will be returned.
gosrc.Println("%s := make(%s, 0)\n", symbols.results, symbols.recordSlice)
defer gosrc.Returns(symbols.results, writing.Nil)
columns := make([]string, len(fieldList))
for i, n := range fieldList {
columns[i] = n.column
}
// Prepare the sql statement that will be sent to the DB.
gosrc.Println(
"%s := bytes.NewBufferString(\"SELECT %s FROM %s\")",
symbols.queryString,
strings.Join(columns, ","),
record.table(),
)
// Write our where clauses
e := gosrc.WithIf("%s != nil", func(url.Values) error {
return gosrc.Println("fmt.Fprintf(%s, \" %%s\", %s)", symbols.queryString, symbols.blueprint)
}, symbols.blueprint)
if e != nil {
return e
}
// Write the limit-determining code.
limitCondition := fmt.Sprintf("%s != nil && %s.Limit >= 1", symbols.blueprint, symbols.blueprint)
gosrc.Println("%s := %s", symbols.limit, defaultLimit)
e = gosrc.WithIf(limitCondition, func(url.Values) error {
return gosrc.Println("%s = %s.Limit", symbols.limit, symbols.blueprint)
})
if e != nil {
return e
}
// Write the offset determining code.
offsetCondition := fmt.Sprintf("%s != nil && %s.Offset >= 1", symbols.blueprint, symbols.blueprint)
gosrc.Println("%s := 0", symbols.offset)
e = gosrc.WithIf(offsetCondition, func(url.Values) error {
return gosrc.Println("%s = %s.Offset", symbols.offset, symbols.blueprint)
})
if e != nil {
return e
}
// Write out the limit & offset query write.
gosrc.Println(
"fmt.Fprintf(%s, \" LIMIT %%d OFFSET %%d\", %s, %s)",
symbols.queryString,
symbols.limit,
symbols.offset,
)
logwriter.AddLog(symbols.queryString, fmt.Sprintf("%s.Values()", symbols.blueprint))
// Write the statement preparation code.
gosrc.Println(
"%s, %s := %s.Prepare(%s.String())",
symbols.statementResult,
symbols.statementError,
scope.Get("receiver"),
symbols.queryString,
)
// The statement has been prepared; write out the error handler.
gosrc.WithIf("%s != nil", func(url.Values) error {
return gosrc.Returns(writing.Nil, symbols.statementError)
}, symbols.statementError)
// Write out result close deferred statement.
gosrc.Println("defer %s.Close()", symbols.statementResult)
gosrc.Println(
"%s, %s := %s.Query(%s.Values()...)",
symbols.queryResult,
symbols.queryError,
symbols.statementResult,
symbols.blueprint,
)
// Check whether the query returned an error.
gosrc.WithIf("%s != nil ", func(url.Values) error {
return gosrc.Returns(writing.Nil, symbols.queryError)
}, symbols.queryError)
// Build the iteration that will loop over the row results, scanning them into real records.
return gosrc.WithIter("%s.Next()", func(url.Values) error {
gosrc.Println("var %s %s", symbols.rowItem, record.name())
references := make([]string, 0, len(record.fields))
for _, f := range fieldList {
references = append(references, fmt.Sprintf("&%s.%s", symbols.rowItem, f.name))
}
scans := strings.Join(references, ",")
// Write the scan attempt and check for errors.
condition := fmt.Sprintf("e := %s.Scan(%s); e != nil", symbols.queryResult, scans)
gosrc.WithIf(condition, func(url.Values) error {
gosrc.Println("return nil, e")
return nil
})
gosrc.Println("%s = append(%s, &%s)", symbols.results, symbols.results, symbols.rowItem)
return nil
}, symbols.queryResult)
})
if e != nil {
pw.CloseWithError(e)
return
}
record.registerStoreMethod(writing.FuncDecl{
Name: methodName,
Params: params,
Returns: returns,
})
record.registerImports("fmt", "bytes", "strings")
pw.Close()
}()
return pr
}
type counterSymbols struct {
countMethodName string
blueprint string
statementQuery string
statementResult string
statementError string
queryResult string
queryError string
scanResult string
scanError string
}
// counter generates the CountRecords methods for a given record store.
func counter(record marlowRecord) io.Reader {
pr, pw := io.Pipe()
methodPrefix := record.config.Get(constants.StoreCountMethodPrefixConfigOption)
if len(record.fields) == 0 {
pw.CloseWithError(nil)
return pr
}
symbols := counterSymbols{
countMethodName: fmt.Sprintf("%s%s", methodPrefix, inflector.Pluralize(record.name())),
blueprint: "_blueprint",
statementQuery: "_raw",
statementError: "_statementError",
statementResult: "_statement",
queryResult: "_queryResult",
queryError: "_queryError",
scanResult: "_scanResult",
scanError: "_scanError",
}
go func() {
gosrc := writing.NewGoWriter(pw)
gosrc.Comment("[marlow feature]: counter on table[%s]", record.table())
params := []writing.FuncParam{
{Symbol: symbols.blueprint, Type: fmt.Sprintf("*%s", record.blueprint())},
}
returns := []string{
"int",
"error",
}
e := gosrc.WithMethod(symbols.countMethodName, record.store(), params, returns, func(scope url.Values) error {
receiver := scope.Get("receiver")
logwriter := logWriter{output: gosrc, receiver: receiver}
gosrc.WithIf("%s == nil", func(url.Values) error {
return gosrc.Println("%s = &%s{}", params[0].Symbol, record.blueprint())
}, symbols.blueprint)
gosrc.Println(
"%s := fmt.Sprintf(\"SELECT COUNT(*) FROM %s %%s;\", %s)",
symbols.StatementQuery,
record.table(),
symbols.blueprint,
)
logwriter.AddLog(symbols.statementQuery, fmt.Sprintf("%s.Values()", symbols.blueprint))
gosrc.Println(
"%s, %s := %s.Prepare(%s)",
symbols.statementResult,
symbols.statementError,
receiver,
symbols.statementQuery,
)
gosrc.WithIf("%s != nil", func(url.Values) error {
return gosrc.Returns("-1", symbols.statementError)
}, symbols.statementError)
gosrc.Println("defer %s.Close()", symbols.statementResult)
// Write the query execution, using the blueprint Values().
gosrc.Println(
"%s, %s := %s.Query(%s.Values()...)",
symbols.queryResult,
symbols.queryError,
symbols.statementResult,
symbols.blueprint,
)
gosrc.WithIf("%s != nil", func(url.Values) error {
return gosrc.Returns("-1", symbols.queryError)
}, symbols.queryError)
gosrc.Println("defer %s.Close()", symbols.queryResult)
gosrc.WithIf("%s.Next() != true", func(url.Values) error {
return gosrc.Returns("-1", "fmt.Errorf(\"invalid-scan\")")
}, symbols.queryResult)
// Scan the result into it's integer form.
gosrc.Println("var %s int", symbols.ScanResult)
gosrc.Println("%s := %s.Scan(&%s)", symbols.scanError, symbols.queryResult, symbols.ScanResult)
gosrc.WithIf("%s != nil", func(url.Values) error {
return gosrc.Returns("-1", symbols.scanError)
}, symbols.scanError)
return gosrc.Returns(symbols.scanResult, writing.Nil)
})
if e == nil {
record.registerImports("fmt")
record.registerStoreMethod(writing.FuncDecl{
Name: symbols.countMethodName,
Params: params,
Returns: returns,
})
}
pw.CloseWithError(e)
}()
return pr
}
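// For exposition (not part of the original file): the generated counter
// issues SQL of the form
//
//	SELECT COUNT(*) FROM <table> <where clause from the blueprint>;
//
// scans the single resulting row into an int, and returns -1 alongside any
// preparation, query, or scan error.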
type selectorSymbols struct {
returnSlice string
queryResult string
queryError string
queryString string
statementResult string
statementError string
blueprint string
rowItem string
scanError string
limit string
offset string
}
// selector will return a generator that will produce a single field selection method for a given record store.
func selector(record marlowRecord, fieldName string, fieldConfig url.Values) io.Reader {
pr, pw := io.Pipe()
// Build this field's select method name - will take the form "SelectAuthorIDs", "SelectAuthorNames".
methodName := fmt.Sprintf(
"%s%s%s",
record.config.Get(constants.StoreSelectMethodPrefixConfigOption),
record.name(),
inflector.Pluralize(fieldName),
)
columnName := fieldConfig.Get(constants.ColumnConfigOption)
returnItemType := fieldConfig.Get("type")
returnArrayType := fmt.Sprintf("[]%v", returnItemType)
returns := []string{
returnArrayType,
"error",
}
symbols := selectorSymbols{
returnSlice: "_results",
queryString: "_queryString",
queryResult: "_queryResult",
queryError: "_qe",
statementResult: "_statement",
statementError: "_se",
scanError: "_re",
blueprint: "_blueprint",
rowItem: "_row",
limit: "_limit",
offset: "_offset",
}
params := []writing.FuncParam{
{Type: fmt.Sprintf("*%s", record.blueprint()), Symbol: symbols.blueprint},
}
columnReference := fmt.Sprintf("%s.%s", record.table(), columnName)
go func() {
gosrc := writing.NewGoWriter(pw)
gosrc.Comment("[marlow] field selector for %s (%s) [print: %s]", fieldName, methodName, record.blueprint())
e := gosrc.WithMethod(methodName, record.store(), params, returns, func(scope url.Values) error {
logwriter := logWriter{output: gosrc, receiver: scope.Get("receiver")}
gosrc.Println("%s := make(%s, 0)", symbols.returnSlice, returnArrayType)
gosrc.Println(
"%s := bytes.NewBufferString(\"SELECT %s FROM %s\")",
symbols.queryString,
columnReference,
record.table(),
)
// Write the where clause from the blueprint, if one was provided.
gosrc.WithIf("%s != nil", func(url.Values) error {
return gosrc.Println("fmt.Fprintf(%s, \" %%s\", %s)", symbols.queryString, symbols.blueprint)
}, symbols.blueprint)
// Apply the limits and offsets to the query
defaultLimit := record.config.Get(constants.DefaultLimitConfigOption)
gosrc.Println("%s, %s := %s, 0", symbols.limit, symbols.offset, defaultLimit)
gosrc.WithIf("%s != nil && %s.Offset > 0", func(url.Values) error {
return gosrc.Println("%s = %s.Offset", symbols.offset, symbols.blueprint)
}, symbols.blueprint, symbols.blueprint)
gosrc.WithIf("%s != nil && %s.Limit > 0", func(url.Values) error {
return gosrc.Println("%s = %s.Limit", symbols.limit, symbols.blueprint)
}, symbols.blueprint, symbols.blueprint)
rangeString := "\" LIMIT %d OFFSET %d\""
// Write the statement that appends the limit and offset to the query string.
gosrc.Println("fmt.Fprintf(%s, %s, %s, %s)", symbols.queryString, rangeString, symbols.limit, symbols.offset)
// Prepare the statement from the assembled query string.
gosrc.Println(
"%s, %s := %s.Prepare(%s.String())",
symbols.statementResult,
symbols.statementError,
scope.Get("receiver"),
symbols.queryString,
)
logwriter.AddLog(symbols.queryString, fmt.Sprintf("%s.Values()", symbols.blueprint))
gosrc.WithIf("%s != nil", func(url.Values) error {
return gosrc.Returns(writing.Nil, symbols.statementError)
}, symbols.statementError)
// Defer closing the prepared statement.
gosrc.Println("defer %s.Close()", symbols.statementResult)
// Write the query execution using the blueprint values.
gosrc.Println(
"%s, %s := %s.Query(%s.Values()...)",
symbols.queryResult,
symbols.queryError,
symbols.statementResult,
symbols.blueprint,
)
gosrc.WithIf("%s != nil", func(url.Values) error {
return gosrc.Returns(writing.Nil, symbols.queryError)
}, symbols.queryError)
// Defer closing the query result.
gosrc.Println("defer %s.Close()", symbols.queryResult)
e := gosrc.WithIter("%s.Next()", func(url.Values) error {
gosrc.Println("var %s %s", symbols.rowItem, returnItemType)
condition := fmt.Sprintf(
"%s := %s.Scan(&%s); %s != nil",
symbols.scanError,
symbols.queryResult,
symbols.rowItem,
symbols.scanError,
)
gosrc.WithIf(condition, func(url.Values) error {
return gosrc.Returns(writing.Nil, symbols.scanError)
})
return gosrc.Println("%s = append(%s, %s)", symbols.returnSlice, symbols.returnSlice, symbols.rowItem)
}, symbols.queryResult)
if e != nil {
return e
}
record.registerStoreMethod(writing.FuncDecl{
Name: methodName,
Params: params,
Returns: returns,
})
gosrc.Println("return %s, nil", symbols.returnSlice)
return nil
})
pw.CloseWithError(e)
}()
return pr
}
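// Illustrative only: per the naming comment above, a "name" field on a
// hypothetical "Author" record yields a method shaped roughly like this
// (identifiers are assumptions, not verbatim generator output):
//
//	func (s *AuthorStore) SelectAuthorNames(_blueprint *AuthorBlueprint) ([]string, error) {
//		// builds "SELECT authors.name FROM authors", appends the blueprint's
//		// where clause and " LIMIT %d OFFSET %d", then scans each row into
//		// the returned string slice
//	}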
// newQueryableGenerator is responsible for returning a reader that will generate lookup functions for a given record.
func newQueryableGenerator(record marlowRecord) io.Reader {
pr, pw := io.Pipe()
if len(record.table()) == 0 || len(record.name()) == 0 || len(record.store()) == 0 {
pw.CloseWithError(fmt.Errorf("invalid record config"))
return pr
}
features := []io.Reader{
finder(record),
counter(record),
}
for name, config := range record.fields {
s := selector(record, name, config)
features = append(features, s)
}
go func() {
_, e := io.Copy(pw, io.MultiReader(features...))
pw.CloseWithError(e)
}()
return pr | } | |
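// Note (sketch of the shared pattern, not project API): every generator here
// returns the read half of an io.Pipe and writes source from a goroutine;
// pw.CloseWithError(e) doubles as a clean close when e is nil, so the
// io.MultiReader above simply advances to the next feature reader on EOF.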
index.js | export { default as userOperations } from './userOperations';
export { default as userActions } from './userActions';
export { default as userReducers } from './userReducers'; | export { default as userSelector } from './userSelector'; | |
node.rs | // Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::{HashMap, HashSet};
use std::sync::{mpsc, Arc, Mutex, RwLock};
use std::sync::mpsc::Sender;
use std::time::Duration;
use std::boxed::FnBox;
use std::ops::Deref;
use tempdir::TempDir;
use super::cluster::{Cluster, Simulator};
use tikv::server::Node;
use tikv::raftstore::store::*;
use kvproto::metapb;
use kvproto::raft_cmdpb::*;
use kvproto::raft_serverpb::{self, RaftMessage};
use kvproto::eraftpb::MessageType;
use tikv::config::TiKvConfig;
use tikv::raftstore::{Error, Result};
use tikv::util::HandyRwLock;
use tikv::util::transport::SendCh;
use tikv::server::transport::{RaftStoreRouter, ServerRaftStoreRouter};
use tikv::raft::SnapshotStatus;
use super::pd::TestPdClient;
use super::transport_simulate::*;
pub struct ChannelTransportCore {
snap_paths: HashMap<u64, (SnapManager, TempDir)>,
routers: HashMap<u64, SimulateTransport<Msg, ServerRaftStoreRouter>>,
snapshot_status_senders: HashMap<u64, Mutex<Sender<SnapshotStatusMsg>>>,
}
#[derive(Clone)]
pub struct ChannelTransport {
core: Arc<RwLock<ChannelTransportCore>>,
}
impl ChannelTransport {
pub fn new() -> ChannelTransport {
ChannelTransport {
core: Arc::new(RwLock::new(ChannelTransportCore {
snap_paths: HashMap::new(),
routers: HashMap::new(),
snapshot_status_senders: HashMap::new(),
})),
}
}
}
impl Deref for ChannelTransport {
type Target = Arc<RwLock<ChannelTransportCore>>;
fn deref(&self) -> &Self::Target {
&self.core
}
}
impl Channel<RaftMessage> for ChannelTransport {
fn send(&self, msg: RaftMessage) -> Result<()> {
let from_store = msg.get_from_peer().get_store_id();
let to_store = msg.get_to_peer().get_store_id();
let to_peer_id = msg.get_to_peer().get_id();
let region_id = msg.get_region_id();
let is_snapshot = msg.get_message().get_msg_type() == MessageType::MsgSnapshot;
if is_snapshot {
let snap = msg.get_message().get_snapshot();
let key = SnapKey::from_snap(snap).unwrap();
let from = match self.rl().snap_paths.get(&from_store) {
Some(p) => {
p.0.register(key.clone(), SnapEntry::Sending);
p.0.get_snapshot_for_sending(&key).unwrap()
}
None => return Err(box_err!("missing temp dir for store {}", from_store)),
};
let to = match self.rl().snap_paths.get(&to_store) {
Some(p) => {
p.0.register(key.clone(), SnapEntry::Receiving);
let data = snap.get_data();
p.0.get_snapshot_for_receiving(&key, data).unwrap()
}
None => return Err(box_err!("missing temp dir for store {}", to_store)),
};
defer!({
let core = self.rl();
core.snap_paths[&from_store]
.0
.deregister(&key, &SnapEntry::Sending);
core.snap_paths[&to_store]
.0
.deregister(&key, &SnapEntry::Receiving);
});
try!(copy_snapshot(from, to));
}
match self.core.rl().routers.get(&to_store) {
Some(h) => {
try!(h.send_raft_msg(msg));
if is_snapshot {
// Report the snapshot as finished back to the sending store.
let core = self.rl();
core.snapshot_status_senders[&from_store]
.lock()
.unwrap()
.send(SnapshotStatusMsg {
region_id: region_id,
to_peer_id: to_peer_id,
status: SnapshotStatus::Finish,
})
.unwrap();
}
Ok(())
}
_ => Err(box_err!("missing sender for store {}", to_store)),
}
}
}
type SimulateChannelTransport = SimulateTransport<RaftMessage, ChannelTransport>;
pub struct NodeCluster {
trans: ChannelTransport,
pd_client: Arc<TestPdClient>,
nodes: HashMap<u64, Node<TestPdClient>>,
simulate_trans: HashMap<u64, SimulateChannelTransport>,
}
impl NodeCluster {
pub fn new(pd_client: Arc<TestPdClient>) -> NodeCluster |
}
impl NodeCluster {
#[allow(dead_code)]
pub fn get_node_router(&self, node_id: u64) -> SimulateTransport<Msg, ServerRaftStoreRouter> {
self.trans.rl().routers.get(&node_id).cloned().unwrap()
}
}
impl Simulator for NodeCluster {
fn run_node(&mut self, node_id: u64, cfg: TiKvConfig, engines: Engines) -> u64 {
assert!(node_id == 0 || !self.nodes.contains_key(&node_id));
let mut event_loop = create_event_loop(&cfg.raft_store).unwrap();
let (snap_status_sender, snap_status_receiver) = mpsc::channel();
let simulate_trans = SimulateTransport::new(self.trans.clone());
let mut node = Node::new(
&mut event_loop,
&cfg.server,
&cfg.raft_store,
self.pd_client.clone(),
);
let (snap_mgr, tmp) = if node_id == 0 ||
!self.trans.rl().snap_paths.contains_key(&node_id)
{
let tmp = TempDir::new("test_cluster").unwrap();
let snap_mgr = SnapManager::new(tmp.path().to_str().unwrap(), Some(node.get_sendch()));
(snap_mgr, Some(tmp))
} else {
let trans = self.trans.rl();
let &(ref snap_mgr, _) = &trans.snap_paths[&node_id];
(snap_mgr.clone(), None)
};
node.start(
event_loop,
engines.clone(),
simulate_trans.clone(),
snap_mgr.clone(),
snap_status_receiver,
).unwrap();
assert!(
engines
.kv_engine
.clone()
.get_msg::<metapb::Region>(&keys::prepare_bootstrap_key())
.unwrap()
.is_none()
);
assert!(node_id == 0 || node_id == node.id());
debug!(
"node_id: {} tmp: {:?}",
node_id,
tmp.as_ref().map(|p| p.path().to_str().unwrap().to_owned())
);
if let Some(tmp) = tmp {
self.trans
.wl()
.snap_paths
.insert(node.id(), (snap_mgr, tmp));
}
let node_id = node.id();
let router = ServerRaftStoreRouter::new(node.get_sendch());
self.trans
.wl()
.routers
.insert(node_id, SimulateTransport::new(router));
self.trans
.wl()
.snapshot_status_senders
.insert(node_id, Mutex::new(snap_status_sender));
self.nodes.insert(node_id, node);
self.simulate_trans.insert(node_id, simulate_trans);
node_id
}
fn get_snap_dir(&self, node_id: u64) -> String {
self.trans.wl().snap_paths[&node_id]
.1
.path()
.to_str()
.unwrap()
.to_owned()
}
fn stop_node(&mut self, node_id: u64) {
if let Some(mut node) = self.nodes.remove(&node_id) {
node.stop().unwrap();
}
self.trans.wl().routers.remove(&node_id).unwrap();
}
fn get_node_ids(&self) -> HashSet<u64> {
self.nodes.keys().cloned().collect()
}
fn call_command_on_node(
&self,
node_id: u64,
request: RaftCmdRequest,
timeout: Duration,
) -> Result<RaftCmdResponse> {
if !self.trans.rl().routers.contains_key(&node_id) {
return Err(box_err!("missing sender for store {}", node_id));
}
let router = self.trans.rl().routers.get(&node_id).cloned().unwrap();
wait_op!(
|cb: Box<FnBox(RaftCmdResponse) + 'static + Send>| {
router.send_command(request, cb).unwrap()
},
timeout
).ok_or_else(|| {
Error::Timeout(format!("request timeout for {:?}", timeout))
})
}
fn send_raft_msg(&mut self, msg: raft_serverpb::RaftMessage) -> Result<()> {
self.trans.send(msg)
}
fn add_send_filter(&mut self, node_id: u64, filter: SendFilter) {
self.simulate_trans
.get_mut(&node_id)
.unwrap()
.add_filter(filter);
}
fn clear_send_filters(&mut self, node_id: u64) {
self.simulate_trans
.get_mut(&node_id)
.unwrap()
.clear_filters();
}
fn add_recv_filter(&mut self, node_id: u64, filter: RecvFilter) {
let mut trans = self.trans.wl();
trans.routers.get_mut(&node_id).unwrap().add_filter(filter);
}
fn clear_recv_filters(&mut self, node_id: u64) {
let mut trans = self.trans.wl();
trans.routers.get_mut(&node_id).unwrap().clear_filters();
}
fn get_store_sendch(&self, node_id: u64) -> Option<SendCh<Msg>> {
self.nodes.get(&node_id).map(|node| node.get_sendch())
}
}
pub fn new_node_cluster(id: u64, count: usize) -> Cluster<NodeCluster> {
let pd_client = Arc::new(TestPdClient::new(id));
let sim = Arc::new(RwLock::new(NodeCluster::new(pd_client.clone())));
Cluster::new(id, count, &[], sim, pd_client)
}
| {
NodeCluster {
trans: ChannelTransport::new(),
pd_client: pd_client,
nodes: HashMap::new(),
simulate_trans: HashMap::new(),
}
} |
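// Illustrative usage (sketch; `run` and `must_put` are assumed helpers on the
// surrounding test-harness `Cluster`, not defined in this file):
//
//     let mut cluster = new_node_cluster(0, 3);
//     cluster.run();                  // bootstrap and start all three nodes
//     cluster.must_put(b"k1", b"v1"); // drive a write through raft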
credentials.js | const CHECK = 'planner/login/credentials/CHECK';
const CREATE = 'planner/login/credentials/CREATE';
const initialState = [
{
email: '[email protected]',
password: 'skyhigh87',
},
];
// Action Creators
export const fetchCredentials = () => ({
type: CHECK,
});
export const createCredentials = (email, password) => ({
type: CREATE,
email,
password,
});
// Reducer
export default function | (state = initialState, action = {}) {
switch (action.type) {
case CHECK:
return state;
case CREATE:
return [...state, {
email: action.email,
password: action.password,
}];
default:
return state;
}
}
// side effects, only as applicable
// e.g. thunks, epics, etc
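// Illustrative usage (assumes a Redux store wired to this reducer; the
// `credentials` slice name is an assumption):
//   store.dispatch(createCredentials('new@user.com', 'hunter2'));
//   store.getState().credentials;
//   // -> [...initialState, { email: 'new@user.com', password: 'hunter2' }]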
| reducer |
conditional_batchnorm.py | import torch.nn as nn
import torch.nn.functional as F
from torch.nn import init
class ConditionalBatchNorm2d(nn.BatchNorm2d):
"""Conditional Batch Normalization"""
def __init__(self, num_features, eps=1e-05, momentum=0.1,
affine=False, track_running_stats=True):
super(ConditionalBatchNorm2d, self).__init__( | self._check_input_dim(input)
exponential_average_factor = 0.0
if self.training and self.track_running_stats:
self.num_batches_tracked += 1
if self.momentum is None: # use cumulative moving average
exponential_average_factor = 1.0 /\
self.num_batches_tracked.item()
else: # use exponential moving average
exponential_average_factor = self.momentum
output = F.batch_norm(input, self.running_mean, self.running_var,
self.weight, self.bias,
self.training or not self.track_running_stats,
exponential_average_factor, self.eps)
if weight.dim() == 1:
weight = weight.unsqueeze(0)
if bias.dim() == 1:
bias = bias.unsqueeze(0)
size = output.size()
weight = weight.unsqueeze(-1).unsqueeze(-1).expand(size)
bias = bias.unsqueeze(-1).unsqueeze(-1).expand(size)
return weight * output + bias
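# Illustrative only: `weight` and `bias` may be per-sample (N, C) or shared
# (C,); a 1-D tensor is unsqueezed to (1, C), and both are then broadcast
# over H and W. For example:
#   bn = ConditionalBatchNorm2d(8)
#   x = torch.randn(4, 8, 16, 16)
#   y = bn(x, torch.ones(4, 8), torch.zeros(4, 8))  # y.shape == x.shape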
class CategoricalConditionalBatchNorm2d(ConditionalBatchNorm2d):
def __init__(self, num_classes, num_features, eps=1e-5, momentum=0.1,
affine=False, track_running_stats=True):
super(CategoricalConditionalBatchNorm2d, self).__init__(
num_features, eps, momentum, affine, track_running_stats
)
self.weights = nn.Embedding(num_classes, num_features)
self.biases = nn.Embedding(num_classes, num_features)
self._initialize()
def _initialize(self):
init.ones_(self.weights.weight.data)
init.zeros_(self.biases.weight.data)
def forward(self, input, c, **kwargs):
weight = self.weights(c)
bias = self.biases(c)
return super(CategoricalConditionalBatchNorm2d, self)\
.forward(input, weight, bias)
if __name__ == '__main__':
"""Forward computation check."""
import torch
size = (3, 3, 12, 12)
batch_size, num_features = size[:2]
print('# Affirm embedding output')
naive_bn = nn.BatchNorm2d(3)
idx_input = torch.tensor([1, 2, 0], dtype=torch.long)
embedding = nn.Embedding(3, 3)
weights = embedding(idx_input)
print('# weights size', weights.size())
empty = torch.tensor((), dtype=torch.float)
running_mean = empty.new_zeros((3,))
running_var = empty.new_ones((3,))
naive_bn_W = naive_bn.weight
input = torch.rand(*size, dtype=torch.float32)
print('input size', input.size())
print('input ndim ', input.dim())
_ = naive_bn(input)
print('# batch_norm with given weights')
try:
with torch.no_grad():
output = F.batch_norm(input, running_mean, running_var,
weights, naive_bn.bias, False, 0.0, 1e-05)
except Exception as e:
print("\tFailed to use given weights")
print('# Error msg:', e)
print()
else:
print("Succeeded to use given weights")
print('\n# Batch norm before use given weights')
with torch.no_grad():
tmp_out = F.batch_norm(input, running_mean, running_var,
naive_bn_W, naive_bn.bias, False, .0, 1e-05)
weights_cast = weights.unsqueeze(-1).unsqueeze(-1)
weights_cast = weights_cast.expand(tmp_out.size())
try:
out = weights_cast * tmp_out
except Exception:
print("Failed")
else:
print("Succeeded!")
print('\t {}'.format(out.size()))
print(type(tuple(out.size())))
print('--- condBN and catCondBN ---')
catCondBN = CategoricalConditionalBatchNorm2d(3, 3)
output = catCondBN(input, idx_input)
assert tuple(output.size()) == size
condBN = ConditionalBatchNorm2d(3)
idx = torch.tensor([1], dtype=torch.long)
out = catCondBN(input, idx)
print('cat cond BN weights\n', catCondBN.weights.weight.data)
print('cat cond BN biases\n', catCondBN.biases.weight.data) | num_features, eps, momentum, affine, track_running_stats
)
def forward(self, input, weight, bias, **kwargs): |
rects.rs | // Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use font::Metrics;
use alacritty_terminal::index::{Column, Point};
use alacritty_terminal::term::cell::Flags;
use alacritty_terminal::term::color::Rgb;
use alacritty_terminal::term::{RenderableCell, SizeInfo};
#[derive(Debug, Copy, Clone)]
pub struct RenderRect {
pub x: f32,
pub y: f32,
pub width: f32,
pub height: f32,
pub color: Rgb,
pub alpha: f32,
}
impl RenderRect {
pub fn new(x: f32, y: f32, width: f32, height: f32, color: Rgb, alpha: f32) -> Self {
RenderRect { x, y, width, height, color, alpha }
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct | {
pub start: Point,
pub end: Point,
pub color: Rgb,
}
impl RenderLine {
pub fn rects(&self, flag: Flags, metrics: &Metrics, size: &SizeInfo) -> Vec<RenderRect> {
let mut rects = Vec::new();
let mut start = self.start;
while start.line < self.end.line {
let mut end = start;
end.col = size.cols() - 1;
rects.push(Self::create_rect(metrics, size, flag, start, end, self.color));
start.col = Column(0);
start.line += 1;
}
rects.push(Self::create_rect(metrics, size, flag, start, self.end, self.color));
rects
}
fn create_rect(
metrics: &Metrics,
size: &SizeInfo,
flag: Flags,
start: Point,
end: Point,
color: Rgb,
) -> RenderRect {
let start_x = start.col.0 as f32 * size.cell_width;
let end_x = (end.col.0 + 1) as f32 * size.cell_width;
let width = end_x - start_x;
let (position, mut height) = match flag {
Flags::UNDERLINE => (metrics.underline_position, metrics.underline_thickness),
Flags::STRIKEOUT => (metrics.strikeout_position, metrics.strikeout_thickness),
_ => unimplemented!("Invalid flag for cell line drawing specified"),
};
// Make sure lines are always visible
height = height.max(1.);
let line_bottom = (start.line.0 as f32 + 1.) * size.cell_height;
let baseline = line_bottom + metrics.descent;
let mut y = (baseline - position - height / 2.).ceil();
let max_y = line_bottom - height;
if y > max_y {
y = max_y;
}
RenderRect::new(start_x + size.padding_x, y + size.padding_y, width, height, color, 1.)
}
}
/// Lines for underline and strikeout.
#[derive(Default)]
pub struct RenderLines {
inner: HashMap<Flags, Vec<RenderLine>>,
}
impl RenderLines {
pub fn new() -> Self {
Self::default()
}
pub fn rects(&self, metrics: &Metrics, size: &SizeInfo) -> Vec<RenderRect> {
self.inner
.iter()
.map(|(flag, lines)| -> Vec<RenderRect> {
lines.iter().map(|line| line.rects(*flag, metrics, size)).flatten().collect()
})
.flatten()
.collect()
}
/// Update the stored lines with the next cell info.
pub fn update(&mut self, cell: RenderableCell) {
for flag in &[Flags::UNDERLINE, Flags::STRIKEOUT] {
if !cell.flags.contains(*flag) {
continue;
}
// Check if there's an active line
if let Some(line) = self.inner.get_mut(flag).and_then(|lines| lines.last_mut()) {
if cell.fg == line.color
&& cell.column == line.end.col + 1
&& cell.line == line.end.line
{
// Update the length of the line
line.end = cell.into();
continue;
}
}
// Otherwise start a new line (none active, or the color/position run was broken).
let line = RenderLine { start: cell.into(), end: cell.into(), color: cell.fg };
match self.inner.get_mut(flag) {
Some(lines) => lines.push(line),
None => {
self.inner.insert(*flag, vec![line]);
},
}
}
}
}
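// Illustrative flow (sketch of a renderer's per-frame use of this type;
// `renderable_cells`, `metrics`, and `size_info` are assumed bindings):
//
//     let mut lines = RenderLines::new();
//     for cell in renderable_cells {
//         lines.update(cell); // extends or starts underline/strikeout runs
//     }
//     let rects = lines.rects(&metrics, &size_info);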
| RenderLine |
lib.rs | // =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
#![doc(html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png")]
//! <p>AWS IoT Jobs is a service that allows you to define a set of jobs — remote operations that are sent to and executed on one or more devices connected to AWS IoT. For example, you can define a job that instructs a set of devices to download and install application or firmware updates, reboot, rotate certificates, or perform remote troubleshooting operations.</p> <p> To create a job, you make a job document which is a description of the remote operations to be performed, and you specify a list of targets that should perform the operations. The targets can be individual things, thing groups or both.</p> <p> AWS IoT Jobs sends a message to inform the targets that a job is available. The target starts the execution of the job by downloading the job document, performing the operations it specifies, and reporting its progress to AWS IoT. The Jobs service provides commands to track the progress of a job on a specific target and for all the targets of the job</p>
//!
//! If you're using the service, you're probably looking for [IotJobsDataClient](struct.IotJobsDataClient.html) and [IotJobsData](trait.IotJobsData.html).
extern crate futures;
#[macro_use]
extern crate log; | extern crate rusoto_core;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
mod generated;
mod custom;
pub use generated::*;
pub use custom::*; |
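// Illustrative only (assumes the usual rusoto client pattern of this era; the
// request type and field names mirror the AWS IoT Jobs data-plane
// GetPendingJobExecutions call and are not verified against this crate):
//
//     let client = IotJobsDataClient::new(Region::UsEast1);
//     let resp = client.get_pending_job_executions(GetPendingJobExecutionsRequest {
//         thing_name: "my-thing".to_owned(),
//     }).sync();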