file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
handshake.go | package librtmp
import (
"bytes"
"crypto/hmac"
"crypto/rand"
"crypto/sha256"
"encoding/binary"
"fmt"
"time"
"github.com/pkg/errors"
)
const (
C0_LEN = 1
C1_LEN = 1536
C2_LEN = 1536
S0_LEN = 1
S1_LEN = 1536
S2_LEN = 1536
)
type HandshakeMode int
const (
SIMPLE HandshakeMode = 0
COMPLEX1 HandshakeMode = 1
COMPLEX2 HandshakeMode = 2
)
var (
FMSKey = []byte{
0x47, 0x65, 0x6e, 0x75, 0x69, 0x6e, 0x65, 0x20,
0x41, 0x64, 0x6f, 0x62, 0x65, 0x20, 0x46, 0x6c,
0x61, 0x73, 0x68, 0x20, 0x4d, 0x65, 0x64, 0x69,
0x61, 0x20, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72,
0x20, 0x30, 0x30, 0x31, // Genuine Adobe Flash Media Peer 001
0xf0, 0xee, 0xc2, 0x4a, 0x80, 0x68, 0xbe, 0xe8,
0x2e, 0x00, 0xd0, 0xd1, 0x02, 0x9e, 0x7e, 0x57,
0x6e, 0xec, 0x5d, 0x2d, 0x29, 0x80, 0x6f, 0xab,
0x93, 0xb8, 0xe6, 0x36, 0xcf, 0xeb, 0x31, 0xae,
}
FPkey = []byte{
0x47, 0x65, 0x6E, 0x75, 0x69, 0x6E, 0x65, 0x20,
0x41, 0x64, 0x6F, 0x62, 0x65, 0x20, 0x46, 0x6C,
0x61, 0x73, 0x68, 0x20, 0x50, 0x6C, 0x61, 0x79,
0x65, 0x72, 0x20, 0x30, 0x30, 0x31, // Genuine Adobe Flash Player 001
0xF0, 0xEE, 0xC2, 0x4A, 0x80, 0x68, 0xBE, 0xE8,
0x2E, 0x00, 0xD0, 0xD1, 0x02, 0x9E, 0x7E, 0x57,
0x6E, 0xEC, 0x5D, 0x2D, 0x29, 0x80, 0x6F, 0xAB,
0x93, 0xB8, 0xE6, 0x36, 0xCF, 0xEB, 0x31, 0xAE,
}
)
type Peer struct {
handshakeMode HandshakeMode
//c0 s0
clientVersion uint8
serverVersion uint8
//simple handshake c1 s1
clientTimeStamp uint32
serverTimeStamp uint32
clientZero uint32
serverZero uint32
//simple handshake c2 s2
clientRandom []byte
serverRandom []byte
//complex handshake c1 s1
clientDigest []byte //32byte
serverDigest []byte //32byte
clientKey []byte //128byte
serverKey []byte //128byte
}
func HandshakeServer(rtmp *RTMP) (err error) {
c0c1c2 := [C0_LEN + C1_LEN + C2_LEN]byte{}
c0 := c0c1c2[:C0_LEN]
c1 := c0c1c2[C0_LEN : C0_LEN+C1_LEN]
c2 := c0c1c2[C0_LEN+C1_LEN:]
p := &Peer{
serverVersion: 3,
}
err = rtmp.readerConn.ReadFull(c0)
if err != nil {
return errors.Wrap(err, "read c0 from conn")
}
fmt.Printf("c0:%x\n", c0)
p.parseC0(c0)
if p.clientVersion != p.serverVersion {
return errors.New("invalid client version")
}
err = rtmp.readerConn.ReadFull(c1)
if err != nil {
return errors.Wrap(err, "read c1 from conn")
}
fmt.Printf("c1:len:%d, data: %x\n", len(c1), c1)
s0 := p.makeS0()
fmt.Printf("s0:%x\n", s0)
p.parseC1(c1)
err = rtmp.writerConn.WriteFull(s0)
if err != nil {
return errors.Wrap(err, "write s0 to conn")
}
s1 := p.makeS1()
fmt.Printf("s1:len:%d, data: %x\n", len(s1), s1)
err = rtmp.writerConn.WriteFull(s1)
if err != nil {
return errors.Wrap(err, "write s1 to conn")
}
s2 := p.makeS2()
err = rtmp.writerConn.WriteFull(s2)
if err != nil {
return errors.Wrap(err, "write s2 to conn")
}
fmt.Printf("s2:%x\n", s2)
err = rtmp.readerConn.ReadFull(c2)
if err != nil {
return errors.Wrap(err, "read c2 from conn")
}
fmt.Printf("c2:%x\n", c2)
p.parseC2(c2)
return nil
}
func HandshakeClient(rtmp *RTMP) (err error) |
func (p *Peer) parseC0(c0 []byte) {
p.clientVersion = uint8(c0[0])
}
func (p *Peer) parseC1(c1 []byte) {
//try complex handshake first
//key-digest
// keyBufOffset := 8
digestBufOffset := 8 + 764
p.handshakeMode = COMPLEX1
try := 0
complex:
// fmt.Println("keyBufOffset:", keyBufOffset)
// fmt.Println("digestBufOffset:", digestBufOffset)
// keyOffset := (int(c1[keyBufOffset+760]) + int(c1[keyBufOffset+761]) + int(c1[keyBufOffset+762]) + int(c1[keyBufOffset+763]))
//XXX: what's mean about 728?
digestOffset := (int(c1[digestBufOffset]) + int(c1[digestBufOffset+1]) + int(c1[digestBufOffset+2]) + int(c1[digestBufOffset+3])) % 728
// fmt.Println("keyOffset:", keyOffset)
// fmt.Println("digestOffset:", digestOffset)
// p.clientKey = c1[keyBufOffset+keyOffset : keyBufOffset+keyOffset+128]
p.clientDigest = c1[digestBufOffset+4+digestOffset : digestBufOffset+4+digestOffset+32]
joined := append([]byte{}, c1[:digestBufOffset+4+digestOffset]...)
joined = append(joined, c1[digestBufOffset+4+digestOffset+32:]...)
// fmt.Printf("joined:%x\n", joined)
// fmt.Printf("client key:%x\n", FPkey[:30])
mac := hmac.New(sha256.New, FPkey[:30])
mac.Write(joined)
newDigest := mac.Sum(nil)
// fmt.Printf("newDigest, len:%d, data:%x\n", len(newDigest), newDigest)
// fmt.Printf("clientDigest, len:%d, data:%x\n", len(p.clientDigest), p.clientDigest)
if bytes.Compare(newDigest, p.clientDigest) == 0 {
fmt.Println("complex handshake success.")
return
} else {
if try == 0 {
fmt.Println("complex handshake mode 1 fail, try mode 2")
digestBufOffset = 8
// keyBufOffset = 8 + 764
p.handshakeMode = COMPLEX2
try++
goto complex
} else {
fmt.Println("complex handshake fail, using simple handshake")
goto simple
}
}
simple:
p.handshakeMode = SIMPLE
p.clientTimeStamp = binary.BigEndian.Uint32(c1[:4])
p.clientZero = binary.BigEndian.Uint32(c1[4:8])
p.clientRandom = c1[8:]
}
func (p *Peer) parseC2(c2 []byte) {
//TODO
}
func (p *Peer) makeS0() (s0 []byte) {
b := bytes.NewBuffer(s0)
binary.Write(b, binary.BigEndian, p.serverVersion)
return b.Bytes()
}
func (p *Peer) makeS1() (s1 []byte) {
p.serverTimeStamp = uint32(time.Now().Unix())
s1 = make([]byte, S1_LEN)
_, _ = rand.Read(s1[8:])
binary.BigEndian.PutUint32(s1[0:4], p.serverTimeStamp)
digestBufOffset := 8
switch p.handshakeMode {
case SIMPLE:
copy(s1[4:8], []byte{0x0, 0x0, 0x0, 0x0})
p.serverRandom = s1[8:]
case COMPLEX1:
digestBufOffset = 8 + 764
fallthrough
case COMPLEX2:
copy(s1[4:8], []byte{0x04, 0x05, 0x00, 0x01})
digestOffset := (int(s1[digestBufOffset]) + int(s1[digestBufOffset+1]) + int(s1[digestBufOffset+2]) + int(s1[digestBufOffset+3])) % 728
fmt.Println("digestOffset:", digestOffset)
joined := append([]byte{}, s1[:digestBufOffset+4+digestOffset]...)
joined = append(joined, s1[digestBufOffset+4+digestOffset+32:]...)
// fmt.Printf("joined:%x\n", joined)
mac := hmac.New(sha256.New, FMSKey[:36])
mac.Write(joined)
digest := mac.Sum(nil)
// fmt.Printf("digest:%x\n", digest)
copy(s1[digestBufOffset+4+digestOffset:digestBufOffset+4+digestOffset+32], digest)
default:
}
return s1
}
func (p *Peer) makeS2() (s2 []byte) {
switch p.handshakeMode {
case SIMPLE:
b := bytes.NewBuffer(s2)
binary.Write(b, binary.BigEndian, p.clientTimeStamp)
binary.Write(b, binary.BigEndian, p.clientZero)
binary.Write(b, binary.BigEndian, p.clientRandom)
return b.Bytes()
case COMPLEX1, COMPLEX2:
s2 = make([]byte, S2_LEN)
_, _ = rand.Read(s2)
mac := hmac.New(sha256.New, FMSKey)
mac.Write(p.clientDigest)
tmpDigest := mac.Sum(nil)
mac = hmac.New(sha256.New, tmpDigest)
mac.Write(s2[:S2_LEN-32])
s2Digest := mac.Sum(nil)
copy(s2[S2_LEN-32:S2_LEN], s2Digest)
default:
}
return
}
| {
//TODO
return nil
} |
int.go | // SPDX-License-Identifier: Apache-2.0
// Copyright Authors of Cilium
package math
// IntMin returns the minimum integer provided
func IntMin(a, b int) int {
if a < b |
return b
}
// IntMax returns the maximum integer provided
func IntMax(a, b int) int {
if a > b {
return a
}
return b
}
| {
return a
} |
file.go | // 进行一些基本的文件操作
// 参考:
// https://colobu.com/2016/10/12/go-file-operations/
package dm_1
import (
cfg "../config"
"crypto/md5"
"encoding/hex"
"errors"
"github.com/kpango/glg"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"time"
)
type (
DMResource struct {
DMFileInfo
// 一个资源的绝对路径
Path string
}
// 详见os包中的 types.go
// 用于方便转化为json对象
DMFileInfo struct {
Name string // base name of the file
Size int64 // length in bytes for regular files; system-dependent for others
Mode os.FileMode // file mode bits
ModTime time.Time // modification time
IsDir bool // abbreviation for Mode().IsDire()
Sys interface{} // underlying data source (can return nil)
}
// 详见os包中的 types.go
// 用于方便转化为json对象
DMFileInfoViewModel struct {
Name string
Size int64
Mode string
ModTime string
IsDir bool
Sys interface{}
}
)
func ( R DMResource ) ToReadable() *DMFileInfoViewModel {
return &DMFileInfoViewModel{
Name: R.Name,
Size: R.Size,
Mode: R.Mode.String(),
ModTime: R.ModTime.Format("2006-01-02 15:04:05"),
IsDir: R.IsDir,
Sys: R.Sys,
}
}
// 获取文件的基本信息
func ( pR *DMResource ) GetBasicFileInfo() ( dfi *DMFileInfo, e error ) {
var fi os.FileInfo
if fi, e = os.Stat( pR.Path); e != nil {
return
} else {
dfi = &DMFileInfo{
Name: fi.Name(),
Size: fi.Size(),
Mode: fi.Mode(),
ModTime: fi.ModTime(),
IsDir: fi.IsDir(),
Sys: fi.Sys(),
}
pR.Name = dfi.Name
pR.Size = dfi.Size
pR.Mode = dfi.Mode
pR.ModTime = dfi.ModTime
pR.IsDir = dfi.IsDir
pR.Sys = dfi.Sys
return
}
}
func ( R DMResource ) Exists() bool {
_, err := os.Stat( R.Path )
if err != nil {
if os.IsExist( err ) {
return true
}
return false
}
return true
}
// 与IsDir区分
func ( R DMResource ) IsDire() bool {
s, err := os.Stat( R.Path )
if err != nil {
return false
}
return s.IsDir()
}
// 递归全部的子项目个数
func ( R DMResource ) LsRecruitCount() int {
return len( R.LsRecruit( nil ) )
}
// 如果目录下有exe文件,说明这个目录为二进制软件的目录
// 判断方式1
func ( R DMResource ) IsBinaryDirectory1() bool {
if !R.IsDire() {
return false
}
if rs, e := R.Ls(); e != nil {
return false
} else {
for _, r := range rs {
if r.IsFile() {
if ext, _ := r.GetExt(); ext == ".exe" {
return true
}
}
}
return false
}
}
func ( R DMResource ) IsFile() bool {
return !R.IsDire()
}
// 获取某个文件的md5
// 路径直接返回空字符串
func ( R DMResource ) GetMD5() ( md5str string, e error ) {
e = nil
md5str = ""
if R.IsDire() {
// e = errors.New("cannot get md5 from a directory")
return "", nil
} else {
return fileMD5( R.Path )
}
}
// 将MD5运算暂时保存
func ( R DMResource ) GetMD5Mask() ( md5str string, e error ) {
e = nil
md5str = ""
if R.IsDire() {
// e = errors.New("cannot get md5 from a directory")
return "", nil
} else {
return "md5notcomputed", nil
}
}
// https://stackoverflow.com/questions/43000621/how-can-go-md5-be-so-fast-crypto-md5
func fileMD5(path string) (string, error) {
var md5str string
file, err := os.Open(path)
if err != nil {
return md5str, err
}
defer file.Close()
hash := md5.New()
if _, err := io.Copy(hash, file); err != nil {
return md5str, err
}
hashInBytes := hash.Sum(nil)[:16]
md5str = hex.EncodeToString(hashInBytes)
return md5str, nil
} | // 枚举所有的子文件夹
// ls会自动调用所有子资源的GetBasicFileInfo进行填充
func ( R DMResource ) Ls() ( rs []DMResource, e error ) {
rs = []DMResource{}
var fs []os.FileInfo
if R.IsFile() {
e = errors.New("cannot ls a file")
return
}
if fs, e = ioutil.ReadDir( R.Path ); e != nil {
return
} else {
for i, f := range fs {
rs = append( rs, DMResource{
Path: R.Path +"/"+ f.Name(),
} )
_, e = rs[i].GetBasicFileInfo()
}
return
}
}
// 枚举所有的子文件夹
// ls会自动调用所有子资源的GetBasicFileInfo进行填充
func ( R DMResource ) LsLimited( head, end int ) ( totalCount int, rs []DMResource, e error ) {
rs = []DMResource{}
var fs []os.FileInfo
if R.IsFile() {
e = errors.New("cannot ls a file")
return
}
if fs, e = ioutil.ReadDir( R.Path ); e != nil {
return
} else {
totalCount = len( fs )
if end == -1 {
end = len(fs)
}
for i, f := range fs[head:end] {
rs = append( rs, DMResource{
Path: R.Path +"/"+ f.Name(),
} )
_, e = rs[i].GetBasicFileInfo()
}
return
}
}
// 获取文件扩展名
// 返回形如 ".ext"
// e != nil 时表明路径非法或为一个目录
func ( R DMResource ) GetExt() ( ext string, e error ) {
if R.IsFile() {
ext = path.Ext( R.Path )
return
} else {
e = errors.New("cannot get ext from a directory")
return
}
}
// dm_1.DMExts
func ( R DMResource ) GetGenre() string {
if R.IsDire() {
if R.IsBinaryDirectory1() {
return "binary"
}
return "directory"
} else {
ext, _ := R.GetExt()
for k, v := range DMExts {
for _, e := range v {
if ext == e {
return k
}
}
}
return "binary"
}
}
// 包含 R 自身
func ( R DMResource ) LsRecruit( status *DMTaskStatus ) []DMResource {
if status != nil {
status.ProgressStage = "walking directories..."
}
return append( DMRecruitLs( R, status), R )
}
func dirSize(path string) (int64, error) {
var size int64
err := filepath.Walk( path, func(_ string, info os.FileInfo, err error) error {
defer func() {
if e := recover(); e != nil {
glg.Error( e ); glg.Warn( "[in dirSize] this maybe not a critical problem")
}
}()
if cfg.IsIgnoreResource( path ) {
glg.Log("path["+path+"] ignored")
return nil
}
if !info.IsDir() {
size += info.Size()
}
return err
})
return size, err
}
// 获取文件或文件夹的大小
// 与属性Size不同,当接收者为路径时GetSize可以返回文件夹的递归大小
func ( R DMResource ) GetSize() ( int64, error ) {
if R.IsDire() {
return dirSize( R.Path )
} else {
return R.Size, nil
}
} | |
etcd_process.go | // Copyright 2017 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package e2e
import (
"fmt"
"net/url"
"os"
"github.com/friendlyhank/etcd-3.4-annotated/pkg/expect"
"github.com/friendlyhank/etcd-3.4-annotated/pkg/fileutil"
)
var (
etcdServerReadyLines = []string{"enabled capabilities for version", "published"}
binPath string
ctlBinPath string
)
// etcdProcess is a process that serves etcd requests.
type etcdProcess interface {
EndpointsV2() []string
EndpointsV3() []string
EndpointsMetrics() []string
Start() error
Restart() error
Stop() error
Close() error
WithStopSignal(sig os.Signal) os.Signal
Config() *etcdServerProcessConfig
}
type etcdServerProcess struct {
cfg *etcdServerProcessConfig
proc *expect.ExpectProcess
donec chan struct{} // closed when Interact() terminates
}
type etcdServerProcessConfig struct {
execPath string
args []string
tlsArgs []string
dataDirPath string
keepDataDir bool
name string
purl url.URL
acurl string
murl string
initialToken string
initialCluster string
}
func newEtcdServerProcess(cfg *etcdServerProcessConfig) (*etcdServerProcess, error) {
if !fileutil.Exist(cfg.execPath) {
return nil, fmt.Errorf("could not find etcd binary")
}
if !cfg.keepDataDir {
if err := os.RemoveAll(cfg.dataDirPath); err != nil {
return nil, err
}
}
return &etcdServerProcess{cfg: cfg, donec: make(chan struct{})}, nil
}
func (ep *etcdServerProcess) EndpointsV2() []string { return []string{ep.cfg.acurl} }
func (ep *etcdServerProcess) EndpointsV3() []string { return ep.EndpointsV2() }
func (ep *etcdServerProcess) EndpointsMetrics() []string { return []string{ep.cfg.murl} }
func (ep *etcdServerProcess) Start() error {
if ep.proc != nil {
panic("already started")
}
proc, err := spawnCmd(append([]string{ep.cfg.execPath}, ep.cfg.args...))
if err != nil {
return err
}
ep.proc = proc
return ep.waitReady()
}
func (ep *etcdServerProcess) Restart() error {
if err := ep.Stop(); err != nil {
return err
}
ep.donec = make(chan struct{})
return ep.Start()
}
func (ep *etcdServerProcess) Stop() (err error) {
if ep == nil || ep.proc == nil {
return nil
}
err = ep.proc.Stop()
if err != nil {
return err
}
ep.proc = nil
<-ep.donec
ep.donec = make(chan struct{})
if ep.cfg.purl.Scheme == "unix" || ep.cfg.purl.Scheme == "unixs" {
err = os.Remove(ep.cfg.purl.Host + ep.cfg.purl.Path)
if err != nil |
}
return nil
}
func (ep *etcdServerProcess) Close() error {
if err := ep.Stop(); err != nil {
return err
}
return os.RemoveAll(ep.cfg.dataDirPath)
}
func (ep *etcdServerProcess) WithStopSignal(sig os.Signal) os.Signal {
ret := ep.proc.StopSignal
ep.proc.StopSignal = sig
return ret
}
func (ep *etcdServerProcess) waitReady() error {
defer close(ep.donec)
return waitReadyExpectProc(ep.proc, etcdServerReadyLines)
}
func (ep *etcdServerProcess) Config() *etcdServerProcessConfig { return ep.cfg }
| {
return err
} |
babycat.rs | //! This module is not really part of Babycat's pubic API, but is made public
//! to make benchmarking Babycat internals easier.
//!
//! If you want to use Babycat to resample audio, you should decode
//! the audio into a [`FloatWaveform`][crate::FloatWaveform]
//! and then use the [`FloatWaveform.resample()`][crate::FloatWaveform#method.resample] method.
use crate::backend::errors::Error;
use crate::backend::resample::common::get;
use crate::backend::resample::common::get_num_output_frames;
use crate::backend::resample::common::validate_args;
use std::f32::consts::PI;
const KERNEL_A: i32 = 5;
fn lanczos_kernel(x: f32, a: f32) -> f32 {
if float_cmp::approx_eq!(f32, x, 0.0_f32) {
return 1.0;
}
if -a <= x && x < a {
return (a * (PI * x).sin() * (PI * x / a).sin()) / (PI * PI * x * x);
}
0.0
}
fn compute_sample(
input_audio: &[f32], | channel_idx: usize,
num_channels: usize,
) -> f32 {
let num_input_frames: u64 = input_audio.len() as u64 / num_channels as u64;
let a: f32 = KERNEL_A as f32;
let x_floor = frame_idx as i64;
let i_start = x_floor - a as i64 + 1;
let i_end = x_floor + a as i64 + 1;
let mut the_sample: f32 = 0.0_f32;
for i in i_start..i_end {
if (i as u64) < num_input_frames {
the_sample += get(input_audio, i as usize, channel_idx, num_channels)
* lanczos_kernel(frame_idx - i as f32, a)
}
}
the_sample
}
pub fn resample(
input_frame_rate_hz: u32,
output_frame_rate_hz: u32,
num_channels: u32,
input_audio: &[f32],
) -> Result<Vec<f32>, Error> {
validate_args(input_frame_rate_hz, output_frame_rate_hz, num_channels)?;
let output_num_frames = get_num_output_frames(
input_audio,
input_frame_rate_hz,
output_frame_rate_hz,
num_channels,
);
let output_audio: Vec<f32> = (0..output_num_frames)
.flat_map(|input_frame_idx| {
(0..num_channels as usize).map(move |channel_idx| {
let output_frame_idx = (input_frame_idx as f32 * input_frame_rate_hz as f32)
/ output_frame_rate_hz as f32;
compute_sample(
input_audio,
output_frame_idx,
channel_idx,
num_channels as usize,
)
})
})
.collect();
Ok(output_audio)
} | frame_idx: f32, |
main.js | /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function | (moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
/******/ }
/******/ };
/******/
/******/ // define __esModule on exports
/******/ __webpack_require__.r = function(exports) {
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ }
/******/ Object.defineProperty(exports, '__esModule', { value: true });
/******/ };
/******/
/******/ // create a fake namespace object
/******/ // mode & 1: value is a module id, require it
/******/ // mode & 2: merge all properties of value into the ns
/******/ // mode & 4: return value when already ns object
/******/ // mode & 8|1: behave like require
/******/ __webpack_require__.t = function(value, mode) {
/******/ if(mode & 1) value = __webpack_require__(value);
/******/ if(mode & 8) return value;
/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
/******/ var ns = Object.create(null);
/******/ __webpack_require__.r(ns);
/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
/******/ return ns;
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = "./js/index.js");
/******/ })
/************************************************************************/
/******/ ({
/***/ "./js/index.js":
/*!*********************!*\
!*** ./js/index.js ***!
\*********************/
/*! no static exports found */
/***/ (function(module, exports) {
eval("(function () {\r\n 'use strict'\r\n\r\n fetch('https://api.myjson.com/bins/sqwaq', {\r\n method: 'GET'\r\n }).then(response => {\r\n response.json().then(data => {\r\n buildScreen(data);\r\n });\r\n }).catch(function (err) {\r\n console.error(err);\r\n });\r\n\r\n function buildScreen(data) {\r\n buildHeader(data)\r\n buildForFunArea(data)\r\n buildLobbyAndRankedArea(data)\r\n buildfooter(data)\r\n }\r\n\r\n function buildHeader(data) {\r\n console.log(data)\r\n\r\n const userName = document.querySelector('.gc-name');\r\n const userId = document.querySelector('.gc-id');\r\n const progressBar = document.querySelector('.progress-bar');\r\n\r\n let pinLevelCasual = document.querySelector('.pin-level-casual')\r\n let pinLevelCOmpetitive = document.querySelector('.pin-level-competitive')\r\n let pinLevelAmateur = document.querySelector('.pin-level-amateur')\r\n let pinLevelPro = document.querySelector('.pin-level-pro')\r\n\r\n let featuredMedal = document.querySelector('.-featured-medal')\r\n let position = document.querySelector('.-position')\r\n let patent = document.querySelector('.-patent')\r\n let level = document.querySelector('.-level')\r\n //let subscription = document.querySelector('.-subscription')\r\n\r\n userName.textContent = `GC.${data.user.name}`\r\n userId.textContent = `GC ID: ${data.user.id}`;\r\n\r\n featuredMedal.src = data.user.featured_medal.image;\r\n position.src = data.user.game_position.image;\r\n patent.src = data.user.patent;\r\n level.textContent = data.user.level;\r\n \r\n\r\n if (data.user.expertise == 'casual') {\r\n progressBar.style.cssText = \"width:25%;\"\r\n pinLevelCasual.src = 'img/pin-level-blue.png'\r\n\r\n } else if (data.user.expertise == 'competitive') {\r\n progressBar.style.cssText = \"width:50%;\"\r\n pinLevelCasual.src = 'img/pin-level-blue.png'\r\n pinLevelCOmpetitive.src = 'img/pin-level-blue.png'\r\n\r\n } else if (data.user.expertise == 'amateur') {\r\n progressBar.style.cssText = \"width:75%;\"\r\n pinLevelCasual.src = 'img/pin-level-blue.png'\r\n pinLevelCOmpetitive.src = 'img/pin-level-blue.png'\r\n pinLevelAmateur.src = 'img/pin-level-blue.png'\r\n\r\n } else if (data.user.expertise == 'pro') {\r\n progressBar.style.cssText = \"width:100%;\"\r\n pinLevelCasual.src = 'img/pin-level-blue.png'\r\n pinLevelCOmpetitive.src = 'img/pin-level-blue.png'\r\n pinLevelAmateur.src = 'img/pin-level-blue.png'\r\n pinLevelPro.src = 'img/pin-level-blue.png'\r\n } \r\n\r\n }\r\n\r\n function buildForFunArea(data) {\r\n\r\n const imageForFunTitle = document.querySelector('.image-forfun-title');\r\n const titleForFunArea = document.querySelector('.title-forfun-area');\r\n\r\n imageForFunTitle.src = \"https://gamersclub.com.br/assets/images/jogueagora/[email protected]\";\r\n titleForFunArea.textContent = data['4fun'].title;\r\n\r\n buildServersOptions(data)\r\n\r\n }\r\n\r\n function buildServersOptions(data) {\r\n\r\n let servers = data['4fun'].servers\r\n let forFunArea = document.querySelector('.forfun-area');\r\n let navigationArea = document.querySelector('.navigation-area')\r\n let countServers = 1;\r\n let countGroup = 1;\r\n let idCopy = 1;\r\n\r\n servers.forEach(element => {\r\n\r\n // <define the width size of progress bar server>\r\n const maxServer = element.max;\r\n const currentServer = element.current;\r\n const sizeProgressBar = (currentServer * 101) / maxServer\r\n // </define the width size of progress bar server>\r\n\r\n if (countServers == 1) {\r\n\r\n const navigationButton = `\r\n <label class=\"navigation-forfun\">\r\n <input type=\"radio\" class=\"reference-${countGroup}\" name=\"forfun\" id=\"navigation-${countGroup}\" ${countGroup == 1 ? 'checked=checked' : ''}\">\r\n <span></span>\r\n </label>\r\n `\r\n\r\n navigationArea.insertAdjacentHTML('beforeend', navigationButton);\r\n\r\n const groupServersArea = `\r\n <div class=\"group-quad-area reference-${countGroup} ${countGroup == 1 ? 'show':'hide'}\" id=\"group-quad-${countGroup}\">\r\n \r\n </div>\r\n `\r\n forFunArea.insertAdjacentHTML('beforeend', groupServersArea);\r\n }\r\n\r\n const groupServersArea = `\r\n <article class=\"forfun-quad\">\r\n <div class=\"flex-area w-100\">\r\n <button class=\"bt-forfun-area -btn-clipboard\" data-clipboard-id=\"${idCopy}\" data-clipboard-text=\"${element.copy}\">\r\n <img src=\"img/copy-paste-icon.png\" alt=\"Gamers Club\">\r\n </button>\r\n <a href=\"${element.join}\" class=\"bt-forfun-area -green\"> <img src=\"img/goto-icon.png\" alt=\"Gamers Club\">\r\n </a>\r\n </div>\r\n\r\n <div class=\"w-100\">\r\n <div>\r\n <p class=\"descri-forfun-area\">#${element.id} - Deathmatch <strong>${element.mode}</strong></p>\r\n </div>\r\n <div>\r\n <p class=\"title-progressbar-forfun-area\"><span class=\"map-name\">${element.map}</span> <span>${element.current}/${element.max}</span></p>\r\n <div class=\"progress-bar-forfun-area\">\r\n <span class=\"progress-bar-forfun\" style=\"width:${sizeProgressBar}%;\"></span>\r\n </div>\r\n </div>\r\n </div>\r\n </article> \r\n `\r\n\r\n //<Make Copy button works>\r\n new ClipboardJS(`[data-clipboard-id=\"${idCopy}\"]`);\r\n //</Make Copy button works>\r\n\r\n let groupArea = document.querySelector(`#group-quad-${countGroup}`);\r\n groupArea.insertAdjacentHTML('beforeend', groupServersArea);\r\n\r\n countServers++\r\n\r\n if (countServers == 4) {\r\n countServers = 1;\r\n countGroup++;\r\n }\r\n\r\n })\r\n\r\n listenNavigation()\r\n }\r\n\r\n\r\n function buildLobbyAndRankedArea(data) {\r\n\r\n let lobbyTitle = document.querySelector('.title-lobby-area')\r\n let RankedTitle = document.querySelector('.title-ranked-area')\r\n let imageLobbyTitle = document.querySelector('.image-lobby-title')\r\n let imageRankedTitle = document.querySelector('.image-ranked-title')\r\n\r\n let lobbyPlayedNumber = document.querySelector('.lobby-played-number')\r\n let lobbyVictoryNumber = document.querySelector('.lobby-victory-number')\r\n let lobbyLoseNumber = document.querySelector('.lobby-lose-number')\r\n\r\n let rankedPlayedNumber = document.querySelector('.ranked-played-number')\r\n let rankedVictoryNumber = document.querySelector('.ranked-victory-number')\r\n let rankedLoseNumber = document.querySelector('.ranked-lose-number')\r\n\r\n let buttonLobby = document.querySelector('.bt-lobby')\r\n let buttonRanked = document.querySelector('.bt-ranked')\r\n let buttonRankedNumber = document.querySelector('.users-number-area .number')\r\n\r\n\r\n lobbyTitle.textContent = data.games[0].title;\r\n imageLobbyTitle.src = data.games[0].image\r\n\r\n RankedTitle.textContent = data.games[1].title;\r\n imageRankedTitle.src = data.games[1].image\r\n\r\n lobbyPlayedNumber.textContent = data.games[0].matches\r\n lobbyVictoryNumber.textContent = data.games[0].win\r\n lobbyLoseNumber.textContent = data.games[0].lose\r\n\r\n rankedPlayedNumber.textContent = data.games[1].matches\r\n rankedVictoryNumber.textContent = data.games[1].win\r\n rankedLoseNumber.textContent = data.games[1].lose\r\n\r\n buttonLobby.href = data.games[0].cta.link\r\n buttonRanked.href = data.games[1].cta.link\r\n buttonRankedNumber.textContent = data.games[1].cta.line\r\n }\r\n\r\n function buildfooter(data) {\r\n let onlinePlayers = document.querySelector('.online-players')\r\n let bannedPlayers = document.querySelector('.banned-players')\r\n\r\n onlinePlayers.textContent = data.online\r\n bannedPlayers.textContent = data.latest_banned\r\n }\r\n\r\n})();\r\n\r\n\r\nfunction listenNavigation() {\r\n\r\n let allCheckedButtons = document.querySelectorAll('input[type=\"radio\"]');\r\n\r\n allCheckedButtons.forEach(element => {\r\n element.addEventListener('click', (event) => {\r\n let clickedNavigation = event.target\r\n\r\n if (clickedNavigation.checked) {\r\n let clickedNavigationReference = clickedNavigation.classList[0];\r\n let allGroups = document.querySelectorAll('.group-quad-area');\r\n\r\n allGroups.forEach((element) => {\r\n if (element.classList.contains(`${clickedNavigationReference}`)) {\r\n element.classList.add('show')\r\n element.classList.remove('hide')\r\n } else {\r\n element.classList.add('hide')\r\n element.classList.remove('show')\r\n }\r\n })\r\n }\r\n })\r\n })\r\n}\n\n//# sourceURL=webpack:///./js/index.js?");
/***/ })
/******/ }); | __webpack_require__ |
util.rs | use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use anyhow::anyhow;
use path_dedot::ParseDot;
use rustyline::{error::ReadlineError, Editor};
/// Formats the source to be url safe for the pull
pub fn format_source_pull(source: &Path) -> anyhow::Result<PathBuf> {
// just to through error if its a directory
get_source_file_name(source)?;
Ok(path_remove_prefix(source).parse_dot().unwrap().to_path_buf())
}
/// Formats the destination based of the source, does not need to be cleaned up for url unlike push
/// Ex: source data.txt and dest . then return data.txt
pub fn format_destination_pull(
source: &Path,
destination: &Path,
) -> anyhow::Result<PathBuf> {
let source_file_name = get_source_file_name(source)?;
let new_file_path = if path_is_file(destination) {
destination.to_path_buf()
} else {
path_with_file_name(destination, Path::new(&source_file_name))
};
Ok(new_file_path)
}
/// Formats the destination based of the source, and removes the '/', '..', or '.' prefixes
/// Ex: source data.txt and dest . then return data.txt
pub fn format_destination_push(
source: &Path,
destination: &Path,
) -> anyhow::Result<PathBuf> {
let source_file_name = get_source_file_name(source)?;
let new_file_path = if path_is_file(destination) {
path_remove_prefix(destination).parse_dot().unwrap().to_path_buf()
} else {
let fp = path_with_file_name(destination, Path::new(&source_file_name));
path_remove_prefix(&fp).parse_dot().unwrap().to_path_buf()
};
Ok(new_file_path)
}
/// Gets the file name from the source directory, returns Result of OsString or Error String
fn get_source_file_name(source: &Path) -> anyhow::Result<OsString> {
if !path_is_file(source) {
return Err(anyhow!("Source is a directory"));
};
if let Some(file_name) = source.file_name() {
Ok(file_name.to_os_string())
} else {
Err(anyhow!("Source has no file name"))
}
}
/// Checks if a generic path is pointing to a file as opposed to a directory
/// Directory is defined atm as ending with '.','..','/','*', though star is just multiple files, cant support it atm
fn path_is_file(path: &Path) -> bool {
let path_str = path.to_string_lossy();
!(path_str.ends_with('.')
|| path_str.ends_with('/')
|| path_str.ends_with('*'))
}
/// Removes the prefix from the path /, .., or .,
fn path_remove_prefix(mut path: &Path) -> PathBuf {
//TODO cleanup, seems like it could be dont better
while path.strip_prefix(".").is_ok()
|| path.strip_prefix("/").is_ok()
|| path.strip_prefix("..").is_ok()
{
path = if let Ok(new_path) = path.strip_prefix(".") {
new_path
} else {
path
};
path = if let Ok(new_path) = path.strip_prefix("..") {
new_path
} else {
path
};
path = if let Ok(new_path) = path.strip_prefix("/") {
new_path
} else {
path
};
}
path.components().collect()
}
/// Changes the file_name if the path but unlike the default method correctly handles paths ending with a .
fn path_with_file_name(path: &Path, file_name: &Path) -> PathBuf {
let parent = if let Some(p) = path.parent() {
if path_is_file(path) {
p.join(file_name)
} else {
p.join(path.file_name().unwrap_or_else(|| OsStr::new("")))
.join(file_name)
}
} else {
file_name.to_path_buf()
};
parent
}
pub fn get_confirmation(warning: &str) -> anyhow::Result<bool> {
let mut rl = Editor::<()>::new();
let prompt = format!("{}\n>> ", warning);
let readline = rl.readline(&prompt);
match readline {
Ok(line) => {
let clean_line = line.trim().to_lowercase();
if clean_line == "y" || clean_line == "yes" {
return Ok(true);
}
}
Err(ReadlineError::Interrupted) => {
println!("CTRL-C");
}
Err(ReadlineError::Eof) => {
println!("CTRL-D");
}
Err(err) => {
println!("Error: {:?}", err);
}
}
Ok(false)
}
pub fn join_dedot_path(
start: PathBuf,
end: PathBuf,
) -> anyhow::Result<PathBuf> {
// Overide dot methods cause they tend to fail
if end.to_str().is_some() && end.to_str().unwrap() == "." {
return Ok(start);
}
if end.to_str().is_some() && end.to_str().unwrap() == ".." {
return match end.parent() {
Some(p) => Ok(p.to_path_buf()),
None => Ok(PathBuf::from("/")),
};
}
if end.starts_with("/") {
match end.parse_dot() {
Ok(p) => {
Ok(PathBuf::from("/")
.join(path_remove_prefix(&p.to_path_buf())))
}
Err(_) => Ok(PathBuf::from("/")), | } else {
match start.join(end).parse_dot() {
Ok(p) => {
Ok(PathBuf::from("/")
.join(path_remove_prefix(&p.to_path_buf())))
}
Err(_) => Ok(PathBuf::from("/")),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn is_file() {
let path = Path::new("file");
assert!(path_is_file(path));
let path = Path::new("file.txt");
assert!(path_is_file(path));
let path = Path::new("file/");
assert!(!path_is_file(path));
let path = Path::new("file/.");
assert!(!path_is_file(path));
let path = Path::new("file/..");
assert!(!path_is_file(path));
let path = Path::new("file/*");
assert!(!path_is_file(path));
let path = Path::new(".");
assert!(!path_is_file(path));
let path = Path::new("/");
assert!(!path_is_file(path));
}
#[test]
fn remove_prefix() {
let path = Path::new("file");
assert_eq!(path_remove_prefix(path).to_str().unwrap(), "file");
let path = Path::new(".");
assert_eq!(path_remove_prefix(path).to_str().unwrap(), "");
let path = Path::new("./file.txt");
assert_eq!(path_remove_prefix(path).to_str().unwrap(), "file.txt");
}
#[test]
fn with_file_name() {
let path = Path::new("file");
let file_name = Path::new("file.txt");
assert_eq!(
path_with_file_name(path, file_name).to_str().unwrap(),
"file.txt"
);
let path = Path::new(".");
let file_name = Path::new("file.txt");
assert_eq!(
path_with_file_name(path, file_name).to_str().unwrap(),
"file.txt"
);
let path = Path::new("foo/bar");
let file_name = Path::new("file.txt");
assert_eq!(
path_with_file_name(path, file_name).to_str().unwrap(),
"foo/file.txt"
);
}
#[test]
fn format_push_name_both() {
let source = Path::new("source.txt");
let destination = Path::new("dest.txt");
assert_eq!(
format_destination_push(source, destination).unwrap(),
destination
);
}
#[test]
fn format_push_source_is_dir() {
let source = Path::new(".");
let destination = Path::new(".");
format_destination_push(source, destination).unwrap_err();
let source = Path::new("/ab/.");
let destination = Path::new("src/files");
format_destination_push(source, destination).unwrap_err();
let source = Path::new("/");
let destination = Path::new("src/files");
format_destination_push(source, destination).unwrap_err();
// wildcard not supported (yet)
let source = Path::new("/*");
let destination = Path::new("src/files");
format_destination_push(source, destination).unwrap_err();
}
#[test]
fn format_push_dest_is_dir() {
let source = Path::new("ab/test.txt");
let destination = Path::new(".");
assert_eq!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"test.txt"
);
let source = Path::new("ab/test.txt");
let destination = Path::new("/file/to/.");
assert_eq!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"file/to/test.txt"
);
let source = Path::new("/root/test.txt");
let destination = Path::new("file/to/.");
assert_eq!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"file/to/test.txt"
);
let source = Path::new("/root/test.txt");
let destination = Path::new("file/to/");
assert_eq!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"file/to/test.txt"
);
// TODO maybe handle this case but also its invalid syntax
let source = Path::new("/root/test.txt");
let destination = Path::new("file/to/*");
assert_ne!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"file/to/test.txt"
);
}
#[test]
fn format_push_dest_is_file() {
let source = Path::new("/root/test.txt");
let destination = Path::new("/file/to/bar.txt");
assert_eq!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"file/to/bar.txt"
);
let source = Path::new("/root/test.txt");
let destination = Path::new("./file/to/bar.txt");
assert_eq!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"file/to/bar.txt"
);
}
#[test]
fn format_push_dedot() {
let source = Path::new("/root/../test.txt");
let destination = Path::new("/file/to/../bar.txt");
assert_eq!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"file/bar.txt"
);
let source = Path::new("/root/test.txt");
let destination = Path::new("../../file/to/bar.txt");
assert_eq!(
format_destination_push(source, destination)
.unwrap()
.to_str()
.unwrap(),
"file/to/bar.txt"
);
}
#[test]
fn format_dest_pull() {
let source = Path::new("/ab/test.txt");
let destination = Path::new("foo/bar/bar.txt");
assert_eq!(
format_destination_pull(source, destination)
.unwrap()
.to_str()
.unwrap(),
"foo/bar/bar.txt"
);
let source = Path::new("/ab/test.txt");
let destination = Path::new("foo/bar/.");
assert_eq!(
format_destination_pull(source, destination)
.unwrap()
.to_str()
.unwrap(),
"foo/bar/test.txt"
);
}
#[test]
fn format_src_pull() {
let source = Path::new("/ab/.");
format_source_pull(source).unwrap_err();
let source = Path::new(".././..//foo/bar/test.txt");
assert_eq!(
format_source_pull(source).unwrap().to_str().unwrap(),
"foo/bar/test.txt"
);
}
#[test]
fn default_path_dedot_join() {
let base = PathBuf::from("/");
let end = PathBuf::from("//");
assert_eq!(join_dedot_path(base, end).unwrap().to_str().unwrap(), "/");
let base = PathBuf::from("/");
let end = PathBuf::from("//////test/");
assert_eq!(
join_dedot_path(base, end).unwrap().to_str().unwrap(),
"/test"
);
let base = PathBuf::from("/");
let end = PathBuf::from("....///..///test/");
assert_eq!(
join_dedot_path(base, end).unwrap().to_str().unwrap(),
"/test"
);
let base = PathBuf::from("/");
let end = PathBuf::from("/../");
assert_eq!(join_dedot_path(base, end).unwrap().to_str().unwrap(), "/");
}
#[test]
fn weird_middle_path_dedot_join() {
let base = PathBuf::from("/");
let end = PathBuf::from("/../");
assert_eq!(join_dedot_path(base, end).unwrap().to_str().unwrap(), "/");
let base = PathBuf::from("/");
let end = PathBuf::from("foo//bar/");
assert_eq!(
join_dedot_path(base, end).unwrap().to_str().unwrap(),
"/foo/bar"
);
}
} | } |
aws_infrastructure_access_roles_resource_json.go | /*
Copyright (c) 2020 Red Hat, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// IMPORTANT: This file has been generated automatically, refrain from modifying it manually as all
// your changes will be lost when the file is generated again.
package v1 // github.com/openshift-online/ocm-sdk-go/clustersmgmt/v1
import (
"io"
"net/http"
"github.com/openshift-online/ocm-sdk-go/helpers"
)
func readAWSInfrastructureAccessRolesListRequest(request *AWSInfrastructureAccessRolesListServerRequest, r *http.Request) error {
var err error
query := r.URL.Query()
request.order, err = helpers.ParseString(query, "order")
if err != nil {
return err
}
request.page, err = helpers.ParseInteger(query, "page")
if err != nil {
return err
}
if request.page == nil {
request.page = helpers.NewInteger(1)
}
request.search, err = helpers.ParseString(query, "search")
if err != nil {
return err
}
request.size, err = helpers.ParseInteger(query, "size")
if err != nil {
return err
}
if request.size == nil {
request.size = helpers.NewInteger(100)
}
return nil
}
func writeAWSInfrastructureAccessRolesListRequest(request *AWSInfrastructureAccessRolesListRequest, writer io.Writer) error {
return nil
}
func readAWSInfrastructureAccessRolesListResponse(response *AWSInfrastructureAccessRolesListResponse, reader io.Reader) error {
iterator, err := helpers.NewIterator(reader)
if err != nil {
return err
}
for {
field := iterator.ReadObject()
if field == "" {
break
}
switch field {
case "page":
value := iterator.ReadInt()
response.page = &value
case "size":
value := iterator.ReadInt()
response.size = &value
case "total":
value := iterator.ReadInt()
response.total = &value
case "items":
items := readAWSInfrastructureAccessRoleList(iterator)
response.items = &AWSInfrastructureAccessRoleList{
items: items,
}
default:
iterator.ReadAny()
}
}
return iterator.Error
}
func writeAWSInfrastructureAccessRolesListResponse(response *AWSInfrastructureAccessRolesListServerResponse, w http.ResponseWriter) error {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(response.status)
stream := helpers.NewStream(w)
stream.WriteObjectStart()
stream.WriteObjectField("kind")
count := 1
stream.WriteString(AWSInfrastructureAccessRoleListKind)
if response.items != nil && response.items.href != "" {
stream.WriteMore()
stream.WriteObjectField("href")
stream.WriteString(response.items.href)
count++
}
if response.page != nil {
if count > 0 {
stream.WriteMore()
}
stream.WriteObjectField("page")
stream.WriteInt(*response.page)
count++
}
if response.size != nil {
if count > 0 {
stream.WriteMore()
}
stream.WriteObjectField("size")
stream.WriteInt(*response.size)
count++
}
if response.total != nil {
if count > 0 {
stream.WriteMore()
}
stream.WriteObjectField("total")
stream.WriteInt(*response.total)
count++
}
if response.items != nil {
if response.items.items != nil {
if count > 0 {
stream.WriteMore()
}
stream.WriteObjectField("items")
writeAWSInfrastructureAccessRoleList(response.items.items, stream)
count++
}
} | stream.Flush()
return stream.Error
} | stream.WriteObjectEnd() |
device_adsl.rs | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files
// DO NOT EDIT
use crate::Device;
use crate::Object;
use glib::object::ObjectType as ObjectType_;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
#[doc(alias = "NMDeviceAdsl")]
pub struct DeviceAdsl(Object<ffi::NMDeviceAdsl, ffi::NMDeviceAdslClass>) @extends Device, Object;
match fn {
type_ => || ffi::nm_device_adsl_get_type(),
}
}
impl DeviceAdsl {
/// Whether the device has carrier.
///
/// # Returns
///
/// [`true`] if the device has carrier
#[doc(alias = "nm_device_adsl_get_carrier")]
#[doc(alias = "get_carrier")]
pub fn is_carrier(&self) -> bool {
unsafe { from_glib(ffi::nm_device_adsl_get_carrier(self.to_glib_none().0)) }
}
#[doc(alias = "carrier")]
pub fn connect_carrier_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_carrier_trampoline<F: Fn(&DeviceAdsl) + 'static>(
this: *mut ffi::NMDeviceAdsl,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::carrier\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_carrier_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for DeviceAdsl {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result |
}
| {
f.write_str("DeviceAdsl")
} |
BatchEnv.py | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from aws_cdk import (
core,
aws_batch,
aws_ec2,
aws_events,
aws_events_targets,
aws_ssm,
aws_ecr,
aws_ecs
)
class BatchEnvironment(core.Stack):
def __init__(self, scope: core.Construct, id: str, namespace, ** kwargs) -> None:
super().__init__(scope, id, **kwargs)
# get the default VPC
default_vpc = aws_ec2.Vpc(self, "VPC")
# create a Batch compute environment with some default settings
compute_environment = aws_batch.ComputeEnvironment(
self, "Batch",
compute_environment_name="MyComputeEnvironment",
managed=True,
compute_resources=aws_batch.ComputeResources(
vpc=default_vpc,
type=aws_batch.ComputeResourceType.SPOT,
bid_percentage=100,
allocation_strategy=aws_batch.AllocationStrategy.SPOT_CAPACITY_OPTIMIZED,
maxv_cpus=256,
minv_cpus=0,
desiredv_cpus=0,
compute_resources_tags=core.Tags.of(self).add('Name', 'BatchComputeInstance')
))
# create the job queue and associate it to the CE just created
job_queue = aws_batch.JobQueue(self, "JobQueue",
job_queue_name="MyJobQueue",
compute_environments=[aws_batch.JobQueueComputeEnvironment(
compute_environment=compute_environment,
order=1,)],
enabled=True,
priority=1)
# get ecr repository project name
ecr_repository_name = aws_ssm.StringParameter.value_for_string_parameter(
self, f"{namespace}-ecrrepository")
# get build project reference
ecr_repository = aws_ecr.Repository.from_repository_name(
self, id=f"ecr-repo-name-{id}", repository_name=ecr_repository_name)
job_definition = aws_batch.JobDefinition(self, "JobDefinition",
job_definition_name="MyJobDefinition",
container={
"image": aws_ecs.EcrImage(ecr_repository, "latest"),
"vcpus": 4,
"memory_limit_mib": 256
})
# create an events pattern triggered on new image push on the ecr repository
event_pattern = aws_events.EventPattern(
detail_type=['ECR Image Action'],
detail={
"result": ["SUCCESS"],
"action-type": ["PUSH"],
"image-tag": ["latest"],
"repository-name": [ecr_repository_name]
}
)
ecr_batch_trigger_rule = aws_events.Rule(
self, "ECR to Batch Rule",
description="Trigger a Batch job on push to ECR",
event_pattern=event_pattern,
targets=[aws_events_targets.BatchJob(
job_queue_arn=job_queue.job_queue_arn,
job_queue_scope=job_queue,
job_definition_arn=job_definition.job_definition_arn,
job_definition_scope=job_definition
)])
| core.CfnOutput(
self, "JobQueueName",
description=f"Job Queue name {namespace}",
value=job_queue.job_queue_name,
)
core.CfnOutput(
self, "JobDefinitionName",
description=f"Job definition name {namespace}",
value=job_definition.job_definition_name
)
# self.output_props['batch_job_queue'] = job_queue
# self.output_props['batch_job_def'] = job_definition
# # pass objects to another stack
# @property
# def outputs(self):
# return self.output_props | # outputs |
test_rffi_platform.py | import py, sys, struct
from rpython.rtyper.tool import rffi_platform
from rpython.rtyper.lltypesystem import lltype
from rpython.rtyper.lltypesystem import rffi
from rpython.tool.udir import udir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.translator.platform import platform
from rpython.rlib.rarithmetic import r_uint, r_longlong, r_ulonglong
from rpython.rlib.rfloat import isnan
def import_ctypes():
try:
import ctypes
except ImportError:
py.test.skip("this test requires ctypes")
return ctypes
def test_dirent():
dirent = rffi_platform.getstruct("struct dirent",
"""
struct dirent /* for this example only, not the exact dirent */
{
long d_ino;
int d_off;
unsigned short d_reclen;
char d_name[32];
};
""",
[("d_reclen", rffi.USHORT)])
assert isinstance(dirent, lltype.Struct)
# check that we have the desired field
assert dirent.c_d_reclen is rffi.USHORT
ctypes = import_ctypes()
class CTypesDirent(ctypes.Structure):
_fields_ = [('d_ino', ctypes.c_long),
('d_off', ctypes.c_int),
('d_reclen', ctypes.c_ushort),
('d_name', ctypes.c_char * 32)]
assert dirent._hints['size'] == ctypes.sizeof(CTypesDirent)
def test_fit_type():
S = rffi_platform.getstruct("struct S",
"""
struct S {
signed char c;
unsigned char uc;
short s;
unsigned short us;
int i;
unsigned int ui;
long l;
unsigned long ul;
long long ll;
unsigned long long ull;
double d;
};
""",
[("c", rffi.INT),
("uc", rffi.INT),
("s", rffi.UINT), | ("us", rffi.INT),
("i", rffi.INT),
("ui", rffi.INT),
("l", rffi.INT),
("ul", rffi.INT),
("ll", rffi.INT),
("ull", rffi.INT),
("d", rffi.DOUBLE)])
# XXX we need to have a float here as well as soon as we'll
# have support
assert isinstance(S, lltype.Struct)
assert S.c_c == rffi.SIGNEDCHAR
assert S.c_uc == rffi.UCHAR
assert S.c_s == rffi.SHORT
assert S.c_us == rffi.USHORT
assert S.c_i == rffi.INT
assert S.c_ui == rffi.UINT
assert S.c_l == rffi.LONG
assert S.c_ul == rffi.ULONG
assert S.c_ll == rffi.LONGLONG
assert S.c_ull == rffi.ULONGLONG
assert S.c_d == rffi.DOUBLE
def test_simple_type():
ctype = rffi_platform.getsimpletype('test_t',
'typedef unsigned short test_t;',
rffi.INT)
assert ctype == rffi.USHORT
def test_constant_integer():
value = rffi_platform.getconstantinteger('BLAH',
'#define BLAH (6*7)')
assert value == 42
value = rffi_platform.getconstantinteger('BLAH',
'#define BLAH (-2147483648LL)')
assert value == -2147483648
value = rffi_platform.getconstantinteger('BLAH',
'#define BLAH (3333333333ULL)')
assert value == 3333333333
def test_defined():
res = rffi_platform.getdefined('ALFKJLKJFLKJFKLEJDLKEWMECEE', '')
assert not res
res = rffi_platform.getdefined('ALFKJLKJFLKJFKLEJDLKEWMECEE',
'#define ALFKJLKJFLKJFKLEJDLKEWMECEE')
assert res
def test_defined_constant():
res = rffi_platform.getdefineddouble('ABCDFGH', '#define ABCDFGH 2.0')
assert res == 2.0
res = rffi_platform.getdefinedinteger('ABCDFGH', '#define ABCDFGH 2')
assert res == 2
def test_defined_constant_float():
value = rffi_platform.getdefineddouble('BLAH', '#define BLAH 1.0')
assert value == 1.0
value = rffi_platform.getdefineddouble('BLAH', '#define BLAH 1.5')
assert value == 1.5
value = rffi_platform.getdefineddouble('BLAH', '#define BLAH 1.0e20')
assert value == 1.0e20
value = rffi_platform.getdefineddouble('BLAH', '#define BLAH 1.0e50000')
assert value == float("inf")
value = rffi_platform.getdefineddouble('BLAH', '#define BLAH (double)0/0')
assert isnan(value)
def test_getintegerfunctionresult():
func = 'int sum(int a, int b) {return a + b;}'
value = rffi_platform.getintegerfunctionresult('sum', [6, 7], func)
assert value == 13
value = rffi_platform.getintegerfunctionresult('lround', [6.7],
'#include <math.h>')
assert value == 7
value = rffi_platform.getintegerfunctionresult('lround', [9.1],
includes=['math.h'])
assert value == 9
def test_configure():
test_h = udir.join('test_ctypes_platform.h')
test_h.write('#define XYZZY 42\n')
class CConfig:
_compilation_info_ = ExternalCompilationInfo(
pre_include_bits = ["/* a C comment */",
"#include <stdio.h>",
"#include <test_ctypes_platform.h>"],
include_dirs = [str(udir)]
)
FILE = rffi_platform.Struct('FILE', [])
ushort = rffi_platform.SimpleType('unsigned short')
XYZZY = rffi_platform.ConstantInteger('XYZZY')
res = rffi_platform.configure(CConfig)
assert isinstance(res['FILE'], lltype.Struct)
assert res == {'FILE': res['FILE'],
'ushort': rffi.USHORT,
'XYZZY': 42}
def test_integer_function_result():
class CConfig:
_compilation_info_ = ExternalCompilationInfo(
pre_include_bits = ["""int sum(int a, int b){ return a+b;}"""],
)
SUM = rffi_platform.IntegerFunctionResult('sum', [12, 34])
SUM2 = rffi_platform.IntegerFunctionResult('sum', [-12, -34])
res = rffi_platform.configure(CConfig)
assert res['SUM'] == 46
assert res['SUM2'] == -46
def test_ifdef():
class CConfig:
_compilation_info_ = ExternalCompilationInfo(
post_include_bits = ['/* a C comment */',
'#define XYZZY 42',
'typedef int foo;',
'''
struct s {
int i;
double f;
};
'''])
s = rffi_platform.Struct('struct s', [('i', rffi.INT)],
ifdef='XYZZY')
z = rffi_platform.Struct('struct z', [('i', rffi.INT)],
ifdef='FOOBAR')
foo = rffi_platform.SimpleType('foo', ifdef='XYZZY')
bar = rffi_platform.SimpleType('bar', ifdef='FOOBAR')
res = rffi_platform.configure(CConfig)
assert res['s'] is not None
assert res['z'] is None
assert res['foo'] is not None
assert res['bar'] is None
def test_nested_structs():
class CConfig:
_compilation_info_ = ExternalCompilationInfo(
post_include_bits=["""
struct x {
int foo;
unsigned long bar;
};
struct y {
char c;
struct x x;
};
"""])
x = rffi_platform.Struct("struct x", [("bar", rffi.SHORT)])
y = rffi_platform.Struct("struct y", [("x", x)])
res = rffi_platform.configure(CConfig)
c_x = res["x"]
c_y = res["y"]
assert isinstance(c_x, lltype.Struct)
assert isinstance(c_y, lltype.Struct)
assert c_y.c_x is c_x
def test_nested_structs_in_the_opposite_order():
class CConfig:
_compilation_info_ = ExternalCompilationInfo(
post_include_bits=["""
struct y {
int foo;
unsigned long bar;
};
struct x {
char c;
struct y y;
};
"""])
y = rffi_platform.Struct("struct y", [("bar", rffi.SHORT)])
x = rffi_platform.Struct("struct x", [("y", y)])
res = rffi_platform.configure(CConfig)
c_x = res["x"]
c_y = res["y"]
assert isinstance(c_x, lltype.Struct)
assert isinstance(c_y, lltype.Struct)
assert c_x.c_y is c_y
def test_array():
dirent = rffi_platform.getstruct("struct dirent",
"""
struct dirent /* for this example only, not the exact dirent */
{
long d_ino;
int d_off;
unsigned short d_reclen;
char d_name[32];
};
""",
[("d_name", lltype.FixedSizeArray(rffi.CHAR, 1))])
assert dirent.c_d_name.length == 32
def test_has():
assert rffi_platform.has("x", "int x = 3;")
assert not rffi_platform.has("x", "")
# has() should also not crash if it is given an invalid #include
assert not rffi_platform.has("x", "#include <some/path/which/cannot/exist>")
def test_verify_eci():
eci = ExternalCompilationInfo()
rffi_platform.verify_eci(eci)
eci = ExternalCompilationInfo(libraries=['some_name_that_doesnt_exist_'])
py.test.raises(rffi_platform.CompilationError,
rffi_platform.verify_eci, eci)
def test_sizeof():
assert rffi_platform.sizeof("char", ExternalCompilationInfo()) == 1
def test_memory_alignment():
a = rffi_platform.memory_alignment()
print a
assert a % struct.calcsize("P") == 0
def test_external_lib():
# XXX this one seems to be a bit too platform-specific. Check
# how to test it on windows correctly (using so_prefix?)
# and what are alternatives to LD_LIBRARY_PATH
eci = ExternalCompilationInfo()
c_source = """
int f(int a, int b)
{
return (a + b);
}
"""
tmpdir = udir.join('external_lib').ensure(dir=1)
c_file = tmpdir.join('libc_lib.c')
c_file.write(c_source)
l = platform.compile([c_file], eci, standalone=False)
eci = ExternalCompilationInfo(
libraries = ['c_lib'],
library_dirs = [str(tmpdir)]
)
rffi_platform.verify_eci(eci)
def test_generate_padding():
# 'padding_drop' is a bit strange, but is what we need to write C code
# that defines prebuilt structures of that type. Normally, the C
# backend would generate '0' entries for every field c__pad#. That's
# usually much more than the number of real fields in the real structure
# definition. So 'padding_drop' allows a quick fix: it lists fields
# that should be ignored by the C backend. It should only be used in
# that situation because it lists some of the c__pad# fields a bit
# randomly -- to the effect that writing '0' for the other fields gives
# the right amount of '0's.
S = rffi_platform.getstruct("foobar_t", """
typedef struct {
char c1; /* followed by one byte of padding */
short s1;
} foobar_t;
""", [("c1", lltype.Signed),
("s1", lltype.Signed)])
assert S._hints['padding'] == ('c__pad0',)
d = {'c_c1': 'char', 'c_s1': 'short'}
assert S._hints['get_padding_drop'](d) == ['c__pad0']
#
S = rffi_platform.getstruct("foobar_t", """
typedef struct {
char c1;
char c2; /* _pad0 */
short s1;
} foobar_t;
""", [("c1", lltype.Signed),
("s1", lltype.Signed)])
assert S._hints['padding'] == ('c__pad0',)
d = {'c_c1': 'char', 'c_s1': 'short'}
assert S._hints['get_padding_drop'](d) == []
#
S = rffi_platform.getstruct("foobar_t", """
typedef struct {
char c1;
char c2; /* _pad0 */
/* _pad1, _pad2 */
int i1;
} foobar_t;
""", [("c1", lltype.Signed),
("i1", lltype.Signed)])
assert S._hints['padding'] == ('c__pad0', 'c__pad1', 'c__pad2')
d = {'c_c1': 'char', 'c_i1': 'int'}
assert S._hints['get_padding_drop'](d) == ['c__pad1', 'c__pad2']
#
S = rffi_platform.getstruct("foobar_t", """
typedef struct {
char c1;
char c2; /* _pad0 */
char c3; /* _pad1 */
/* _pad2 */
int i1;
} foobar_t;
""", [("c1", lltype.Signed),
("i1", lltype.Signed)])
assert S._hints['padding'] == ('c__pad0', 'c__pad1', 'c__pad2')
d = {'c_c1': 'char', 'c_i1': 'int'}
assert S._hints['get_padding_drop'](d) == ['c__pad2']
#
S = rffi_platform.getstruct("foobar_t", """
typedef struct {
char c1;
/* _pad0 */
short s1; /* _pad1, _pad2 */
int i1;
} foobar_t;
""", [("c1", lltype.Signed),
("i1", lltype.Signed)])
assert S._hints['padding'] == ('c__pad0', 'c__pad1', 'c__pad2')
d = {'c_c1': 'char', 'c_i1': 'int'}
assert S._hints['get_padding_drop'](d) == ['c__pad1', 'c__pad2']
#
S = rffi_platform.getstruct("foobar_t", """
typedef struct {
char c1;
char c2; /* _pad0 */
/* _pad1, _pad2 */
int i1;
char c3; /* _pad3 */
/* _pad4 */
short s1;
} foobar_t;
""", [("c1", lltype.Signed),
("i1", lltype.Signed),
("s1", lltype.Signed)])
assert S._hints['padding'] == ('c__pad0', 'c__pad1', 'c__pad2',
'c__pad3', 'c__pad4')
d = {'c_c1': 'char', 'c_i1': 'int', 'c_s1': 'short'}
assert S._hints['get_padding_drop'](d) == ['c__pad1', 'c__pad2', 'c__pad4']
#
S = rffi_platform.getstruct("foobar_t", """
typedef struct {
char c1;
long l2; /* some number of _pads */
} foobar_t;
""", [("c1", lltype.Signed)])
padding = list(S._hints['padding'])
d = {'c_c1': 'char'}
assert S._hints['get_padding_drop'](d) == padding
def test_expose_value_as_rpython():
def get(x):
x = rffi_platform.expose_value_as_rpython(x)
return (x, type(x))
assert get(5) == (5, int)
assert get(-82) == (-82, int)
assert get(sys.maxint) == (sys.maxint, int)
assert get(sys.maxint+1) == (sys.maxint+1, r_uint)
if sys.maxint == 2147483647:
assert get(9999999999) == (9999999999, r_longlong)
assert get(-9999999999) == (-9999999999, r_longlong)
assert get(2**63) == (2**63, r_ulonglong)
assert get(-2**63) == (-2**63, r_longlong)
py.test.raises(OverflowError, get, -2**63-1)
py.test.raises(OverflowError, get, 2**64) | |
empatica.py | """
Empatica E4 is a wearable device that offers real-time physiological data
acquisition such as blood volume pulse, electrodermal activity (EDA), heart
rate, interbeat intervals, 3-axis acceleration and skin temperature.
"""
import os
import random
import numpy as np
import pandas as pd
class EmpaticaReader:
"""
Read, timeshift and write data generated by Empatica E4.
Attributes
----------
start_times : dict
Contain the timestamp of the first measurement for all
measured signals (BVP, ACC, etc.).
sample_freqs : dict ]
Contain the sampling frequencies of all measured signals
in Hz.
IBI : pandas.DataFrame
Contain inter-beat interval data. The column
"seconds_since_start" is the time in seconds between the start of
measurements and the column "IBI" is the duration in seconds between
consecutive beats.
ACC : pandas.DataFrame
Contain the data measured with the onboard MEMS type
3-axis accelerometer, indexed by time of measurement.
BVP : pandas.DataFrame
Contain blood volume pulse data, indexed by time of
measurement.
EDA : pandas.DataFrame
Contain data captured from the electrodermal activity
sensor, indexed by time of measurement.
HR : pandas.DataFrame
Contain heart rate data, indexed by time of
measurement.
TEMP : pandas.DataFrame
Contain temperature data, indexed by time of
measurement.
data : pandas.DataFrame
Joined dataframe of the ACC, BVP, EDA, HR and TEMP
dataframes (see above). May contain NaN values because sampling
frequencies differ across signals.
"""
def __init__(self, path):
"""
Parse the csv files located in the specified directory into dataframes.
Parameters
----------
path : str
Path of the directory that contains the individual signal csv
files. The files must be named ACC.csv, BVP.csv, EDA.csv, HR.csv,
IBI.csv and TEMP.csv. If present, the file tags.csv is also read.
"""
self.start_times = {}
self.sample_freqs = {}
files = [f for f in os.listdir(path) if os.path.isfile(os.path.join(path, f))]
if files is None:
print('Empty directory. Nothing to read.')
return None
self.ACC = self._read_signal(os.path.join(path, 'ACC.csv'), 'ACC', col_names=['X', 'Y', 'Z'])
self.BVP = self._read_signal(os.path.join(path, 'BVP.csv'), 'BVP')
self.EDA = self._read_signal(os.path.join(path, 'EDA.csv'), 'EDA')
self.HR = self._read_signal(os.path.join(path, 'HR.csv'), 'HR')
self.TEMP = self._read_signal(os.path.join(path, 'TEMP.csv'), 'TEMP')
self.IBI = self._read_ibi(os.path.join(path, 'IBI.csv'))
self.tags = self._read_tags(os.path.join(path, 'tags.csv'))
self.data = self._get_joined_dataframe()
def write(self, dir_path):
"""
Write the signal dataframes back to individual csv files formatted the
same way as they were read.
Parameters
----------
path : str
Path of the directory in which the csv files are created.
If the directory exists, the csv files are written using writing mode 'w'
ignoring other files in the directory.
If the directory doe not exist, it will be created.
"""
if not os.path.exists(dir_path):
os.mkdir(dir_path)
if self.ACC is not None:
self._write_signal(os.path.join(dir_path, 'ACC.csv'), self.ACC, 'ACC')
if self.BVP is not None:
self._write_signal(os.path.join(dir_path, 'BVP.csv'), self.BVP, 'BVP')
if self.EDA is not None:
self._write_signal(os.path.join(dir_path, 'EDA.csv'), self.EDA, 'EDA')
if self.HR is not None:
self._write_signal(os.path.join(dir_path, 'HR.csv'), self.HR, 'HR')
if self.TEMP is not None:
self._write_signal(os.path.join(dir_path, 'TEMP.csv'), self.TEMP, 'TEMP')
if self.IBI is not None:
self._write_ibi(os.path.join(dir_path, 'IBI.csv'))
if self.tags is not None:
self._write_tags(os.path.join(dir_path, 'tags.csv'))
def _read_signal(self, path, signal_name, col_names=None):
|
def _write_signal(self, path, dataframe, signal_name):
n_cols = len(dataframe.columns) if isinstance(dataframe, pd.DataFrame) else 1
meta = np.array([[self.start_times[signal_name].value / 1e9] * n_cols,
[self.sample_freqs[signal_name]] * n_cols])
with open(path, 'w') as file:
np.savetxt(file, meta, fmt='%s', delimiter=', ', newline='\n')
dataframe.to_csv(file, index=None, header=None, line_terminator='\n')
def _read_ibi(self, path):
try:
if os.stat(path).st_size > 0:
with open(path, 'r') as file:
start_time = pd.Timestamp(float(file.readline().split(',')[0]), unit='s')
self.start_times['IBI'] = start_time
df = pd.read_csv(file, names=['time', 'IBI'], header=None)
df['time'] = pd.to_timedelta(df['time'], unit='s')
df['time'] = start_time + df['time']
return df.set_index('time')
else:
print(f"Not reading signal because the file {path} is empty.")
except OSError:
print(f"Not reading signal because the file {path} does not exist.")
return None
def _write_ibi(self, path):
with open(path, 'w') as file:
file.write(f"{self.start_times['IBI'].value // 1e9}, IBI\n")
write_df = self.IBI.copy()
write_df.index = (write_df.index - self.start_times['IBI']).values.astype(int) / 1e9
write_df.to_csv(file, header=None, line_terminator='\n')
def _read_tags(self, path):
try:
if os.stat(path).st_size > 0:
return pd.read_csv(path, header=None,
parse_dates=[0],
date_parser=lambda x : pd.to_datetime(x, unit='s'),
names=['tags'],
squeeze=True)
else:
print(f"Not reading tags because the file {path} is empty.")
except OSError:
print(f"Not reading tags because the file {path} does not exist.")
return None
def _write_tags(self, path):
if self.tags is not None:
tags_write_series = self.tags.map(lambda x: x.value / 1e9)
tags_write_series.to_csv(path, header=None, index=None, line_terminator='\n')
def timeshift(self, shift='random'):
"""
Timeshift all time related columns as well as the starting_times dict.
Parameters
----------
shift : None/'random', pd.Timestamp or pd.Timedelta
If shift is not specified, shifts the data by a random time interval
between one month and two years to the past.
If shift is a timdelta, adds that timedelta to all time-related attributes.
If shift is a timestamp, shifts the data such that the earliest entry
has that timestamp. The remaining values will mantain the same
time difference to the first entry.
"""
if shift == 'random':
one_month = pd.Timedelta('- 30 days').value
two_years = pd.Timedelta('- 730 days').value
random_timedelta = pd.Timedelta(random.uniform(one_month, two_years))
self.timeshift(random_timedelta)
dataframes = []
variables = [self.ACC, self.BVP, self.EDA,
self.HR, self.TEMP, self.data]
for variable in variables:
if variable is not None:
dataframes.append(variable)
if isinstance(shift, pd.Timestamp):
min_start_time = min(self.start_times.values())
new_start_times = dict()
for signal_name, start_time in self.start_times.items():
new_start_times[signal_name] = shift + (start_time - min_start_time)
self.start_times = new_start_times
if self.tags is not None:
timedeltas = self.tags - self.tags.min()
self.tags = shift + timedeltas
for dataframe in dataframes:
timedeltas = dataframe.index - dataframe.index.min()
dataframe.index = shift + timedeltas
if isinstance(shift, pd.Timedelta):
for signal_name in self.start_times:
self.start_times[signal_name] += shift
if self.tags is not None:
self.tags += shift
for dataframe in dataframes:
dataframe.index += shift
def _get_joined_dataframe(self):
dataframes = []
variables = [self.ACC, self.BVP, self.EDA,
self.HR, self.TEMP]
for variable in variables:
if variable is not None:
dataframes.append(variable)
if not dataframes:
print('No joined dataframe possible due to lack of data.')
return None
joined_idx = pd.concat([pd.Series(dataframe.index) for dataframe in dataframes])
joined_idx = pd.Index(joined_idx.drop_duplicates().sort_values())
joined_dataframe = pd.DataFrame(index=joined_idx)
if self.ACC is not None:
joined_dataframe.loc[self.ACC.index, 'ACC_X'] = self.ACC['X']
joined_dataframe.loc[self.ACC.index, 'ACC_Y'] = self.ACC['Y']
joined_dataframe.loc[self.ACC.index, 'ACC_Z'] = self.ACC['Z']
if self.BVP is not None:
joined_dataframe.loc[self.BVP.index, 'BVP'] = self.BVP
if self.EDA is not None:
joined_dataframe.loc[self.EDA.index, 'EDA'] = self.EDA
if self.HR is not None:
joined_dataframe.loc[self.HR.index, 'HR'] = self.HR
if self.TEMP is not None:
joined_dataframe.loc[self.TEMP.index, 'TEMP'] = self.TEMP
return joined_dataframe
| try:
if os.stat(path).st_size > 0:
with open(path, 'r') as file:
start_time_str = file.readline().split(', ')[0]
self.start_times[signal_name] = pd.Timestamp(float(start_time_str), unit='s')
sample_freq_str = file.readline().split(', ')[0]
self.sample_freqs[signal_name] = float(sample_freq_str)
col_names = [signal_name] if col_names is None else col_names
dataframe = pd.read_csv(file, header=None, names=col_names)
dataframe.index = pd.date_range(
start=self.start_times[signal_name],
freq=f"{1 / self.sample_freqs[signal_name]}S",
periods=len(dataframe))
if col_names is not None:
dataframe.rename(dict(enumerate(col_names)), inplace=True)
else:
dataframe.rename({0: signal_name}, inplace=True)
return dataframe.squeeze()
else:
print(f"Not reading signal because the file {path} is empty.")
except OSError:
print(f"Not reading signal because the file {path} does not exist.")
return None |
client.go | package main
import (
"bufio"
"context"
"io"
"log"
"os"
"google.golang.org/grpc"
proto "grpcflow/chat" // 根据proto文件自动生成的代码
)
func main() {
// 创建连接
| onn, err := grpc.Dial("localhost:3000", grpc.WithInsecure())
if err != nil {
log.Printf("连接失败: [%v]\n", err)
return
}
defer conn.Close()
// 声明客户端
client := proto.NewChatClient(conn)
// 声明 context
ctx := context.Background()
// 创建双向数据流
stream, err := client.BidStream(ctx)
if err != nil {
log.Printf("创建数据流失败: [%v]\n", err)
}
// 启动一个 goroutine 接收命令行输入的指令
go func() {
log.Println("请输入消息...")
输入 := bufio.NewReader(os.Stdin)
for {
// 获取 命令行输入的字符串, 以回车 \n 作为结束标志
命令行输入的字符串, _ := 输入.ReadString('\n')
// 向服务端发送 指令
if err := stream.Send(&proto.Request{Input: 命令行输入的字符串}); err != nil {
return
}
}
}()
for {
// 接收从 服务端返回的数据流
响应, err := stream.Recv()
if err == io.EOF {
log.Println("⚠️ 收到服务端的结束信号")
break //如果收到结束信号,则退出“接收循环”,结束客户端程序
}
if err != nil {
// TODO: 处理接收错误
log.Println("接收数据出错:", err)
}
// 没有错误的情况下,打印来自服务端的消息
log.Printf("[客户端收到]: %s", 响应.Output)
}
} | c |
clean_end_regions.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module provides one pass, `CleanEndRegions`, that reduces the
//! set of `EndRegion` statements in the MIR.
//!
//! The "pass" is actually implemented as two traversals (aka visits)
//! of the input MIR. The first traversal, `GatherBorrowedRegions`,
//! finds all of the regions in the MIR that are involved in a borrow.
//!
//! The second traversal, `DeleteTrivialEndRegions`, walks over the
//! MIR and removes any `EndRegion` that is applied to a region that
//! was not seen in the previous pass.
use rustc_data_structures::fx::FxHashSet;
use rustc::middle::region;
use rustc::mir::{BasicBlock, Location, Mir, Rvalue, Statement, StatementKind};
use rustc::mir::visit::{MutVisitor, Visitor, TyContext};
use rustc::ty::{Ty, RegionKind, TyCtxt};
use transform::{MirPass, MirSource};
pub struct CleanEndRegions;
struct GatherBorrowedRegions {
seen_regions: FxHashSet<region::Scope>,
}
struct DeleteTrivialEndRegions<'a> {
seen_regions: &'a FxHashSet<region::Scope>,
}
impl MirPass for CleanEndRegions {
fn run_pass<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
_source: MirSource,
mir: &mut Mir<'tcx>) |
}
impl<'tcx> Visitor<'tcx> for GatherBorrowedRegions {
fn visit_rvalue(&mut self,
rvalue: &Rvalue<'tcx>,
location: Location) {
// Gather regions that are used for borrows
if let Rvalue::Ref(r, _, _) = *rvalue {
if let RegionKind::ReScope(ce) = *r {
self.seen_regions.insert(ce);
}
}
self.super_rvalue(rvalue, location);
}
fn visit_ty(&mut self, ty: &Ty<'tcx>, _: TyContext) {
// Gather regions that occur in types
for re in ty.walk().flat_map(|t| t.regions()) {
match *re {
RegionKind::ReScope(ce) => { self.seen_regions.insert(ce); }
_ => {},
}
}
self.super_ty(ty);
}
}
impl<'a, 'tcx> MutVisitor<'tcx> for DeleteTrivialEndRegions<'a> {
fn visit_statement(&mut self,
block: BasicBlock,
statement: &mut Statement<'tcx>,
location: Location) {
let mut delete_it = false;
if let StatementKind::EndRegion(ref region_scope) = statement.kind {
if !self.seen_regions.contains(region_scope) {
delete_it = true;
}
}
if delete_it {
statement.kind = StatementKind::Nop;
}
self.super_statement(block, statement, location);
}
}
| {
if !tcx.sess.emit_end_regions() { return; }
let mut gather = GatherBorrowedRegions {
seen_regions: FxHashSet()
};
gather.visit_mir(mir);
let mut delete = DeleteTrivialEndRegions { seen_regions: &mut gather.seen_regions };
delete.visit_mir(mir);
} |
load_pytorch_weights.py | # Copyright (c) 2021 PPViT Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""convert pytorch model weights to paddle pdparams"""
import os
import numpy as np
import paddle
import torch
import timm
from mlp_mixer import build_mlp_mixer as build_model
from config import get_config
def print_model_named_params(model):
print('----------------------------------')
for name, param in model.named_parameters():
print(name, param.shape)
print('----------------------------------')
def print_model_named_buffers(model):
print('----------------------------------')
for name, param in model.named_buffers():
print(name, param.shape)
print('----------------------------------')
def torch_to_paddle_mapping(model_name, config):
mapping = [
('stem.proj', 'patch_embed.patch_embed'),
]
for stage_idx in range(config.MODEL.MIXER.DEPTH):
th_prefix = f'blocks.{stage_idx}'
pp_prefix = f'mixer_layers.{stage_idx}'
layer_mapping = [
(f'{th_prefix}.norm1', f'{pp_prefix}.norm1'),
(f'{th_prefix}.norm2', f'{pp_prefix}.norm2'),
(f'{th_prefix}.mlp_tokens.fc1', f'{pp_prefix}.mlp_tokens.fc1'),
(f'{th_prefix}.mlp_tokens.fc2', f'{pp_prefix}.mlp_tokens.fc2'),
(f'{th_prefix}.mlp_channels.fc1', f'{pp_prefix}.mlp_channels.fc1'),
(f'{th_prefix}.mlp_channels.fc2', f'{pp_prefix}.mlp_channels.fc2'),
]
mapping.extend(layer_mapping)
head_mapping = [
('norm', 'norm'),
('head', 'head'),
]
mapping.extend(head_mapping)
return mapping
def convert(torch_model, paddle_model, model_name, config):
def _set_value(th_name, pd_name, transpose=True):
th_shape = th_params[th_name].shape
pd_shape = tuple(pd_params[pd_name].shape) # paddle shape default type is list
#assert th_shape == pd_shape, f'{th_shape} != {pd_shape}'
print(f'**SET** {th_name} {th_shape} **TO** {pd_name} {pd_shape}')
if isinstance(th_params[th_name], torch.nn.parameter.Parameter):
value = th_params[th_name].data.numpy()
else:
value = th_params[th_name].numpy()
if len(value.shape) == 2 and transpose:
value = value.transpose((1, 0))
pd_params[pd_name].set_value(value)
# 1. get paddle and torch model parameters
pd_params = {}
th_params = {}
for name, param in paddle_model.named_parameters():
pd_params[name] = param
for name, param in torch_model.named_parameters():
th_params[name] = param
for name, param in paddle_model.named_buffers():
pd_params[name] = param
for name, param in torch_model.named_buffers():
th_params[name] = param
# 2. get name mapping pairs
mapping = torch_to_paddle_mapping(model_name, config)
missing_keys_th = []
missing_keys_pd = []
zip_map = list(zip(*mapping))
th_keys = list(zip_map[0])
pd_keys = list(zip_map[1])
for key in th_params:
missing = False
if key not in th_keys:
missing = True
if key.endswith('.weight'):
|
if key.endswith('.bias'):
if key[:-5] in th_keys:
missing = False
if missing:
missing_keys_th.append(key)
for key in pd_params:
missing = False
if key not in pd_keys:
missing = True
if key.endswith('.weight'):
if key[:-7] in pd_keys:
missing = False
if key.endswith('.bias'):
if key[:-5] in pd_keys:
missing = False
if missing:
missing_keys_pd.append(key)
print('====================================')
print('missing_keys_pytorch:')
print(missing_keys_th)
print('missing_keys_paddle:')
print(missing_keys_pd)
print('====================================')
# 3. set torch param values to paddle params: may needs transpose on weights
for th_name, pd_name in mapping:
if th_name in th_params and pd_name in pd_params: # nn.Parameters
_set_value(th_name, pd_name)
else:
if f'{th_name}.weight' in th_params and f'{pd_name}.weight' in pd_params:
th_name_w = f'{th_name}.weight'
pd_name_w = f'{pd_name}.weight'
_set_value(th_name_w, pd_name_w)
if f'{th_name}.bias' in th_params and f'{pd_name}.bias' in pd_params:
th_name_b = f'{th_name}.bias'
pd_name_b = f'{pd_name}.bias'
_set_value(th_name_b, pd_name_b)
return paddle_model
def main():
paddle.set_device('cpu')
model_name_list = [
'mixer_b16_224',
'mixer_l16_224',
]
for model_name in model_name_list:
print(f'============= NOW: {model_name} =============')
sz = 224
config = get_config(f'./configs/{model_name}.yaml')
paddle_model = build_model(config)
paddle_model.eval()
print_model_named_params(paddle_model)
print_model_named_buffers(paddle_model)
print('+++++++++++++++++++++++++++++++++++')
device = torch.device('cpu')
torch_model = timm.create_model(model_name, pretrained=True)
torch_model = torch_model.to(device)
torch_model.eval()
print_model_named_params(torch_model)
print_model_named_buffers(torch_model)
# convert weights
paddle_model = convert(torch_model, paddle_model, model_name, config)
# check correctness
x = np.random.randn(2, 3, sz, sz).astype('float32')
x_paddle = paddle.to_tensor(x)
x_torch = torch.Tensor(x).to(device)
out_torch = torch_model(x_torch)
out_paddle = paddle_model(x_paddle)
out_torch = out_torch.data.cpu().numpy()
out_paddle = out_paddle.cpu().numpy()
print(out_torch.shape, out_paddle.shape)
print(out_torch[0, 0:100])
print('========================================================')
print(out_paddle[0, 0:100])
assert np.allclose(out_torch, out_paddle, atol = 1e-3)
# save weights for paddle model
model_path = os.path.join(f'./{model_name}.pdparams')
paddle.save(paddle_model.state_dict(), model_path)
print(f'{model_name} done')
print('all done')
if __name__ == "__main__":
main()
| if key[:-7] in th_keys:
missing = False |
amazon_mws_api.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Basic interface to Amazon MWS
# Based on http://code.google.com/p/amazon-mws-python
# Extended to include finances object
from __future__ import unicode_literals
import urllib
import hashlib
import hmac
import base64
import six
from erpnext.erpnext_integrations.doctype.amazon_mws_settings import xml_utils
import re
try:
from xml.etree.ElementTree import ParseError as XMLError
except ImportError:
from xml.parsers.expat import ExpatError as XMLError
from time import strftime, gmtime
from requests import request
from requests.exceptions import HTTPError
__all__ = [
'Feeds',
'Inventory',
'MWSError',
'Reports',
'Orders',
'Products',
'Recommendations',
'Sellers',
'Finances'
]
# See https://images-na.ssl-images-amazon.com/images/G/01/mwsportal/doc/en_US/bde/MWSDeveloperGuide._V357736853_.pdf page 8
# for a list of the end points and marketplace IDs
MARKETPLACES = {
"CA": "https://mws.amazonservices.ca", #A2EUQ1WTGCTBG2
"US": "https://mws.amazonservices.com", #ATVPDKIKX0DER",
"DE": "https://mws-eu.amazonservices.com", #A1PA6795UKMFR9
"ES": "https://mws-eu.amazonservices.com", #A1RKKUPIHCS9HS
"FR": "https://mws-eu.amazonservices.com", #A13V1IB3VIYZZH
"IN": "https://mws.amazonservices.in", #A21TJRUUN4KGV
"IT": "https://mws-eu.amazonservices.com", #APJ6JRA9NG5V4
"UK": "https://mws-eu.amazonservices.com", #A1F83G8C2ARO7P
"JP": "https://mws.amazonservices.jp", #A1VC38T7YXB528
"CN": "https://mws.amazonservices.com.cn", #AAHKV2X7AFYLW
"AE": " https://mws.amazonservices.ae", #A2VIGQ35RCS4UG
"MX": "https://mws.amazonservices.com.mx", #A1AM78C64UM0Y8
"BR": "https://mws.amazonservices.com", #A2Q3Y263D00KWC
}
class MWSError(Exception):
"""
Main MWS Exception class
"""
# Allows quick access to the response object.
# Do not rely on this attribute, always check if its not None.
response = None
def calc_md5(string):
"""Calculates the MD5 encryption for the given string
"""
md = hashlib.md5()
md.update(string)
return base64.encodestring(md.digest()).strip('\n') if six.PY2 \
else base64.encodebytes(md.digest()).decode().strip()
def remove_empty(d):
"""
Helper function that removes all keys from a dictionary (d),
that have an empty value.
"""
for key in list(d):
if not d[key]:
del d[key]
return d
def remove_namespace(xml):
xml = xml.decode('utf-8')
regex = re.compile(' xmlns(:ns2)?="[^"]+"|(ns2:)|(xml:)')
return regex.sub('', xml)
class DictWrapper(object):
def __init__(self, xml, rootkey=None):
self.original = xml
self._rootkey = rootkey
self._mydict = xml_utils.xml2dict().fromstring(remove_namespace(xml))
self._response_dict = self._mydict.get(list(self._mydict)[0], self._mydict)
@property
def parsed(self):
if self._rootkey:
return self._response_dict.get(self._rootkey)
else:
return self._response_dict
class DataWrapper(object):
"""
Text wrapper in charge of validating the hash sent by Amazon.
"""
def __init__(self, data, header):
self.original = data
if 'content-md5' in header:
hash_ = calc_md5(self.original)
if header['content-md5'] != hash_:
raise MWSError("Wrong Contentlength, maybe amazon error...")
@property
def parsed(self):
return self.original
class MWS(object):
""" Base Amazon API class """
# This is used to post/get to the different uris used by amazon per api
# ie. /Orders/2011-01-01
# All subclasses must define their own URI only if needed
URI = "/"
# The API version varies in most amazon APIs
VERSION = "2009-01-01"
# There seem to be some xml namespace issues. therefore every api subclass
# is recommended to define its namespace, so that it can be referenced
# like so AmazonAPISubclass.NS.
# For more information see http://stackoverflow.com/a/8719461/389453
NS = ''
# Some APIs are available only to either a "Merchant" or "Seller"
# the type of account needs to be sent in every call to the amazon MWS.
# This constant defines the exact name of the parameter Amazon expects
# for the specific API being used.
# All subclasses need to define this if they require another account type
# like "Merchant" in which case you define it like so.
# ACCOUNT_TYPE = "Merchant"
# Which is the name of the parameter for that specific account type.
ACCOUNT_TYPE = "SellerId"
def __init__(self, access_key, secret_key, account_id, region='US', domain='', uri="", version=""):
|
def make_request(self, extra_data, method="GET", **kwargs):
"""Make request to Amazon MWS API with these parameters
"""
# Remove all keys with an empty value because
# Amazon's MWS does not allow such a thing.
extra_data = remove_empty(extra_data)
params = {
'AWSAccessKeyId': self.access_key,
self.ACCOUNT_TYPE: self.account_id,
'SignatureVersion': '2',
'Timestamp': self.get_timestamp(),
'Version': self.version,
'SignatureMethod': 'HmacSHA256',
}
params.update(extra_data)
quote = urllib.quote if six.PY2 else urllib.parse.quote
request_description = '&'.join(['%s=%s' % (k, quote(params[k], safe='-_.~')) for k in sorted(params)])
signature = self.calc_signature(method, request_description)
url = '%s%s?%s&Signature=%s' % (self.domain, self.uri, request_description, quote(signature))
headers = {'User-Agent': 'python-amazon-mws/0.0.1 (Language=Python)'}
headers.update(kwargs.get('extra_headers', {}))
try:
# Some might wonder as to why i don't pass the params dict as the params argument to request.
# My answer is, here i have to get the url parsed string of params in order to sign it, so
# if i pass the params dict as params to request, request will repeat that step because it will need
# to convert the dict to a url parsed string, so why do it twice if i can just pass the full url :).
response = request(method, url, data=kwargs.get('body', ''), headers=headers)
response.raise_for_status()
# When retrieving data from the response object,
# be aware that response.content returns the content in bytes while response.text calls
# response.content and converts it to unicode.
data = response.content
# I do not check the headers to decide which content structure to server simply because sometimes
# Amazon's MWS API returns XML error responses with "text/plain" as the Content-Type.
try:
parsed_response = DictWrapper(data, extra_data.get("Action") + "Result")
except XMLError:
parsed_response = DataWrapper(data, response.headers)
except HTTPError as e:
error = MWSError(str(e))
error.response = e.response
raise error
# Store the response object in the parsed_response for quick access
parsed_response.response = response
return parsed_response
def get_service_status(self):
"""
Returns a GREEN, GREEN_I, YELLOW or RED status.
Depending on the status/availability of the API its being called from.
"""
return self.make_request(extra_data=dict(Action='GetServiceStatus'))
def calc_signature(self, method, request_description):
"""Calculate MWS signature to interface with Amazon
"""
sig_data = method + '\n' + self.domain.replace('https://', '').lower() + '\n' + self.uri + '\n' + request_description
sig_data = sig_data.encode('utf-8')
secret_key = self.secret_key.encode('utf-8')
digest = hmac.new(secret_key, sig_data, hashlib.sha256).digest()
return base64.b64encode(digest).decode('utf-8')
def get_timestamp(self):
"""
Returns the current timestamp in proper format.
"""
return strftime("%Y-%m-%dT%H:%M:%SZ", gmtime())
def enumerate_param(self, param, values):
"""
Builds a dictionary of an enumerated parameter.
Takes any iterable and returns a dictionary.
ie.
enumerate_param('MarketplaceIdList.Id', (123, 345, 4343))
returns
{
MarketplaceIdList.Id.1: 123,
MarketplaceIdList.Id.2: 345,
MarketplaceIdList.Id.3: 4343
}
"""
params = {}
if values is not None:
if not param.endswith('.'):
param = "%s." % param
for num, value in enumerate(values):
params['%s%d' % (param, (num + 1))] = value
return params
class Feeds(MWS):
""" Amazon MWS Feeds API """
ACCOUNT_TYPE = "Merchant"
def submit_feed(self, feed, feed_type, marketplaceids=None,
content_type="text/xml", purge='false'):
"""
Uploads a feed ( xml or .tsv ) to the seller's inventory.
Can be used for creating/updating products on Amazon.
"""
data = dict(Action='SubmitFeed',
FeedType=feed_type,
PurgeAndReplace=purge)
data.update(self.enumerate_param('MarketplaceIdList.Id.', marketplaceids))
md = calc_md5(feed)
return self.make_request(data, method="POST", body=feed,
extra_headers={'Content-MD5': md, 'Content-Type': content_type})
def get_feed_submission_list(self, feedids=None, max_count=None, feedtypes=None,
processingstatuses=None, fromdate=None, todate=None):
"""
Returns a list of all feed submissions submitted in the previous 90 days.
That match the query parameters.
"""
data = dict(Action='GetFeedSubmissionList',
MaxCount=max_count,
SubmittedFromDate=fromdate,
SubmittedToDate=todate,)
data.update(self.enumerate_param('FeedSubmissionIdList.Id', feedids))
data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes))
data.update(self.enumerate_param('FeedProcessingStatusList.Status.', processingstatuses))
return self.make_request(data)
def get_submission_list_by_next_token(self, token):
data = dict(Action='GetFeedSubmissionListByNextToken', NextToken=token)
return self.make_request(data)
def get_feed_submission_count(self, feedtypes=None, processingstatuses=None, fromdate=None, todate=None):
data = dict(Action='GetFeedSubmissionCount',
SubmittedFromDate=fromdate,
SubmittedToDate=todate)
data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes))
data.update(self.enumerate_param('FeedProcessingStatusList.Status.', processingstatuses))
return self.make_request(data)
def cancel_feed_submissions(self, feedids=None, feedtypes=None, fromdate=None, todate=None):
data = dict(Action='CancelFeedSubmissions',
SubmittedFromDate=fromdate,
SubmittedToDate=todate)
data.update(self.enumerate_param('FeedSubmissionIdList.Id.', feedids))
data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes))
return self.make_request(data)
def get_feed_submission_result(self, feedid):
data = dict(Action='GetFeedSubmissionResult', FeedSubmissionId=feedid)
return self.make_request(data)
class Reports(MWS):
""" Amazon MWS Reports API """
ACCOUNT_TYPE = "Merchant"
## REPORTS ###
def get_report(self, report_id):
data = dict(Action='GetReport', ReportId=report_id)
return self.make_request(data)
def get_report_count(self, report_types=(), acknowledged=None, fromdate=None, todate=None):
data = dict(Action='GetReportCount',
Acknowledged=acknowledged,
AvailableFromDate=fromdate,
AvailableToDate=todate)
data.update(self.enumerate_param('ReportTypeList.Type.', report_types))
return self.make_request(data)
def get_report_list(self, requestids=(), max_count=None, types=(), acknowledged=None,
fromdate=None, todate=None):
data = dict(Action='GetReportList',
Acknowledged=acknowledged,
AvailableFromDate=fromdate,
AvailableToDate=todate,
MaxCount=max_count)
data.update(self.enumerate_param('ReportRequestIdList.Id.', requestids))
data.update(self.enumerate_param('ReportTypeList.Type.', types))
return self.make_request(data)
def get_report_list_by_next_token(self, token):
data = dict(Action='GetReportListByNextToken', NextToken=token)
return self.make_request(data)
def get_report_request_count(self, report_types=(), processingstatuses=(), fromdate=None, todate=None):
data = dict(Action='GetReportRequestCount',
RequestedFromDate=fromdate,
RequestedToDate=todate)
data.update(self.enumerate_param('ReportTypeList.Type.', report_types))
data.update(self.enumerate_param('ReportProcessingStatusList.Status.', processingstatuses))
return self.make_request(data)
def get_report_request_list(self, requestids=(), types=(), processingstatuses=(),
max_count=None, fromdate=None, todate=None):
data = dict(Action='GetReportRequestList',
MaxCount=max_count,
RequestedFromDate=fromdate,
RequestedToDate=todate)
data.update(self.enumerate_param('ReportRequestIdList.Id.', requestids))
data.update(self.enumerate_param('ReportTypeList.Type.', types))
data.update(self.enumerate_param('ReportProcessingStatusList.Status.', processingstatuses))
return self.make_request(data)
def get_report_request_list_by_next_token(self, token):
data = dict(Action='GetReportRequestListByNextToken', NextToken=token)
return self.make_request(data)
def request_report(self, report_type, start_date=None, end_date=None, marketplaceids=()):
data = dict(Action='RequestReport',
ReportType=report_type,
StartDate=start_date,
EndDate=end_date)
data.update(self.enumerate_param('MarketplaceIdList.Id.', marketplaceids))
return self.make_request(data)
### ReportSchedule ###
def get_report_schedule_list(self, types=()):
data = dict(Action='GetReportScheduleList')
data.update(self.enumerate_param('ReportTypeList.Type.', types))
return self.make_request(data)
def get_report_schedule_count(self, types=()):
data = dict(Action='GetReportScheduleCount')
data.update(self.enumerate_param('ReportTypeList.Type.', types))
return self.make_request(data)
class Orders(MWS):
""" Amazon Orders API """
URI = "/Orders/2013-09-01"
VERSION = "2013-09-01"
NS = '{https://mws.amazonservices.com/Orders/2011-01-01}'
def list_orders(self, marketplaceids, created_after=None, created_before=None, lastupdatedafter=None,
lastupdatedbefore=None, orderstatus=(), fulfillment_channels=(),
payment_methods=(), buyer_email=None, seller_orderid=None, max_results='100'):
data = dict(Action='ListOrders',
CreatedAfter=created_after,
CreatedBefore=created_before,
LastUpdatedAfter=lastupdatedafter,
LastUpdatedBefore=lastupdatedbefore,
BuyerEmail=buyer_email,
SellerOrderId=seller_orderid,
MaxResultsPerPage=max_results,
)
data.update(self.enumerate_param('OrderStatus.Status.', orderstatus))
data.update(self.enumerate_param('MarketplaceId.Id.', marketplaceids))
data.update(self.enumerate_param('FulfillmentChannel.Channel.', fulfillment_channels))
data.update(self.enumerate_param('PaymentMethod.Method.', payment_methods))
return self.make_request(data)
def list_orders_by_next_token(self, token):
data = dict(Action='ListOrdersByNextToken', NextToken=token)
return self.make_request(data)
def get_order(self, amazon_order_ids):
data = dict(Action='GetOrder')
data.update(self.enumerate_param('AmazonOrderId.Id.', amazon_order_ids))
return self.make_request(data)
def list_order_items(self, amazon_order_id):
data = dict(Action='ListOrderItems', AmazonOrderId=amazon_order_id)
return self.make_request(data)
def list_order_items_by_next_token(self, token):
data = dict(Action='ListOrderItemsByNextToken', NextToken=token)
return self.make_request(data)
class Products(MWS):
""" Amazon MWS Products API """
URI = '/Products/2011-10-01'
VERSION = '2011-10-01'
NS = '{http://mws.amazonservices.com/schema/Products/2011-10-01}'
def list_matching_products(self, marketplaceid, query, contextid=None):
""" Returns a list of products and their attributes, ordered by
relevancy, based on a search query that you specify.
Your search query can be a phrase that describes the product
or it can be a product identifier such as a UPC, EAN, ISBN, or JAN.
"""
data = dict(Action='ListMatchingProducts',
MarketplaceId=marketplaceid,
Query=query,
QueryContextId=contextid)
return self.make_request(data)
def get_matching_product(self, marketplaceid, asins):
""" Returns a list of products and their attributes, based on a list of
ASIN values that you specify.
"""
data = dict(Action='GetMatchingProduct', MarketplaceId=marketplaceid)
data.update(self.enumerate_param('ASINList.ASIN.', asins))
return self.make_request(data)
def get_matching_product_for_id(self, marketplaceid, type, id):
""" Returns a list of products and their attributes, based on a list of
product identifier values (asin, sellersku, upc, ean, isbn and JAN)
Added in Fourth Release, API version 2011-10-01
"""
data = dict(Action='GetMatchingProductForId',
MarketplaceId=marketplaceid,
IdType=type)
data.update(self.enumerate_param('IdList.Id', id))
return self.make_request(data)
def get_competitive_pricing_for_sku(self, marketplaceid, skus):
""" Returns the current competitive pricing of a product,
based on the SellerSKU and MarketplaceId that you specify.
"""
data = dict(Action='GetCompetitivePricingForSKU', MarketplaceId=marketplaceid)
data.update(self.enumerate_param('SellerSKUList.SellerSKU.', skus))
return self.make_request(data)
def get_competitive_pricing_for_asin(self, marketplaceid, asins):
""" Returns the current competitive pricing of a product,
based on the ASIN and MarketplaceId that you specify.
"""
data = dict(Action='GetCompetitivePricingForASIN', MarketplaceId=marketplaceid)
data.update(self.enumerate_param('ASINList.ASIN.', asins))
return self.make_request(data)
def get_lowest_offer_listings_for_sku(self, marketplaceid, skus, condition="Any", excludeme="False"):
data = dict(Action='GetLowestOfferListingsForSKU',
MarketplaceId=marketplaceid,
ItemCondition=condition,
ExcludeMe=excludeme)
data.update(self.enumerate_param('SellerSKUList.SellerSKU.', skus))
return self.make_request(data)
def get_lowest_offer_listings_for_asin(self, marketplaceid, asins, condition="Any", excludeme="False"):
data = dict(Action='GetLowestOfferListingsForASIN',
MarketplaceId=marketplaceid,
ItemCondition=condition,
ExcludeMe=excludeme)
data.update(self.enumerate_param('ASINList.ASIN.', asins))
return self.make_request(data)
def get_product_categories_for_sku(self, marketplaceid, sku):
data = dict(Action='GetProductCategoriesForSKU',
MarketplaceId=marketplaceid,
SellerSKU=sku)
return self.make_request(data)
def get_product_categories_for_asin(self, marketplaceid, asin):
data = dict(Action='GetProductCategoriesForASIN',
MarketplaceId=marketplaceid,
ASIN=asin)
return self.make_request(data)
def get_my_price_for_sku(self, marketplaceid, skus, condition=None):
data = dict(Action='GetMyPriceForSKU',
MarketplaceId=marketplaceid,
ItemCondition=condition)
data.update(self.enumerate_param('SellerSKUList.SellerSKU.', skus))
return self.make_request(data)
def get_my_price_for_asin(self, marketplaceid, asins, condition=None):
data = dict(Action='GetMyPriceForASIN',
MarketplaceId=marketplaceid,
ItemCondition=condition)
data.update(self.enumerate_param('ASINList.ASIN.', asins))
return self.make_request(data)
class Sellers(MWS):
""" Amazon MWS Sellers API """
URI = '/Sellers/2011-07-01'
VERSION = '2011-07-01'
NS = '{http://mws.amazonservices.com/schema/Sellers/2011-07-01}'
def list_marketplace_participations(self):
"""
Returns a list of marketplaces a seller can participate in and
a list of participations that include seller-specific information in that marketplace.
The operation returns only those marketplaces where the seller's account is in an active state.
"""
data = dict(Action='ListMarketplaceParticipations')
return self.make_request(data)
def list_marketplace_participations_by_next_token(self, token):
"""
Takes a "NextToken" and returns the same information as "list_marketplace_participations".
Based on the "NextToken".
"""
data = dict(Action='ListMarketplaceParticipations', NextToken=token)
return self.make_request(data)
#### Fulfillment APIs ####
class InboundShipments(MWS):
URI = "/FulfillmentInboundShipment/2010-10-01"
VERSION = '2010-10-01'
# To be completed
class Inventory(MWS):
""" Amazon MWS Inventory Fulfillment API """
URI = '/FulfillmentInventory/2010-10-01'
VERSION = '2010-10-01'
NS = "{http://mws.amazonaws.com/FulfillmentInventory/2010-10-01}"
def list_inventory_supply(self, skus=(), datetime=None, response_group='Basic'):
""" Returns information on available inventory """
data = dict(Action='ListInventorySupply',
QueryStartDateTime=datetime,
ResponseGroup=response_group,
)
data.update(self.enumerate_param('SellerSkus.member.', skus))
return self.make_request(data, "POST")
def list_inventory_supply_by_next_token(self, token):
data = dict(Action='ListInventorySupplyByNextToken', NextToken=token)
return self.make_request(data, "POST")
class OutboundShipments(MWS):
URI = "/FulfillmentOutboundShipment/2010-10-01"
VERSION = "2010-10-01"
# To be completed
class Recommendations(MWS):
""" Amazon MWS Recommendations API """
URI = '/Recommendations/2013-04-01'
VERSION = '2013-04-01'
NS = "{https://mws.amazonservices.com/Recommendations/2013-04-01}"
def get_last_updated_time_for_recommendations(self, marketplaceid):
"""
Checks whether there are active recommendations for each category for the given marketplace, and if there are,
returns the time when recommendations were last updated for each category.
"""
data = dict(Action='GetLastUpdatedTimeForRecommendations',
MarketplaceId=marketplaceid)
return self.make_request(data, "POST")
def list_recommendations(self, marketplaceid, recommendationcategory=None):
"""
Returns your active recommendations for a specific category or for all categories for a specific marketplace.
"""
data = dict(Action="ListRecommendations",
MarketplaceId=marketplaceid,
RecommendationCategory=recommendationcategory)
return self.make_request(data, "POST")
def list_recommendations_by_next_token(self, token):
"""
Returns the next page of recommendations using the NextToken parameter.
"""
data = dict(Action="ListRecommendationsByNextToken",
NextToken=token)
return self.make_request(data, "POST")
class Finances(MWS):
""" Amazon Finances API"""
URI = '/Finances/2015-05-01'
VERSION = '2015-05-01'
NS = "{https://mws.amazonservices.com/Finances/2015-05-01}"
def list_financial_events(self , posted_after=None, posted_before=None,
amazon_order_id=None, max_results='100'):
data = dict(Action='ListFinancialEvents',
PostedAfter=posted_after,
PostedBefore=posted_before,
AmazonOrderId=amazon_order_id,
MaxResultsPerPage=max_results,
)
return self.make_request(data)
| self.access_key = access_key
self.secret_key = secret_key
self.account_id = account_id
self.version = version or self.VERSION
self.uri = uri or self.URI
if domain:
self.domain = domain
elif region in MARKETPLACES:
self.domain = MARKETPLACES[region]
else:
error_msg = "Incorrect region supplied ('%(region)s'). Must be one of the following: %(marketplaces)s" % {
"marketplaces" : ', '.join(MARKETPLACES.keys()),
"region" : region,
}
raise MWSError(error_msg) |
lib.rs | #[cfg(any(unix, windows))] |
#[cfg(all(unix, target_pointer_width = "32"))]
fn bar() {}
#[cfg(not(foo))]
fn not_foo() {}
#[cfg(not(test))]
fn not_test() {} | fn foo() {} |
zz_generated.openapi.go | // +build !ignore_autogenerated
// This file was autogenerated by openapi-gen. Do not edit it manually!
package v1alpha1
import (
spec "github.com/go-openapi/spec"
common "k8s.io/kube-openapi/pkg/common"
)
func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenAPIDefinition {
return map[string]common.OpenAPIDefinition{
"./pkg/apis/dynatrace/v1alpha1.OneAgent": schema_pkg_apis_dynatrace_v1alpha1_OneAgent(ref),
"./pkg/apis/dynatrace/v1alpha1.OneAgentSpec": schema_pkg_apis_dynatrace_v1alpha1_OneAgentSpec(ref),
"./pkg/apis/dynatrace/v1alpha1.OneAgentStatus": schema_pkg_apis_dynatrace_v1alpha1_OneAgentStatus(ref),
}
}
func | (ref common.ReferenceCallback) common.OpenAPIDefinition {
return common.OpenAPIDefinition{
Schema: spec.Schema{
SchemaProps: spec.SchemaProps{
Description: "OneAgent is the Schema for the oneagents API",
Type: []string{"object"},
Properties: map[string]spec.Schema{
"kind": {
SchemaProps: spec.SchemaProps{
Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds",
Type: []string{"string"},
Format: "",
},
},
"apiVersion": {
SchemaProps: spec.SchemaProps{
Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources",
Type: []string{"string"},
Format: "",
},
},
"metadata": {
SchemaProps: spec.SchemaProps{
Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta"),
},
},
"spec": {
SchemaProps: spec.SchemaProps{
Ref: ref("./pkg/apis/dynatrace/v1alpha1.OneAgentSpec"),
},
},
"status": {
SchemaProps: spec.SchemaProps{
Ref: ref("./pkg/apis/dynatrace/v1alpha1.OneAgentStatus"),
},
},
},
Required: []string{"spec"},
},
},
Dependencies: []string{
"./pkg/apis/dynatrace/v1alpha1.OneAgentSpec", "./pkg/apis/dynatrace/v1alpha1.OneAgentStatus", "k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta"},
}
}
func schema_pkg_apis_dynatrace_v1alpha1_OneAgentSpec(ref common.ReferenceCallback) common.OpenAPIDefinition {
return common.OpenAPIDefinition{
Schema: spec.Schema{
SchemaProps: spec.SchemaProps{
Description: "OneAgentSpec defines the desired state of OneAgent",
Type: []string{"object"},
Properties: map[string]spec.Schema{
"apiUrl": {
SchemaProps: spec.SchemaProps{
Description: "Dynatrace api url including `/api` path at the end either set ENVIRONMENTID to the proper tenant id or change the apiUrl as a whole, e.q. for Managed",
Type: []string{"string"},
Format: "",
},
},
"skipCertCheck": {
SchemaProps: spec.SchemaProps{
Description: "Disable certificate validation checks for installer download and API communication",
Type: []string{"boolean"},
Format: "",
},
},
"nodeSelector": {
SchemaProps: spec.SchemaProps{
Description: "Node selector to control the selection of nodes (optional)",
Type: []string{"object"},
AdditionalProperties: &spec.SchemaOrBool{
Allows: true,
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Type: []string{"string"},
Format: "",
},
},
},
},
},
"tolerations": {
VendorExtensible: spec.VendorExtensible{
Extensions: spec.Extensions{
"x-kubernetes-list-type": "set",
},
},
SchemaProps: spec.SchemaProps{
Description: "https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ (optional)",
Type: []string{"array"},
Items: &spec.SchemaOrArray{
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Ref: ref("k8s.io/api/core/v1.Toleration"),
},
},
},
},
},
"waitReadySeconds": {
SchemaProps: spec.SchemaProps{
Description: "Defines the time to wait until OneAgent pod is ready after update - default 300 sec (optional)",
Type: []string{"integer"},
Format: "int32",
},
},
"image": {
SchemaProps: spec.SchemaProps{
Description: "Installer image Defaults to docker.io/dynatrace/oneagent:latest",
Type: []string{"string"},
Format: "",
},
},
"tokens": {
SchemaProps: spec.SchemaProps{
Description: "Name of secret containing tokens Secret must contain keys `apiToken` and `paasToken`",
Type: []string{"string"},
Format: "",
},
},
"args": {
VendorExtensible: spec.VendorExtensible{
Extensions: spec.Extensions{
"x-kubernetes-list-type": "set",
},
},
SchemaProps: spec.SchemaProps{
Description: "Arguments to the installer.",
Type: []string{"array"},
Items: &spec.SchemaOrArray{
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Type: []string{"string"},
Format: "",
},
},
},
},
},
"env": {
VendorExtensible: spec.VendorExtensible{
Extensions: spec.Extensions{
"x-kubernetes-list-type": "set",
},
},
SchemaProps: spec.SchemaProps{
Description: "List of environment variables to set for the installer.",
Type: []string{"array"},
Items: &spec.SchemaOrArray{
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Ref: ref("k8s.io/api/core/v1.EnvVar"),
},
},
},
},
},
"resources": {
SchemaProps: spec.SchemaProps{
Description: "Compute Resources required by OneAgent containers.",
Ref: ref("k8s.io/api/core/v1.ResourceRequirements"),
},
},
"priorityClassName": {
SchemaProps: spec.SchemaProps{
Description: "If specified, indicates the pod's priority. Name must be defined by creating a PriorityClass object with that name. If not specified the setting will be removed from the DaemonSet.",
Type: []string{"string"},
Format: "",
},
},
"disableAgentUpdate": {
SchemaProps: spec.SchemaProps{
Description: "If enabled, OneAgent pods won't be restarted automatically in case a new version is available",
Type: []string{"boolean"},
Format: "",
},
},
"enableIstio": {
SchemaProps: spec.SchemaProps{
Description: "If enabled, Istio on the cluster will be configured automatically to allow access to the Dynatrace environment.",
Type: []string{"boolean"},
Format: "",
},
},
"dnsPolicy": {
SchemaProps: spec.SchemaProps{
Description: "DNS Policy for the OneAgent pods.",
Type: []string{"string"},
Format: "",
},
},
"serviceAccountName": {
SchemaProps: spec.SchemaProps{
Description: "Name of the service account for the OneAgent",
Type: []string{"string"},
Format: "",
},
},
"labels": {
SchemaProps: spec.SchemaProps{
Description: "Labels for the OneAgent pods",
Type: []string{"object"},
AdditionalProperties: &spec.SchemaOrBool{
Allows: true,
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Type: []string{"string"},
Format: "",
},
},
},
},
},
},
Required: []string{"apiUrl"},
},
},
Dependencies: []string{
"k8s.io/api/core/v1.EnvVar", "k8s.io/api/core/v1.ResourceRequirements", "k8s.io/api/core/v1.Toleration"},
}
}
func schema_pkg_apis_dynatrace_v1alpha1_OneAgentStatus(ref common.ReferenceCallback) common.OpenAPIDefinition {
return common.OpenAPIDefinition{
Schema: spec.Schema{
SchemaProps: spec.SchemaProps{
Description: "OneAgentStatus defines the observed state of OneAgent",
Type: []string{"object"},
Properties: map[string]spec.Schema{
"version": {
SchemaProps: spec.SchemaProps{
Type: []string{"string"},
Format: "",
},
},
"instances": {
SchemaProps: spec.SchemaProps{
Type: []string{"object"},
AdditionalProperties: &spec.SchemaOrBool{
Allows: true,
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Ref: ref("./pkg/apis/dynatrace/v1alpha1.OneAgentInstance"),
},
},
},
},
},
"updatedTimestamp": {
SchemaProps: spec.SchemaProps{
Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.Time"),
},
},
"phase": {
SchemaProps: spec.SchemaProps{
Description: "Defines the current state (Running, Updating, Error, ...)",
Type: []string{"string"},
Format: "",
},
},
"conditions": {
VendorExtensible: spec.VendorExtensible{
Extensions: spec.Extensions{
"x-kubernetes-list-type": "set",
},
},
SchemaProps: spec.SchemaProps{
Type: []string{"array"},
Items: &spec.SchemaOrArray{
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Ref: ref("./pkg/apis/dynatrace/v1alpha1.OneAgentCondition"),
},
},
},
},
},
"lastAPITokenProbeTimestamp": {
SchemaProps: spec.SchemaProps{
Description: "LastAPITokenProbeTimestamp tracks when the last request for the API token validity was sent.",
Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.Time"),
},
},
"lastPaaSTokenProbeTimestamp": {
SchemaProps: spec.SchemaProps{
Description: "LastPaaSTokenProbeTimestamp tracks when the last request for the PaaS token validity was sent.",
Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.Time"),
},
},
},
},
},
Dependencies: []string{
"./pkg/apis/dynatrace/v1alpha1.OneAgentCondition", "./pkg/apis/dynatrace/v1alpha1.OneAgentInstance", "k8s.io/apimachinery/pkg/apis/meta/v1.Time"},
}
}
| schema_pkg_apis_dynatrace_v1alpha1_OneAgent |
reward-tree.interface.ts | export interface RewardTree { | index: string;
cycle: string;
user: string;
tokens: string[];
cumulativeAmounts: string[];
proof: string[];
node: string;
} | |
lib.rs | //! # Randomized Condorcet Voting System
//!
//! The crate `rcvs` implements the Randomized Condorcet Voting System, a
//! strategy-proof voting system using game theory to generalize the original
//! Condorcet method.
//!
//! ## Condorcet method
//!
//! The Condorcet method consists of building a directed graph called the _duel
//! graph_ of the election. Its vertices are the alternatives to vote among,
//! and an arrow between two alternatives _A_ and _B_ means _A_ is preferred
//! over _B_ more often than the opposite. The Condorcet criterion states that
//! if the duel graph has a unique source, then the alternative it corresponds
//! to never loses in a duel against another alternative and therefore must be
//! elected.
//!
//! ## Randomization
//!
//! If no source or several exist, then the Condorcet criterion is not
//! applicable and something else must be used. As surprising as it seems,
//! randomly picking the winner usually yields very good properties in voting
//! systems, but in order to maximize the electors' utility (or rather minimize
//! the number of electors who end up wishing another alternative won), the
//! probability distribution used to pick the winner (called strategy) is not
//! necessarily uniform. Computing the optimal strategy requires some knowledge
//! of game theory and linear programming, and the resulting voting system has
//! excellent strategic properties.
//!
//! ## Implementation
//!
//! This crate provides structures to carry out elections using the Randomized
//! Condorcet Voting System in Rust. It uses the crate
//! [nalgebra](https://crates.io/crates/nalgebra) to solve linear programs
//! and compute the optimal strategy, and [rand](https://crates.io/crates/rand)
//! to generate pseudo-random numbers which are used both for picking winners
//! randomly and for more efficient internal numerical algorithms.
//!
//! It is never mentioned in this documentation, but whenever a method takes an
//! argument implementing `rand::Rng`, it means it will make use of
//! pseudo-random numbers and the programmer will need to provide one,
//! `rand::thread_rng()` being a quick-and-dirty default which is used in this
//! crate's unit tests.
extern crate nalgebra as na;
extern crate rand;
mod ballot;
mod simplex;
mod strategies;
pub mod util;
use std::{
clone::Clone,
cmp::{Eq, Ordering},
collections::{HashMap, HashSet},
error::Error,
fmt,
hash::Hash,
ops::Index,
};
pub use ballot::Ballot;
pub use ballot::Rank;
pub use simplex::SimplexError;
pub use strategies::Strategy;
type Adjacency = na::DMatrix<bool>;
type Matrix = na::DMatrix<f64>;
type Vector = na::DVector<f64>;
#[derive(Clone, Debug, Hash)]
struct Arrow<A>(A, A);
impl<A: Eq> PartialEq for Arrow<A> {
fn eq(&self, other: &Arrow<A>) -> bool {
self.0 == other.0 && self.1 == other.1
}
}
impl<A: Eq> Eq for Arrow<A> {}
/// Implements the duel graph of an election.
#[derive(Clone, Debug)]
pub struct DuelGraph<A: fmt::Debug> {
v: Vec<A>,
a: Adjacency,
}
/// Implements errors in the election process. Interfaces with simplex errors.
#[derive(Debug)]
pub enum ElectionError {
/// The simplex algorithm failed to compute both the minimax and maximin
/// strategies; the underlying errors are contained in the arguments.
BothFailed(simplex::SimplexError, simplex::SimplexError),
/// The simplex algorithm failed to compute the strategy; the underlying
/// error is contained in the argument.
SimplexFailed(simplex::SimplexError),
/// The operation failed because the election is already closed.
ElectionClosed,
/// The operation failed because the election is still open.
ElectionOpen,
}
impl fmt::Display for ElectionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ElectionError::BothFailed(a, b) => {
writeln!(f, "Both methods failed:")?;
writeln!(f, " * minimax: {}", a)?;
writeln!(f, " * maximin: {}", b)
}
ElectionError::SimplexFailed(e) => write!(f, "Simplex algorithm failed: {}", e),
ElectionError::ElectionClosed => write!(f, "Election is closed"),
ElectionError::ElectionOpen => write!(f, "Election is open"),
}
}
}
impl From<simplex::SimplexError> for ElectionError {
fn from(error: simplex::SimplexError) -> Self {
ElectionError::SimplexFailed(error)
}
}
impl Error for ElectionError {
fn description(&self) -> &str {
match self {
ElectionError::BothFailed(_, _) => {
"Both minimax and maximin strategies failed to be solved"
}
ElectionError::SimplexFailed(_) => {
"The simplex algorithm failed to compute the strategy"
}
ElectionError::ElectionClosed => "Election is already closed",
ElectionError::ElectionOpen => "Election is still open",
}
}
fn source(&self) -> Option<&(dyn Error + 'static)> {
// in case of multiple cause, no other choice but to return itself
match self {
ElectionError::BothFailed(_, _) => Some(self),
ElectionError::SimplexFailed(e) => Some(e),
_ => None,
}
}
}
impl<A: fmt::Debug> fmt::Display for DuelGraph<A> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "Graph {{")?;
writeln!(f, "Alternatives: {:?}", self.v)?;
writeln!(f, "{}", self.a)?;
write!(f, "}}")
}
}
impl<A: fmt::Debug> Index<(usize, usize)> for DuelGraph<A> {
type Output = bool;
/// Decides whether an arrow is in the graph
fn index(&self, (from, to): (usize, usize)) -> &Self::Output {
&self.a[(from, to)]
}
}
impl<A: Clone + Eq + Hash + fmt::Debug> DuelGraph<A> {
/// Returns a slice of the vertice labels
pub fn get_vertices(&self) -> &[A] {
&self.v
}
fn get_special_node(&self, f: impl Fn(usize, usize) -> (usize, usize)) -> Option<A> {
let mut n: Option<A> = None;
for i in 0..self.v.len() {
if (0..self.v.len()).all(|j| !self.a[f(i, j)]) {
match n {
Some(_) => return None,
None => n = Some(self.v[i].clone()),
}
}
}
n
}
/// Returns the source of the graph if it is unique, `None` otherwise.
pub fn get_source(&self) -> Option<A> {
self.get_special_node(|i, j| (j, i))
}
/// Returns the sink of the graph if it is unique, `None` otherwise.
pub fn get_sink(&self) -> Option<A> {
self.get_special_node(|i, j| (i, j))
}
fn adjacency_to_matrix(a: &Adjacency) -> Matrix {
let (n, nn) = a.shape();
assert_eq!(n, nn);
let mut m = Matrix::zeros(n, n);
for i in 0..n {
for j in 0..i {
if a[(i, j)] {
m[(i, j)] = 1f64;
m[(j, i)] = -1f64;
} else if a[(j, i)] {
m[(j, i)] = 1f64;
m[(i, j)] = -1f64;
}
}
}
m
}
fn compute_strategy(
&self,
m: &Matrix,
bval: f64,
cval: f64,
) -> Result<Strategy<A>, simplex::SimplexError> {
let n = self.v.len();
let b = Vector::from_element(n, bval);
let c = Vector::from_element(n, cval);
let x = simplex::simplex(m, &c, &b)?;
let mut mixed_data: Vec<(A, f64)> = self
.v
.iter()
.cloned()
.zip(x.into_iter().map(|&x| x))
.collect();
mixed_data.sort_unstable_by(|(_, p), (_, q)| p.partial_cmp(&q).unwrap());
let sum: f64 = mixed_data.iter().map(|(_, p)| p).sum();
for (_, p) in mixed_data.iter_mut() {
*p /= sum;
}
Ok(Strategy::Mixed(mixed_data))
}
/// Returns the minimax strategy of the duel graph.
///
/// # Errors
///
/// If the simplex algorithm fails, returns an error describing the reason
/// why.
pub fn get_minimax_strategy(&self) -> Result<Strategy<A>, simplex::SimplexError> {
let mut m = Self::adjacency_to_matrix(&self.a);
m.iter_mut().for_each(|e| *e += 2f64);
self.compute_strategy(&m, 1f64, -1f64)
}
/// Returns the maximin strategy of the duel graph.
///
/// # Errors
///
/// If the simplex algorithm fails, returns an error describing the reason
/// why.
pub fn get_maximin_strategy(&self) -> Result<Strategy<A>, simplex::SimplexError> {
let mut m = Self::adjacency_to_matrix(&self.a).transpose();
m.iter_mut().for_each(|e| *e = -(*e + 2f64));
self.compute_strategy(&m, -1f64, 1f64)
}
/// Returns an optimal strategy for the duel graph.
/// * If the graph has a source, returns a pure strategy electing it.
/// * If the simplex algorithm manages to compute both the minimax and
/// maximin strategies, floating-point operations might cause one to score
/// slightly higher. Returns the higher-scoring one.
/// * If the simplex algorithm only manages to compute one of minimax and
/// maximin, returns said strategy.
///
/// # Errors
///
/// If the simplex algorithm fails to compute both strategies, returns an
/// error giving both reasons.
pub fn get_optimal_strategy(&self) -> Result<Strategy<A>, ElectionError> {
match self.get_source() {
Some(x) => Ok(Strategy::Pure(x)),
None => match (self.get_minimax_strategy(), self.get_maximin_strategy()) {
(Ok(minimax), Ok(maximin)) => {
Ok(match self.compare_strategies(&minimax, &maximin) {
Ordering::Less => maximin,
_ => minimax,
})
}
(Err(_), Ok(maximin)) => Ok(maximin),
(Ok(minimax), Err(_)) => Ok(minimax),
(Err(e), Err(f)) => Err(ElectionError::BothFailed(e, f)),
},
}
}
fn strategy_vector(&self, p: &Strategy<A>) -> Vector {
match p {
Strategy::Pure(x) => Vector::from_iterator(
self.v.len(),
self.v.iter().map(|e| if e == x { 1f64 } else { 0f64 }),
),
Strategy::Mixed(u) => Vector::from_iterator(
self.v.len(),
self.v
.iter()
.map(|x| match u.iter().find(|(y, _)| *y == *x) {
None => panic!("Alternative not found"),
Some((_, p)) => p.clone(),
}),
),
}
}
/// Returns a comparating number between two strategies `x` and `y`. If
/// negative, then `x` performs worse than `y` for the graph `self`. If
/// positive, then `x` performs better than `y` for the graph `self`.
pub fn confront_strategies(&self, x: &Strategy<A>, y: &Strategy<A>) -> f64 {
let m = Self::adjacency_to_matrix(&self.a);
let p = self.strategy_vector(x);
let q = self.strategy_vector(y);
(p.transpose() * m * q)[(0, 0)]
}
// NOTE: This is numerically unstable
/// Compares two strategies for the given graph to determine which one
/// scores the better.
///
/// Floating-point operations can make this method unsuitable for some
/// uses. Consider using `confront_strategies()` with an epsilon instead.
pub fn compare_strategies(&self, x: &Strategy<A>, y: &Strategy<A>) -> std::cmp::Ordering {
self.confront_strategies(x, y).partial_cmp(&0f64).unwrap()
}
}
/// Implements an election using the Randomized Condorcet Voting System.
#[derive(Clone)]
pub struct Election<A: Clone + Eq + Hash> {
alternatives: HashSet<A>,
duels: HashMap<Arrow<A>, u64>,
open: bool,
}
impl<A: Clone + Eq + Hash + fmt::Debug> Election<A> {
/// Creates a new empty election.
pub fn new() -> Election<A> {
Election::<A> {
alternatives: HashSet::new(),
duels: HashMap::new(),
open: true,
}
}
fn get(&self, x: &A, y: &A) -> Option<u64> {
self.duels
.get(&Arrow::<A>(x.to_owned(), y.to_owned()))
.cloned()
}
/// Closes the election, preventing the casting of ballots.
pub fn close(&mut self) {
self.open = false;
}
/// Attemps to cast a ballot. Returns `true` if the casting was successful
/// and `false` if it was not (which only happens if the election is
/// closed).
///
/// Casting an alternative that is not in the set of the alternatives of
/// the election will add it to the set; if the electors are not supposed
/// to be able to add their own alternatives, enforcing this rule is at the
/// responsibility of the programmer using the structure.
pub fn cast(&mut self, ballot: Ballot<A>) -> bool {
if !self.open {
return false;
}
for x in ballot.iter() {
let (a, r) = x;
self.alternatives.insert(a.to_owned());
for y in ballot.iter() {
let (b, s) = y;
self.alternatives.insert(b.to_owned());
if r > s {
let n = self.get(a, b).unwrap_or(0) + 1;
self.duels.insert(Arrow::<A>(a.to_owned(), b.to_owned()), n);
}
}
}
true
}
/// Attempts to agregate an election `sub` into the main election `self`,
/// merging their lists of alternatives and duels. Returns `true` if the
/// merging was possible, or `false` if it failed.
///
/// Agregating `sub` into `self` requires `sub` to be closed and `self` to
/// be open.
pub fn agregate(&mut self, sub: Election<A>) -> bool {
if !self.open || sub.open {
return false;
}
for x in sub.alternatives.into_iter() {
self.alternatives.insert(x);
}
for (Arrow::<A>(x, y), m) in sub.duels.into_iter() {
let n = m + self.get(&x, &y).unwrap_or(0);
self.duels.insert(Arrow::<A>(x, y), n);
}
true
}
/// Attempts to normalize an election. If the election is still open, this
/// method does nothing. Normalizing means setting the election's internal
/// state so that it reflects what the duel graph would be. In other
/// words, if the election counted that `a` electors prefer `A` over `B`
/// and `b` electors prefer `B` over `A`, then:
/// * if `a > b`, then it will be as if it only counted one elector
/// prefering `A` over `B`;
/// * if `b > a`, then it will be as if it only counted one elector
/// prefering `B` over `A`;
/// * if `a == b`, then it will be as if no elector ever compared `A` to
/// `B`.
///
/// Since this method requires the election to be closed, it cannot be
/// used to mess with a direct election. This method is intended to be used
/// with `agregate()` to carry out elections working like the American
/// Electoral College.
///
/// Normalizing an election before computing its duel graph is not
/// necessary.
///
/// # Example
///
/// ```
/// # use rcvs::Election;
/// let mut sub_a = Election::new();
/// // -- carry out election sub_a --
/// # sub_a.add_alternative(&"Alpha");
/// sub_a.close();
///
/// let mut sub_b = Election::new();
/// // -- carry out election sub_b --
/// # sub_b.add_alternative(&"Alpha");
/// sub_b.close();
///
/// /*
/// * normalize both elections so that the main election treats them
/// * equally
/// */
/// sub_a.normalize();
/// sub_b.normalize();
///
/// // agregate both elections into a main election
/// let mut e = Election::new();
/// e.agregate(sub_a);
/// e.agregate(sub_b);
/// e.close();
/// ```
pub fn normalize(&mut self) {
if self.open {
return;
}
for x in self.alternatives.iter() {
for y in self.alternatives.iter() {
let xy = Arrow::<A>(x.clone(), y.clone());
let yx = Arrow::<A>(y.clone(), x.clone());
// Dirty workaround for the fact `if let` borrows self.duels
let m;
if let Some(k) = self.duels.get(&xy) {
m = k.clone();
} else {
continue;
}
let n;
if let Some(k) = self.duels.get(&yx) {
n = k.clone();
} else {
continue;
}
match m.cmp(&n) {
Ordering::Less => {
self.duels.remove(&xy);
self.duels.insert(yx, 1);
}
Ordering::Equal => {
self.duels.remove(&xy);
self.duels.remove(&yx);
}
Ordering::Greater => {
self.duels.insert(xy, 1);
self.duels.remove(&yx);
}
}
}
}
}
/// Adds an alternative to the set of alternatives without casting any
/// vote. Returns `true` if the addition was successful, and `false` if the
/// election is closed or if the alternative was already present.
pub fn | (&mut self, v: &A) -> bool {
if !self.open {
return false;
}
self.alternatives.insert(v.to_owned())
}
/// Returns the duel graph of an election. A duel graph may be computed
/// before the election is closed, giving information on a partial result
/// of the election.
pub fn get_duel_graph(&self) -> DuelGraph<A> {
let v: Vec<A> = self.alternatives.iter().cloned().collect();
let n = v.len();
let mut a = Adjacency::from_element(n, n, false);
for (i, x) in v.iter().enumerate() {
for (j, y) in v.iter().enumerate() {
match (self.get(x, y), self.get(y, x)) {
(Some(m), Some(n)) if m > n => a[(i, j)] = true,
(Some(_), None) => a[(i, j)] = true,
_ => (),
}
}
}
DuelGraph { v: v, a: a }
}
/// Decides if `x` is already in the set of alternatives known to the
/// election. For an alternative to be there, at least one ballot involving
/// it must have been cast, or it must have been manually added with the
/// method `add_alternative()`.
pub fn has_alternative(&self, x: &A) -> bool {
self.alternatives.contains(x)
}
/// Returns the Condorcet winner of the election if it exists, `None`
/// otherwise.
///
/// Internally, this method computes the duel graph of the election.
/// Instead of calling several methods that do it in the same scope,
/// consider computing the duel graph separately and operating on it.
pub fn get_condorcet_winner(&self) -> Option<A> {
self.get_duel_graph().get_source()
}
/// Returns the Condorcet loser of the election if it exists, `None`
/// otherwise.
///
/// Internally, this method computes the duel graph of the election.
/// Instead of calling several methods that do it in the same scope,
/// consider computing the duel graph separately and operating on it.
pub fn get_condorcet_loser(&self) -> Option<A> {
self.get_duel_graph().get_sink()
}
/// Returns the minimax strategy of the election.
///
/// Internally, this method computes the duel graph of the election.
/// Instead of calling several methods that do it in the same scope,
/// consider computing the duel graph separately and operating on it.
///
/// # Errors
///
/// If the simplex algorithm fails to compute the strategy, an error
/// describing the reason why is returned.
pub fn get_minimax_strategy(&self) -> Result<Strategy<A>, simplex::SimplexError> {
self.get_duel_graph().get_minimax_strategy()
}
/// Returns the maximin strategy of the election.
///
/// Internally, this method computes the duel graph of the election.
/// Instead of calling several methods that do it in the same scope,
/// consider computing the duel graph separately and operating on it.
///
/// # Errors
///
/// If the simplex algorithm fails to compute the strategy, an error
/// describing the reason why is returned.
pub fn get_maximin_strategy(&self) -> Result<Strategy<A>, simplex::SimplexError> {
self.get_duel_graph().get_maximin_strategy()
}
/// Returns the optimal strategy of the election.
///
/// Internally, this method computes the duel graph of the election.
/// Instead of calling several methods that do it in the same scope,
/// consider computing the duel graph separately and operating on it.
///
/// # Errors
///
/// If the election has no Condorcet winner and the simplex algorithm fails
/// to compute both the minimax and maximin strategies, an error describing
/// both failures is returned.
pub fn get_optimal_strategy(&self) -> Result<Strategy<A>, ElectionError> {
self.get_duel_graph().get_optimal_strategy()
}
/// Elects the winner of the election using the optimal strategy.
///
/// Internally, this method computes the duel graph of the election.
/// Instead of calling several methods that do it in the same scope,
/// consider computing the duel graph separately and operating on it.
///
/// # Errors
///
/// If the election has no Condorcet winner and the simplex algorithm fails
/// to compute both the minimax and maximin strategies, an error describing
/// both failures is returned.
pub fn get_randomized_winner(
&self,
rng: &mut impl rand::Rng,
) -> Result<Option<A>, ElectionError> {
Ok(self.get_optimal_strategy()?.play(rng))
}
}
impl<A: Clone + Eq + Hash + fmt::Display> fmt::Display for Election<A> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f, "Election {{")?;
for x in self.duels.iter() {
let (Arrow::<A>(a, b), n) = x;
writeln!(f, " {} beats {} {} times", a, b, n)?;
}
write!(f, "}}")
}
}
pub fn build_graph<A, I, J>(alternatives: I, ballots: J) -> DuelGraph<A>
where
A: Clone + Eq + fmt::Debug + Hash,
I: Iterator<Item = A>,
J: Iterator<Item = ballot::Ballot<A>>,
{
let mut election = Election::new();
for alternative in alternatives {
election.add_alternative(&alternative);
}
for ballot in ballots {
election.cast(ballot);
}
election.get_duel_graph()
}
#[cfg(test)]
mod tests {
use super::*;
fn random_graph(names: &[String]) -> DuelGraph<String> {
let n = rand::random::<usize>() % names.len() + 1;
let v = names.iter().take(n).cloned().collect();
let mut a = Adjacency::from_element(n, n, false);
for i in 1..n {
for j in 0..i {
if rand::random::<f64>() < 0.5f64 {
a[(i, j)] = true;
} else if rand::random::<f64>() < 0.5f64 {
a[(j, i)] = true;
}
}
}
DuelGraph { v: v, a: a }
}
#[test]
fn source_strategy() {
let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"];
for n in 1..=names.len() {
for _ in 0..100 {
let mut m = Adjacency::from_element(n, n, false);
(0..n).for_each(|i| {
(0..i).for_each(|j| {
if rand::random::<f64>() < 0.5f64 {
m[(i, j)] = true;
} else {
m[(j, i)] = true;
}
})
});
let s = rand::random::<usize>() % n;
(0..n).filter(|i| *i != s).for_each(|i| {
m[(s, i)] = true;
m[(i, s)] = false;
});
let g = DuelGraph {
v: names.iter().cloned().take(n).collect(),
a: m,
};
let w;
match g.get_source() {
Some(x) => w = x,
None => panic!("No source in graph {}", g),
}
assert!(
g.get_minimax_strategy()
.unwrap()
.almost_chooses(&w.to_string(), 1e-6),
"Minimax doesn't choose {}",
w
);
assert!(
g.get_maximin_strategy()
.unwrap()
.almost_chooses(&w.to_string(), 1e-6),
"Minimax doesn't choose {}",
w
);
assert!(
g.get_optimal_strategy().unwrap().is_pure(),
"Optimal strategy is mixed"
);
}
}
}
#[test]
fn condorcet_paradox() {
let mut e = Election::<String>::new();
let mut b = vec![
Ballot::<String>::new(),
Ballot::<String>::new(),
Ballot::<String>::new(),
];
let names = string_vec!["Alpha", "Bravo", "Charlie"];
for (i, b) in b.iter_mut().enumerate() {
for j in 0u64..3u64 {
assert!(
b.insert(names[(i + (j as usize)) % 3].to_owned(), j, j),
"add_entry failed"
);
}
}
for b in b.iter().cloned() {
e.cast(b);
}
let g = e.get_duel_graph();
assert_eq!(g.get_source(), None);
assert_eq!(g.get_sink(), None);
assert!(
g.get_optimal_strategy().unwrap().is_uniform(&names, 1e-6),
"Non uniform strategy for Condorcet paradox"
);
}
// Last name commented out for convenience (doubles testing time)
#[test]
fn tournament() {
let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo" /*, "Foxtrot"*/];
for n in 1..=names.len() {
println!("Size {}", n);
let v: Vec<String> = names.iter().take(n).cloned().collect();
let mut a = Adjacency::from_element(n, n, false);
(0..(n - 1)).for_each(|i| ((i + 1)..n).for_each(|j| a[(i, j)] = true));
loop {
// Test graph
let g = DuelGraph::<String> {
v: v.clone(),
a: a.clone(),
};
match (g.get_minimax_strategy(), g.get_maximin_strategy()) {
(Ok(minimax), Ok(maximin)) => {
for _ in 0..100 {
let p = Strategy::random_mixed(&v, &mut rand::thread_rng());
let vminimax = g.confront_strategies(&minimax, &p);
let vmaximin = g.confront_strategies(&maximin, &p);
if vminimax < -1e-6 && vmaximin < -1e-6 {
panic!(
"{:?} beats both:\n * minimax by {}\n{:?}\n * maximin by {}\n{:?}",
p,
vminimax,
minimax,
vmaximin,
maximin
);
}
}
}
(Err(e), Ok(maximin)) => {
println!("{}\nMinimax failed: {}", g, e);
for _ in 0..100 {
let p = Strategy::random_mixed(&v, &mut rand::thread_rng());
let v = g.confront_strategies(&maximin, &p);
if v < -1e-6 {
panic!("{:?} beats maximin by {}\n{:?}", p, v, maximin);
}
}
}
(Ok(minimax), Err(e)) => {
println!("{}\nMaximin failed: {}", g, e);
for _ in 0..100 {
let p = Strategy::random_mixed(&v, &mut rand::thread_rng());
let v = g.confront_strategies(&minimax, &p);
if v < -1e-6 {
panic!("{:?} beats minimax by {}\n{:?}", p, v, minimax);
}
}
}
(Err(e), Err(f)) => {
panic!("{}\nBoth failed:\n * minimax: {}\n * maximin: {}", g, e, f)
}
};
// Next graph
let mut carry = true;
for i in 1..n {
for j in 0..i {
if !carry {
break;
}
if a[(i, j)] {
a[(i, j)] = false;
a[(j, i)] = true;
} else {
a[(i, j)] = true;
a[(j, i)] = false;
carry = false;
}
}
}
// Stop test
if (1..n).all(|i| (0..i).all(|j| !a[(i, j)])) {
break;
}
}
}
}
/*
* NOTE:
* Wasn't observed to fail anymore after fixing simplex; keep an eye on it
* anyway...
*/
#[test]
fn optimal_strategy() {
let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"];
for _pass in 0..1000 {
println!("Pass {}", _pass);
let g = random_graph(&names);
println!("{}", g);
match (g.get_minimax_strategy(), g.get_maximin_strategy()) {
(Ok(minimax), Ok(maximin)) => {
let opt = g.get_optimal_strategy().unwrap();
assert!(
g.confront_strategies(&opt, &minimax) > -1e-6,
"Minimax beats optimal strategy"
);
assert!(
g.confront_strategies(&opt, &maximin) > -1e-6,
"Maximin beats optimal strategy"
);
}
(Ok(minimax), Err(e)) => {
println!("Maximin failed: {}", e);
let opt = g.get_optimal_strategy().unwrap();
assert!(
g.confront_strategies(&opt, &minimax) > -1e-6,
"Minimax beats optimal strategy"
);
}
(Err(e), Ok(maximin)) => {
println!("Minimax failed: {}", e);
let opt = g.get_optimal_strategy().unwrap();
assert!(
g.confront_strategies(&opt, &maximin) > -1e-6,
"Maximin beats optimal strategy"
);
}
(Err(e), Err(f)) => panic!("Both failed: {}\n{}", e, f),
}
}
}
fn random_ballot(v: &[String]) -> Ballot<String> {
let mut b = Ballot::<String>::new();
for x in v.iter() {
let s = rand::random::<u64>();
let r = rand::random::<u64>() % (s + 1);
assert!(
b.insert(x.to_string(), r, s),
"Insert ({}, {}) failed",
r,
s
);
}
b
}
#[test]
fn agregate() {
let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"];
for _ in 0..50 {
let mut e = Election::<String>::new();
let mut sum = Election::<String>::new();
let num_district = rand::random::<u64>() % 49 + 2;
for _ in 0..num_district {
let mut f = Election::<String>::new();
let num_ballot = rand::random::<u64>() % 100;
for _ in 0..num_ballot {
let b = random_ballot(&names);
e.cast(b.clone());
f.cast(b);
}
f.close();
sum.agregate(f);
}
// e and sum must be identical
assert_eq!(e.alternatives, sum.alternatives, "Alternative lists differ");
for (a, n) in e.duels.into_iter() {
match sum.duels.get(&a) {
Some(m) => assert_eq!(*m, n, "{:?} is {} in e but {} in sum", a, n, m),
None => panic!("{:?} isn't in sum", a),
}
}
}
}
#[test]
fn normalize() {
let names = string_vec!["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"];
for _pass in 0..100 {
let mut e = Election::<String>::new();
for _ in 0..500 {
e.cast(random_ballot(&names));
}
let mut n = e.clone();
n.close();
n.normalize();
for x in n.alternatives.iter() {
let xx = Arrow::<String>(x.to_string(), x.to_string());
assert_eq!(n.duels.get(&xx), None, "{} wins over itself", x);
for y in n.alternatives.iter() {
let xy = Arrow::<String>(x.to_string(), y.to_string());
let yx = Arrow::<String>(y.to_string(), x.to_string());
if let Some(m) = n.duels.get(&xy) {
assert_eq!(n.duels.get(&yx), None, "{} and {} loop", x, y);
assert_eq!(*m, 1, "Normalized election has {}", m);
if let Some(n) = e.duels.get(&yx) {
assert!(e.duels.get(&xy).unwrap() > n, "Backward normalization");
}
}
}
}
}
}
#[test]
fn iterators() {
let mut b = vec![
Ballot::<String>::new(),
Ballot::<String>::new(),
Ballot::<String>::new(),
];
let names = string_vec!["Alpha", "Bravo", "Charlie"];
for (i, b) in b.iter_mut().enumerate() {
for j in 0u64..3u64 {
assert!(
b.insert(names[(i + (j as usize)) % 3].to_owned(), j, j),
"add_entry failed"
);
}
}
let g = build_graph(names.iter().cloned(), b.into_iter());
assert_eq!(g.get_source(), None);
assert_eq!(g.get_sink(), None);
assert!(
g.get_optimal_strategy().unwrap().is_uniform(&names, 1e-6),
"Non uniform strategy for Condorcet paradox"
);
}
}
| add_alternative |
Opengauss_Function_Tools_Gaussdb_Case0014.py | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : tools
Case Name : 启动gaussdb进程时,使用-e参数把缺省日期风格设置为"European"是否成功
Description :
1.查看当前日期风格
show datestyle;
2.关闭正在运行的数据库
gs_ctl stop -D /opt/openGauss_zl/cluster/dn1
3.查看进程,确定关闭成功
ps -ef|grep zl
4.使用gaussdb工具后台运行进程,缺省日期风格设置为"European"
gaussdb -D /opt/openGauss_zl/cluster/dn1 -p 19701 -e -M primary &
5.查看当前日期风格,是否为European风格
show datestyle;
Expect :
1.查看当前日期风格成功,显示为:ISO, MDY
2.关闭正在运行的数据库成功
3.查看进程,确定关闭成功
查看进程成功,确认数据库已关闭
4.使用gaussdb工具后台运行进程,缺省日期风格设置为"European"成功
5.查看当前日期风格,为European风格,显示为:ISO, DMY
show datestyle;
History :
"""
import unittest
from testcase.utils.ComThread import ComThread
from yat.test import Node
from yat.test import macro
from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Logger import Logger
class Tools(unittest.TestCase):
def setUp(self):
self.logger = Logger()
self.logger.info('--Opengauss_Function_Tools_Gaussdb_Case0014 start--')
self.userNode = Node('PrimaryDbUser')
self.userNode2 = Node('PrimaryDbUser')
self.DB_ENV_PATH = macro.DB_ENV_PATH
self.DB_INSTANCE_PATH = macro.DB_INSTANCE_PATH
self.sh_primy = CommonSH('PrimaryDbUser')
self.common = Common() | excute_cmd1 = f'source {self.DB_ENV_PATH};' \
f'gs_ctl stop -D {self.DB_INSTANCE_PATH}'
self.logger.info(excute_cmd1)
msg1 = self.userNode.sh(excute_cmd1).result()
self.logger.info(msg1)
self.logger.info('--------查看进程,确定关闭成功--------')
excute_cmd2 = f'ps -ef|grep {self.userNode.ssh_user}'
self.logger.info(excute_cmd2)
msg2 = self.userNode.sh(excute_cmd2).result()
self.logger.info(msg2)
self.assertFalse(self.DB_INSTANCE_PATH in msg2)
self.logger.info('使用gaussdb工具后台运行进程,缺省日期风格设置为European')
excute_cmd3 = f'source {self.DB_ENV_PATH};' \
f'gaussdb -D {self.DB_INSTANCE_PATH} -p ' \
f'{self.userNode.db_port} -e -M primary'
self.logger.info(excute_cmd3)
thread_2 = ComThread(self.userNode2.sh, args=(excute_cmd3,))
thread_2.setDaemon(True)
thread_2.start()
thread_2.join(10)
msg_result_2 = thread_2.get_result()
self.logger.info(msg_result_2)
self.logger.info('--------查看当前日期风格,是否为European风格--------')
sql_cmd3 = f'show datestyle;'
self.logger.info(excute_cmd3)
msg3 = self.sh_primy.execut_db_sql(sql_cmd3)
self.logger.info(msg3)
self.common.equal_sql_mdg(msg3, 'DateStyle', 'ISO, DMY', '(1 row)',
flag='1')
def tearDown(self):
self.logger.info('-Opengauss_Function_Tools_Gaussdb_Case0014 finish-') |
def test_systools(self):
self.logger.info('--------关闭正在运行的数据库--------') |
pulumiTypes.go | // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package codepipeline
import (
"context"
"reflect"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
type PipelineArtifactStore struct {
// The encryption key block AWS CodePipeline uses to encrypt the data in the artifact store, such as an AWS Key Management Service (AWS KMS) key. If you don't specify a key, AWS CodePipeline uses the default key for Amazon Simple Storage Service (Amazon S3). An `encryptionKey` block is documented below.
EncryptionKey *PipelineArtifactStoreEncryptionKey `pulumi:"encryptionKey"`
// The location where AWS CodePipeline stores artifacts for a pipeline; currently only `S3` is supported.
Location string `pulumi:"location"`
// The region where the artifact store is located. Required for a cross-region CodePipeline, do not provide for a single-region CodePipeline.
Region *string `pulumi:"region"`
// The type of the artifact store, such as Amazon S3
Type string `pulumi:"type"`
}
// PipelineArtifactStoreInput is an input type that accepts PipelineArtifactStoreArgs and PipelineArtifactStoreOutput values.
// You can construct a concrete instance of `PipelineArtifactStoreInput` via:
//
// PipelineArtifactStoreArgs{...}
type PipelineArtifactStoreInput interface {
pulumi.Input
ToPipelineArtifactStoreOutput() PipelineArtifactStoreOutput
ToPipelineArtifactStoreOutputWithContext(context.Context) PipelineArtifactStoreOutput
}
type PipelineArtifactStoreArgs struct {
// The encryption key block AWS CodePipeline uses to encrypt the data in the artifact store, such as an AWS Key Management Service (AWS KMS) key. If you don't specify a key, AWS CodePipeline uses the default key for Amazon Simple Storage Service (Amazon S3). An `encryptionKey` block is documented below.
EncryptionKey PipelineArtifactStoreEncryptionKeyPtrInput `pulumi:"encryptionKey"`
// The location where AWS CodePipeline stores artifacts for a pipeline; currently only `S3` is supported.
Location pulumi.StringInput `pulumi:"location"`
// The region where the artifact store is located. Required for a cross-region CodePipeline, do not provide for a single-region CodePipeline.
Region pulumi.StringPtrInput `pulumi:"region"`
// The type of the artifact store, such as Amazon S3
Type pulumi.StringInput `pulumi:"type"`
}
func (PipelineArtifactStoreArgs) ElementType() reflect.Type {
return reflect.TypeOf((*PipelineArtifactStore)(nil)).Elem()
}
func (i PipelineArtifactStoreArgs) ToPipelineArtifactStoreOutput() PipelineArtifactStoreOutput {
return i.ToPipelineArtifactStoreOutputWithContext(context.Background())
}
func (i PipelineArtifactStoreArgs) ToPipelineArtifactStoreOutputWithContext(ctx context.Context) PipelineArtifactStoreOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineArtifactStoreOutput)
}
func (i PipelineArtifactStoreArgs) ToPipelineArtifactStorePtrOutput() PipelineArtifactStorePtrOutput {
return i.ToPipelineArtifactStorePtrOutputWithContext(context.Background())
}
func (i PipelineArtifactStoreArgs) ToPipelineArtifactStorePtrOutputWithContext(ctx context.Context) PipelineArtifactStorePtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineArtifactStoreOutput).ToPipelineArtifactStorePtrOutputWithContext(ctx)
}
// PipelineArtifactStorePtrInput is an input type that accepts PipelineArtifactStoreArgs, PipelineArtifactStorePtr and PipelineArtifactStorePtrOutput values.
// You can construct a concrete instance of `PipelineArtifactStorePtrInput` via:
//
// PipelineArtifactStoreArgs{...}
//
// or:
//
// nil
type PipelineArtifactStorePtrInput interface {
pulumi.Input
ToPipelineArtifactStorePtrOutput() PipelineArtifactStorePtrOutput
ToPipelineArtifactStorePtrOutputWithContext(context.Context) PipelineArtifactStorePtrOutput
}
type pipelineArtifactStorePtrType PipelineArtifactStoreArgs
func | (v *PipelineArtifactStoreArgs) PipelineArtifactStorePtrInput {
return (*pipelineArtifactStorePtrType)(v)
}
func (*pipelineArtifactStorePtrType) ElementType() reflect.Type {
return reflect.TypeOf((**PipelineArtifactStore)(nil)).Elem()
}
func (i *pipelineArtifactStorePtrType) ToPipelineArtifactStorePtrOutput() PipelineArtifactStorePtrOutput {
return i.ToPipelineArtifactStorePtrOutputWithContext(context.Background())
}
func (i *pipelineArtifactStorePtrType) ToPipelineArtifactStorePtrOutputWithContext(ctx context.Context) PipelineArtifactStorePtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineArtifactStorePtrOutput)
}
type PipelineArtifactStoreOutput struct{ *pulumi.OutputState }
func (PipelineArtifactStoreOutput) ElementType() reflect.Type {
return reflect.TypeOf((*PipelineArtifactStore)(nil)).Elem()
}
func (o PipelineArtifactStoreOutput) ToPipelineArtifactStoreOutput() PipelineArtifactStoreOutput {
return o
}
func (o PipelineArtifactStoreOutput) ToPipelineArtifactStoreOutputWithContext(ctx context.Context) PipelineArtifactStoreOutput {
return o
}
func (o PipelineArtifactStoreOutput) ToPipelineArtifactStorePtrOutput() PipelineArtifactStorePtrOutput {
return o.ToPipelineArtifactStorePtrOutputWithContext(context.Background())
}
func (o PipelineArtifactStoreOutput) ToPipelineArtifactStorePtrOutputWithContext(ctx context.Context) PipelineArtifactStorePtrOutput {
return o.ApplyTWithContext(ctx, func(_ context.Context, v PipelineArtifactStore) *PipelineArtifactStore {
return &v
}).(PipelineArtifactStorePtrOutput)
}
// The encryption key block AWS CodePipeline uses to encrypt the data in the artifact store, such as an AWS Key Management Service (AWS KMS) key. If you don't specify a key, AWS CodePipeline uses the default key for Amazon Simple Storage Service (Amazon S3). An `encryptionKey` block is documented below.
func (o PipelineArtifactStoreOutput) EncryptionKey() PipelineArtifactStoreEncryptionKeyPtrOutput {
return o.ApplyT(func(v PipelineArtifactStore) *PipelineArtifactStoreEncryptionKey { return v.EncryptionKey }).(PipelineArtifactStoreEncryptionKeyPtrOutput)
}
// The location where AWS CodePipeline stores artifacts for a pipeline; currently only `S3` is supported.
func (o PipelineArtifactStoreOutput) Location() pulumi.StringOutput {
return o.ApplyT(func(v PipelineArtifactStore) string { return v.Location }).(pulumi.StringOutput)
}
// The region where the artifact store is located. Required for a cross-region CodePipeline, do not provide for a single-region CodePipeline.
func (o PipelineArtifactStoreOutput) Region() pulumi.StringPtrOutput {
return o.ApplyT(func(v PipelineArtifactStore) *string { return v.Region }).(pulumi.StringPtrOutput)
}
// The type of the artifact store, such as Amazon S3
func (o PipelineArtifactStoreOutput) Type() pulumi.StringOutput {
return o.ApplyT(func(v PipelineArtifactStore) string { return v.Type }).(pulumi.StringOutput)
}
type PipelineArtifactStorePtrOutput struct{ *pulumi.OutputState }
func (PipelineArtifactStorePtrOutput) ElementType() reflect.Type {
return reflect.TypeOf((**PipelineArtifactStore)(nil)).Elem()
}
func (o PipelineArtifactStorePtrOutput) ToPipelineArtifactStorePtrOutput() PipelineArtifactStorePtrOutput {
return o
}
func (o PipelineArtifactStorePtrOutput) ToPipelineArtifactStorePtrOutputWithContext(ctx context.Context) PipelineArtifactStorePtrOutput {
return o
}
func (o PipelineArtifactStorePtrOutput) Elem() PipelineArtifactStoreOutput {
return o.ApplyT(func(v *PipelineArtifactStore) PipelineArtifactStore {
if v != nil {
return *v
}
var ret PipelineArtifactStore
return ret
}).(PipelineArtifactStoreOutput)
}
// The encryption key block AWS CodePipeline uses to encrypt the data in the artifact store, such as an AWS Key Management Service (AWS KMS) key. If you don't specify a key, AWS CodePipeline uses the default key for Amazon Simple Storage Service (Amazon S3). An `encryptionKey` block is documented below.
func (o PipelineArtifactStorePtrOutput) EncryptionKey() PipelineArtifactStoreEncryptionKeyPtrOutput {
return o.ApplyT(func(v *PipelineArtifactStore) *PipelineArtifactStoreEncryptionKey {
if v == nil {
return nil
}
return v.EncryptionKey
}).(PipelineArtifactStoreEncryptionKeyPtrOutput)
}
// The location where AWS CodePipeline stores artifacts for a pipeline; currently only `S3` is supported.
func (o PipelineArtifactStorePtrOutput) Location() pulumi.StringPtrOutput {
return o.ApplyT(func(v *PipelineArtifactStore) *string {
if v == nil {
return nil
}
return &v.Location
}).(pulumi.StringPtrOutput)
}
// The region where the artifact store is located. Required for a cross-region CodePipeline, do not provide for a single-region CodePipeline.
func (o PipelineArtifactStorePtrOutput) Region() pulumi.StringPtrOutput {
return o.ApplyT(func(v *PipelineArtifactStore) *string {
if v == nil {
return nil
}
return v.Region
}).(pulumi.StringPtrOutput)
}
// The type of the artifact store, such as Amazon S3
func (o PipelineArtifactStorePtrOutput) Type() pulumi.StringPtrOutput {
return o.ApplyT(func(v *PipelineArtifactStore) *string {
if v == nil {
return nil
}
return &v.Type
}).(pulumi.StringPtrOutput)
}
type PipelineArtifactStoreEncryptionKey struct {
// The KMS key ARN or ID
Id string `pulumi:"id"`
// The type of key; currently only `KMS` is supported
Type string `pulumi:"type"`
}
// PipelineArtifactStoreEncryptionKeyInput is an input type that accepts PipelineArtifactStoreEncryptionKeyArgs and PipelineArtifactStoreEncryptionKeyOutput values.
// You can construct a concrete instance of `PipelineArtifactStoreEncryptionKeyInput` via:
//
// PipelineArtifactStoreEncryptionKeyArgs{...}
type PipelineArtifactStoreEncryptionKeyInput interface {
pulumi.Input
ToPipelineArtifactStoreEncryptionKeyOutput() PipelineArtifactStoreEncryptionKeyOutput
ToPipelineArtifactStoreEncryptionKeyOutputWithContext(context.Context) PipelineArtifactStoreEncryptionKeyOutput
}
type PipelineArtifactStoreEncryptionKeyArgs struct {
// The KMS key ARN or ID
Id pulumi.StringInput `pulumi:"id"`
// The type of key; currently only `KMS` is supported
Type pulumi.StringInput `pulumi:"type"`
}
func (PipelineArtifactStoreEncryptionKeyArgs) ElementType() reflect.Type {
return reflect.TypeOf((*PipelineArtifactStoreEncryptionKey)(nil)).Elem()
}
func (i PipelineArtifactStoreEncryptionKeyArgs) ToPipelineArtifactStoreEncryptionKeyOutput() PipelineArtifactStoreEncryptionKeyOutput {
return i.ToPipelineArtifactStoreEncryptionKeyOutputWithContext(context.Background())
}
func (i PipelineArtifactStoreEncryptionKeyArgs) ToPipelineArtifactStoreEncryptionKeyOutputWithContext(ctx context.Context) PipelineArtifactStoreEncryptionKeyOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineArtifactStoreEncryptionKeyOutput)
}
func (i PipelineArtifactStoreEncryptionKeyArgs) ToPipelineArtifactStoreEncryptionKeyPtrOutput() PipelineArtifactStoreEncryptionKeyPtrOutput {
return i.ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(context.Background())
}
func (i PipelineArtifactStoreEncryptionKeyArgs) ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(ctx context.Context) PipelineArtifactStoreEncryptionKeyPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineArtifactStoreEncryptionKeyOutput).ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(ctx)
}
// PipelineArtifactStoreEncryptionKeyPtrInput is an input type that accepts PipelineArtifactStoreEncryptionKeyArgs, PipelineArtifactStoreEncryptionKeyPtr and PipelineArtifactStoreEncryptionKeyPtrOutput values.
// You can construct a concrete instance of `PipelineArtifactStoreEncryptionKeyPtrInput` via:
//
// PipelineArtifactStoreEncryptionKeyArgs{...}
//
// or:
//
// nil
type PipelineArtifactStoreEncryptionKeyPtrInput interface {
pulumi.Input
ToPipelineArtifactStoreEncryptionKeyPtrOutput() PipelineArtifactStoreEncryptionKeyPtrOutput
ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(context.Context) PipelineArtifactStoreEncryptionKeyPtrOutput
}
type pipelineArtifactStoreEncryptionKeyPtrType PipelineArtifactStoreEncryptionKeyArgs
func PipelineArtifactStoreEncryptionKeyPtr(v *PipelineArtifactStoreEncryptionKeyArgs) PipelineArtifactStoreEncryptionKeyPtrInput {
return (*pipelineArtifactStoreEncryptionKeyPtrType)(v)
}
func (*pipelineArtifactStoreEncryptionKeyPtrType) ElementType() reflect.Type {
return reflect.TypeOf((**PipelineArtifactStoreEncryptionKey)(nil)).Elem()
}
func (i *pipelineArtifactStoreEncryptionKeyPtrType) ToPipelineArtifactStoreEncryptionKeyPtrOutput() PipelineArtifactStoreEncryptionKeyPtrOutput {
return i.ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(context.Background())
}
func (i *pipelineArtifactStoreEncryptionKeyPtrType) ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(ctx context.Context) PipelineArtifactStoreEncryptionKeyPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineArtifactStoreEncryptionKeyPtrOutput)
}
type PipelineArtifactStoreEncryptionKeyOutput struct{ *pulumi.OutputState }
func (PipelineArtifactStoreEncryptionKeyOutput) ElementType() reflect.Type {
return reflect.TypeOf((*PipelineArtifactStoreEncryptionKey)(nil)).Elem()
}
func (o PipelineArtifactStoreEncryptionKeyOutput) ToPipelineArtifactStoreEncryptionKeyOutput() PipelineArtifactStoreEncryptionKeyOutput {
return o
}
func (o PipelineArtifactStoreEncryptionKeyOutput) ToPipelineArtifactStoreEncryptionKeyOutputWithContext(ctx context.Context) PipelineArtifactStoreEncryptionKeyOutput {
return o
}
func (o PipelineArtifactStoreEncryptionKeyOutput) ToPipelineArtifactStoreEncryptionKeyPtrOutput() PipelineArtifactStoreEncryptionKeyPtrOutput {
return o.ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(context.Background())
}
func (o PipelineArtifactStoreEncryptionKeyOutput) ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(ctx context.Context) PipelineArtifactStoreEncryptionKeyPtrOutput {
return o.ApplyTWithContext(ctx, func(_ context.Context, v PipelineArtifactStoreEncryptionKey) *PipelineArtifactStoreEncryptionKey {
return &v
}).(PipelineArtifactStoreEncryptionKeyPtrOutput)
}
// The KMS key ARN or ID
func (o PipelineArtifactStoreEncryptionKeyOutput) Id() pulumi.StringOutput {
return o.ApplyT(func(v PipelineArtifactStoreEncryptionKey) string { return v.Id }).(pulumi.StringOutput)
}
// The type of key; currently only `KMS` is supported
func (o PipelineArtifactStoreEncryptionKeyOutput) Type() pulumi.StringOutput {
return o.ApplyT(func(v PipelineArtifactStoreEncryptionKey) string { return v.Type }).(pulumi.StringOutput)
}
type PipelineArtifactStoreEncryptionKeyPtrOutput struct{ *pulumi.OutputState }
func (PipelineArtifactStoreEncryptionKeyPtrOutput) ElementType() reflect.Type {
return reflect.TypeOf((**PipelineArtifactStoreEncryptionKey)(nil)).Elem()
}
func (o PipelineArtifactStoreEncryptionKeyPtrOutput) ToPipelineArtifactStoreEncryptionKeyPtrOutput() PipelineArtifactStoreEncryptionKeyPtrOutput {
return o
}
func (o PipelineArtifactStoreEncryptionKeyPtrOutput) ToPipelineArtifactStoreEncryptionKeyPtrOutputWithContext(ctx context.Context) PipelineArtifactStoreEncryptionKeyPtrOutput {
return o
}
func (o PipelineArtifactStoreEncryptionKeyPtrOutput) Elem() PipelineArtifactStoreEncryptionKeyOutput {
return o.ApplyT(func(v *PipelineArtifactStoreEncryptionKey) PipelineArtifactStoreEncryptionKey {
if v != nil {
return *v
}
var ret PipelineArtifactStoreEncryptionKey
return ret
}).(PipelineArtifactStoreEncryptionKeyOutput)
}
// The KMS key ARN or ID
func (o PipelineArtifactStoreEncryptionKeyPtrOutput) Id() pulumi.StringPtrOutput {
return o.ApplyT(func(v *PipelineArtifactStoreEncryptionKey) *string {
if v == nil {
return nil
}
return &v.Id
}).(pulumi.StringPtrOutput)
}
// The type of key; currently only `KMS` is supported
func (o PipelineArtifactStoreEncryptionKeyPtrOutput) Type() pulumi.StringPtrOutput {
return o.ApplyT(func(v *PipelineArtifactStoreEncryptionKey) *string {
if v == nil {
return nil
}
return &v.Type
}).(pulumi.StringPtrOutput)
}
type PipelineStage struct {
// The action(s) to include in the stage. Defined as an `action` block below
Actions []PipelineStageAction `pulumi:"actions"`
// The name of the stage.
Name string `pulumi:"name"`
}
// PipelineStageInput is an input type that accepts PipelineStageArgs and PipelineStageOutput values.
// You can construct a concrete instance of `PipelineStageInput` via:
//
// PipelineStageArgs{...}
type PipelineStageInput interface {
pulumi.Input
ToPipelineStageOutput() PipelineStageOutput
ToPipelineStageOutputWithContext(context.Context) PipelineStageOutput
}
type PipelineStageArgs struct {
// The action(s) to include in the stage. Defined as an `action` block below
Actions PipelineStageActionArrayInput `pulumi:"actions"`
// The name of the stage.
Name pulumi.StringInput `pulumi:"name"`
}
func (PipelineStageArgs) ElementType() reflect.Type {
return reflect.TypeOf((*PipelineStage)(nil)).Elem()
}
func (i PipelineStageArgs) ToPipelineStageOutput() PipelineStageOutput {
return i.ToPipelineStageOutputWithContext(context.Background())
}
func (i PipelineStageArgs) ToPipelineStageOutputWithContext(ctx context.Context) PipelineStageOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineStageOutput)
}
// PipelineStageArrayInput is an input type that accepts PipelineStageArray and PipelineStageArrayOutput values.
// You can construct a concrete instance of `PipelineStageArrayInput` via:
//
// PipelineStageArray{ PipelineStageArgs{...} }
type PipelineStageArrayInput interface {
pulumi.Input
ToPipelineStageArrayOutput() PipelineStageArrayOutput
ToPipelineStageArrayOutputWithContext(context.Context) PipelineStageArrayOutput
}
type PipelineStageArray []PipelineStageInput
func (PipelineStageArray) ElementType() reflect.Type {
return reflect.TypeOf((*[]PipelineStage)(nil)).Elem()
}
func (i PipelineStageArray) ToPipelineStageArrayOutput() PipelineStageArrayOutput {
return i.ToPipelineStageArrayOutputWithContext(context.Background())
}
func (i PipelineStageArray) ToPipelineStageArrayOutputWithContext(ctx context.Context) PipelineStageArrayOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineStageArrayOutput)
}
type PipelineStageOutput struct{ *pulumi.OutputState }
func (PipelineStageOutput) ElementType() reflect.Type {
return reflect.TypeOf((*PipelineStage)(nil)).Elem()
}
func (o PipelineStageOutput) ToPipelineStageOutput() PipelineStageOutput {
return o
}
func (o PipelineStageOutput) ToPipelineStageOutputWithContext(ctx context.Context) PipelineStageOutput {
return o
}
// The action(s) to include in the stage. Defined as an `action` block below
func (o PipelineStageOutput) Actions() PipelineStageActionArrayOutput {
return o.ApplyT(func(v PipelineStage) []PipelineStageAction { return v.Actions }).(PipelineStageActionArrayOutput)
}
// The name of the stage.
func (o PipelineStageOutput) Name() pulumi.StringOutput {
return o.ApplyT(func(v PipelineStage) string { return v.Name }).(pulumi.StringOutput)
}
type PipelineStageArrayOutput struct{ *pulumi.OutputState }
func (PipelineStageArrayOutput) ElementType() reflect.Type {
return reflect.TypeOf((*[]PipelineStage)(nil)).Elem()
}
func (o PipelineStageArrayOutput) ToPipelineStageArrayOutput() PipelineStageArrayOutput {
return o
}
func (o PipelineStageArrayOutput) ToPipelineStageArrayOutputWithContext(ctx context.Context) PipelineStageArrayOutput {
return o
}
func (o PipelineStageArrayOutput) Index(i pulumi.IntInput) PipelineStageOutput {
return pulumi.All(o, i).ApplyT(func(vs []interface{}) PipelineStage {
return vs[0].([]PipelineStage)[vs[1].(int)]
}).(PipelineStageOutput)
}
type PipelineStageAction struct {
// A category defines what kind of action can be taken in the stage, and constrains the provider type for the action. Possible values are `Approval`, `Build`, `Deploy`, `Invoke`, `Source` and `Test`.
Category string `pulumi:"category"`
// A map of the action declaration's configuration. Configurations options for action types and providers can be found in the [Pipeline Structure Reference](http://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#action-requirements) and [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
Configuration map[string]string `pulumi:"configuration"`
// A list of artifact names to be worked on.
InputArtifacts []string `pulumi:"inputArtifacts"`
// The action declaration's name.
Name string `pulumi:"name"`
// The namespace all output variables will be accessed from.
Namespace *string `pulumi:"namespace"`
// A list of artifact names to output. Output artifact names must be unique within a pipeline.
OutputArtifacts []string `pulumi:"outputArtifacts"`
// The creator of the action being called. Possible values are `AWS`, `Custom` and `ThirdParty`.
Owner string `pulumi:"owner"`
// The provider of the service being called by the action. Valid providers are determined by the action category. Provider names are listed in the [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
Provider string `pulumi:"provider"`
// The region in which to run the action.
Region *string `pulumi:"region"`
// The ARN of the IAM service role that will perform the declared action. This is assumed through the roleArn for the pipeline.
RoleArn *string `pulumi:"roleArn"`
// The order in which actions are run.
RunOrder *int `pulumi:"runOrder"`
// A string that identifies the action type.
Version string `pulumi:"version"`
}
// PipelineStageActionInput is an input type that accepts PipelineStageActionArgs and PipelineStageActionOutput values.
// You can construct a concrete instance of `PipelineStageActionInput` via:
//
// PipelineStageActionArgs{...}
type PipelineStageActionInput interface {
pulumi.Input
ToPipelineStageActionOutput() PipelineStageActionOutput
ToPipelineStageActionOutputWithContext(context.Context) PipelineStageActionOutput
}
type PipelineStageActionArgs struct {
// A category defines what kind of action can be taken in the stage, and constrains the provider type for the action. Possible values are `Approval`, `Build`, `Deploy`, `Invoke`, `Source` and `Test`.
Category pulumi.StringInput `pulumi:"category"`
// A map of the action declaration's configuration. Configurations options for action types and providers can be found in the [Pipeline Structure Reference](http://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#action-requirements) and [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
Configuration pulumi.StringMapInput `pulumi:"configuration"`
// A list of artifact names to be worked on.
InputArtifacts pulumi.StringArrayInput `pulumi:"inputArtifacts"`
// The action declaration's name.
Name pulumi.StringInput `pulumi:"name"`
// The namespace all output variables will be accessed from.
Namespace pulumi.StringPtrInput `pulumi:"namespace"`
// A list of artifact names to output. Output artifact names must be unique within a pipeline.
OutputArtifacts pulumi.StringArrayInput `pulumi:"outputArtifacts"`
// The creator of the action being called. Possible values are `AWS`, `Custom` and `ThirdParty`.
Owner pulumi.StringInput `pulumi:"owner"`
// The provider of the service being called by the action. Valid providers are determined by the action category. Provider names are listed in the [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
Provider pulumi.StringInput `pulumi:"provider"`
// The region in which to run the action.
Region pulumi.StringPtrInput `pulumi:"region"`
// The ARN of the IAM service role that will perform the declared action. This is assumed through the roleArn for the pipeline.
RoleArn pulumi.StringPtrInput `pulumi:"roleArn"`
// The order in which actions are run.
RunOrder pulumi.IntPtrInput `pulumi:"runOrder"`
// A string that identifies the action type.
Version pulumi.StringInput `pulumi:"version"`
}
func (PipelineStageActionArgs) ElementType() reflect.Type {
return reflect.TypeOf((*PipelineStageAction)(nil)).Elem()
}
func (i PipelineStageActionArgs) ToPipelineStageActionOutput() PipelineStageActionOutput {
return i.ToPipelineStageActionOutputWithContext(context.Background())
}
func (i PipelineStageActionArgs) ToPipelineStageActionOutputWithContext(ctx context.Context) PipelineStageActionOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineStageActionOutput)
}
// PipelineStageActionArrayInput is an input type that accepts PipelineStageActionArray and PipelineStageActionArrayOutput values.
// You can construct a concrete instance of `PipelineStageActionArrayInput` via:
//
// PipelineStageActionArray{ PipelineStageActionArgs{...} }
type PipelineStageActionArrayInput interface {
pulumi.Input
ToPipelineStageActionArrayOutput() PipelineStageActionArrayOutput
ToPipelineStageActionArrayOutputWithContext(context.Context) PipelineStageActionArrayOutput
}
type PipelineStageActionArray []PipelineStageActionInput
func (PipelineStageActionArray) ElementType() reflect.Type {
return reflect.TypeOf((*[]PipelineStageAction)(nil)).Elem()
}
func (i PipelineStageActionArray) ToPipelineStageActionArrayOutput() PipelineStageActionArrayOutput {
return i.ToPipelineStageActionArrayOutputWithContext(context.Background())
}
func (i PipelineStageActionArray) ToPipelineStageActionArrayOutputWithContext(ctx context.Context) PipelineStageActionArrayOutput {
return pulumi.ToOutputWithContext(ctx, i).(PipelineStageActionArrayOutput)
}
type PipelineStageActionOutput struct{ *pulumi.OutputState }
func (PipelineStageActionOutput) ElementType() reflect.Type {
return reflect.TypeOf((*PipelineStageAction)(nil)).Elem()
}
func (o PipelineStageActionOutput) ToPipelineStageActionOutput() PipelineStageActionOutput {
return o
}
func (o PipelineStageActionOutput) ToPipelineStageActionOutputWithContext(ctx context.Context) PipelineStageActionOutput {
return o
}
// A category defines what kind of action can be taken in the stage, and constrains the provider type for the action. Possible values are `Approval`, `Build`, `Deploy`, `Invoke`, `Source` and `Test`.
func (o PipelineStageActionOutput) Category() pulumi.StringOutput {
return o.ApplyT(func(v PipelineStageAction) string { return v.Category }).(pulumi.StringOutput)
}
// A map of the action declaration's configuration. Configurations options for action types and providers can be found in the [Pipeline Structure Reference](http://docs.aws.amazon.com/codepipeline/latest/userguide/reference-pipeline-structure.html#action-requirements) and [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
func (o PipelineStageActionOutput) Configuration() pulumi.StringMapOutput {
return o.ApplyT(func(v PipelineStageAction) map[string]string { return v.Configuration }).(pulumi.StringMapOutput)
}
// A list of artifact names to be worked on.
func (o PipelineStageActionOutput) InputArtifacts() pulumi.StringArrayOutput {
return o.ApplyT(func(v PipelineStageAction) []string { return v.InputArtifacts }).(pulumi.StringArrayOutput)
}
// The action declaration's name.
func (o PipelineStageActionOutput) Name() pulumi.StringOutput {
return o.ApplyT(func(v PipelineStageAction) string { return v.Name }).(pulumi.StringOutput)
}
// The namespace all output variables will be accessed from.
func (o PipelineStageActionOutput) Namespace() pulumi.StringPtrOutput {
return o.ApplyT(func(v PipelineStageAction) *string { return v.Namespace }).(pulumi.StringPtrOutput)
}
// A list of artifact names to output. Output artifact names must be unique within a pipeline.
func (o PipelineStageActionOutput) OutputArtifacts() pulumi.StringArrayOutput {
return o.ApplyT(func(v PipelineStageAction) []string { return v.OutputArtifacts }).(pulumi.StringArrayOutput)
}
// The creator of the action being called. Possible values are `AWS`, `Custom` and `ThirdParty`.
func (o PipelineStageActionOutput) Owner() pulumi.StringOutput {
return o.ApplyT(func(v PipelineStageAction) string { return v.Owner }).(pulumi.StringOutput)
}
// The provider of the service being called by the action. Valid providers are determined by the action category. Provider names are listed in the [Action Structure Reference](https://docs.aws.amazon.com/codepipeline/latest/userguide/action-reference.html) documentation.
func (o PipelineStageActionOutput) Provider() pulumi.StringOutput {
return o.ApplyT(func(v PipelineStageAction) string { return v.Provider }).(pulumi.StringOutput)
}
// The region in which to run the action.
func (o PipelineStageActionOutput) Region() pulumi.StringPtrOutput {
return o.ApplyT(func(v PipelineStageAction) *string { return v.Region }).(pulumi.StringPtrOutput)
}
// The ARN of the IAM service role that will perform the declared action. This is assumed through the roleArn for the pipeline.
func (o PipelineStageActionOutput) RoleArn() pulumi.StringPtrOutput {
return o.ApplyT(func(v PipelineStageAction) *string { return v.RoleArn }).(pulumi.StringPtrOutput)
}
// The order in which actions are run.
func (o PipelineStageActionOutput) RunOrder() pulumi.IntPtrOutput {
return o.ApplyT(func(v PipelineStageAction) *int { return v.RunOrder }).(pulumi.IntPtrOutput)
}
// A string that identifies the action type.
func (o PipelineStageActionOutput) Version() pulumi.StringOutput {
return o.ApplyT(func(v PipelineStageAction) string { return v.Version }).(pulumi.StringOutput)
}
type PipelineStageActionArrayOutput struct{ *pulumi.OutputState }
func (PipelineStageActionArrayOutput) ElementType() reflect.Type {
return reflect.TypeOf((*[]PipelineStageAction)(nil)).Elem()
}
func (o PipelineStageActionArrayOutput) ToPipelineStageActionArrayOutput() PipelineStageActionArrayOutput {
return o
}
func (o PipelineStageActionArrayOutput) ToPipelineStageActionArrayOutputWithContext(ctx context.Context) PipelineStageActionArrayOutput {
return o
}
func (o PipelineStageActionArrayOutput) Index(i pulumi.IntInput) PipelineStageActionOutput {
return pulumi.All(o, i).ApplyT(func(vs []interface{}) PipelineStageAction {
return vs[0].([]PipelineStageAction)[vs[1].(int)]
}).(PipelineStageActionOutput)
}
type WebhookAuthenticationConfiguration struct {
// A valid CIDR block for `IP` filtering. Required for `IP`.
AllowedIpRange *string `pulumi:"allowedIpRange"`
// The shared secret for the GitHub repository webhook. Set this as `secret` in your `githubRepositoryWebhook`'s `configuration` block. Required for `GITHUB_HMAC`.
SecretToken *string `pulumi:"secretToken"`
}
// WebhookAuthenticationConfigurationInput is an input type that accepts WebhookAuthenticationConfigurationArgs and WebhookAuthenticationConfigurationOutput values.
// You can construct a concrete instance of `WebhookAuthenticationConfigurationInput` via:
//
// WebhookAuthenticationConfigurationArgs{...}
type WebhookAuthenticationConfigurationInput interface {
pulumi.Input
ToWebhookAuthenticationConfigurationOutput() WebhookAuthenticationConfigurationOutput
ToWebhookAuthenticationConfigurationOutputWithContext(context.Context) WebhookAuthenticationConfigurationOutput
}
type WebhookAuthenticationConfigurationArgs struct {
// A valid CIDR block for `IP` filtering. Required for `IP`.
AllowedIpRange pulumi.StringPtrInput `pulumi:"allowedIpRange"`
// The shared secret for the GitHub repository webhook. Set this as `secret` in your `githubRepositoryWebhook`'s `configuration` block. Required for `GITHUB_HMAC`.
SecretToken pulumi.StringPtrInput `pulumi:"secretToken"`
}
func (WebhookAuthenticationConfigurationArgs) ElementType() reflect.Type {
return reflect.TypeOf((*WebhookAuthenticationConfiguration)(nil)).Elem()
}
func (i WebhookAuthenticationConfigurationArgs) ToWebhookAuthenticationConfigurationOutput() WebhookAuthenticationConfigurationOutput {
return i.ToWebhookAuthenticationConfigurationOutputWithContext(context.Background())
}
func (i WebhookAuthenticationConfigurationArgs) ToWebhookAuthenticationConfigurationOutputWithContext(ctx context.Context) WebhookAuthenticationConfigurationOutput {
return pulumi.ToOutputWithContext(ctx, i).(WebhookAuthenticationConfigurationOutput)
}
func (i WebhookAuthenticationConfigurationArgs) ToWebhookAuthenticationConfigurationPtrOutput() WebhookAuthenticationConfigurationPtrOutput {
return i.ToWebhookAuthenticationConfigurationPtrOutputWithContext(context.Background())
}
func (i WebhookAuthenticationConfigurationArgs) ToWebhookAuthenticationConfigurationPtrOutputWithContext(ctx context.Context) WebhookAuthenticationConfigurationPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(WebhookAuthenticationConfigurationOutput).ToWebhookAuthenticationConfigurationPtrOutputWithContext(ctx)
}
// WebhookAuthenticationConfigurationPtrInput is an input type that accepts WebhookAuthenticationConfigurationArgs, WebhookAuthenticationConfigurationPtr and WebhookAuthenticationConfigurationPtrOutput values.
// You can construct a concrete instance of `WebhookAuthenticationConfigurationPtrInput` via:
//
// WebhookAuthenticationConfigurationArgs{...}
//
// or:
//
// nil
type WebhookAuthenticationConfigurationPtrInput interface {
pulumi.Input
ToWebhookAuthenticationConfigurationPtrOutput() WebhookAuthenticationConfigurationPtrOutput
ToWebhookAuthenticationConfigurationPtrOutputWithContext(context.Context) WebhookAuthenticationConfigurationPtrOutput
}
type webhookAuthenticationConfigurationPtrType WebhookAuthenticationConfigurationArgs
func WebhookAuthenticationConfigurationPtr(v *WebhookAuthenticationConfigurationArgs) WebhookAuthenticationConfigurationPtrInput {
return (*webhookAuthenticationConfigurationPtrType)(v)
}
func (*webhookAuthenticationConfigurationPtrType) ElementType() reflect.Type {
return reflect.TypeOf((**WebhookAuthenticationConfiguration)(nil)).Elem()
}
func (i *webhookAuthenticationConfigurationPtrType) ToWebhookAuthenticationConfigurationPtrOutput() WebhookAuthenticationConfigurationPtrOutput {
return i.ToWebhookAuthenticationConfigurationPtrOutputWithContext(context.Background())
}
func (i *webhookAuthenticationConfigurationPtrType) ToWebhookAuthenticationConfigurationPtrOutputWithContext(ctx context.Context) WebhookAuthenticationConfigurationPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(WebhookAuthenticationConfigurationPtrOutput)
}
type WebhookAuthenticationConfigurationOutput struct{ *pulumi.OutputState }
func (WebhookAuthenticationConfigurationOutput) ElementType() reflect.Type {
return reflect.TypeOf((*WebhookAuthenticationConfiguration)(nil)).Elem()
}
func (o WebhookAuthenticationConfigurationOutput) ToWebhookAuthenticationConfigurationOutput() WebhookAuthenticationConfigurationOutput {
return o
}
func (o WebhookAuthenticationConfigurationOutput) ToWebhookAuthenticationConfigurationOutputWithContext(ctx context.Context) WebhookAuthenticationConfigurationOutput {
return o
}
func (o WebhookAuthenticationConfigurationOutput) ToWebhookAuthenticationConfigurationPtrOutput() WebhookAuthenticationConfigurationPtrOutput {
return o.ToWebhookAuthenticationConfigurationPtrOutputWithContext(context.Background())
}
func (o WebhookAuthenticationConfigurationOutput) ToWebhookAuthenticationConfigurationPtrOutputWithContext(ctx context.Context) WebhookAuthenticationConfigurationPtrOutput {
return o.ApplyTWithContext(ctx, func(_ context.Context, v WebhookAuthenticationConfiguration) *WebhookAuthenticationConfiguration {
return &v
}).(WebhookAuthenticationConfigurationPtrOutput)
}
// A valid CIDR block for `IP` filtering. Required for `IP`.
func (o WebhookAuthenticationConfigurationOutput) AllowedIpRange() pulumi.StringPtrOutput {
return o.ApplyT(func(v WebhookAuthenticationConfiguration) *string { return v.AllowedIpRange }).(pulumi.StringPtrOutput)
}
// The shared secret for the GitHub repository webhook. Set this as `secret` in your `githubRepositoryWebhook`'s `configuration` block. Required for `GITHUB_HMAC`.
func (o WebhookAuthenticationConfigurationOutput) SecretToken() pulumi.StringPtrOutput {
return o.ApplyT(func(v WebhookAuthenticationConfiguration) *string { return v.SecretToken }).(pulumi.StringPtrOutput)
}
type WebhookAuthenticationConfigurationPtrOutput struct{ *pulumi.OutputState }
func (WebhookAuthenticationConfigurationPtrOutput) ElementType() reflect.Type {
return reflect.TypeOf((**WebhookAuthenticationConfiguration)(nil)).Elem()
}
func (o WebhookAuthenticationConfigurationPtrOutput) ToWebhookAuthenticationConfigurationPtrOutput() WebhookAuthenticationConfigurationPtrOutput {
return o
}
func (o WebhookAuthenticationConfigurationPtrOutput) ToWebhookAuthenticationConfigurationPtrOutputWithContext(ctx context.Context) WebhookAuthenticationConfigurationPtrOutput {
return o
}
func (o WebhookAuthenticationConfigurationPtrOutput) Elem() WebhookAuthenticationConfigurationOutput {
return o.ApplyT(func(v *WebhookAuthenticationConfiguration) WebhookAuthenticationConfiguration {
if v != nil {
return *v
}
var ret WebhookAuthenticationConfiguration
return ret
}).(WebhookAuthenticationConfigurationOutput)
}
// A valid CIDR block for `IP` filtering. Required for `IP`.
func (o WebhookAuthenticationConfigurationPtrOutput) AllowedIpRange() pulumi.StringPtrOutput {
return o.ApplyT(func(v *WebhookAuthenticationConfiguration) *string {
if v == nil {
return nil
}
return v.AllowedIpRange
}).(pulumi.StringPtrOutput)
}
// The shared secret for the GitHub repository webhook. Set this as `secret` in your `githubRepositoryWebhook`'s `configuration` block. Required for `GITHUB_HMAC`.
func (o WebhookAuthenticationConfigurationPtrOutput) SecretToken() pulumi.StringPtrOutput {
return o.ApplyT(func(v *WebhookAuthenticationConfiguration) *string {
if v == nil {
return nil
}
return v.SecretToken
}).(pulumi.StringPtrOutput)
}
type WebhookFilter struct {
// The [JSON path](https://github.com/json-path/JsonPath) to filter on.
JsonPath string `pulumi:"jsonPath"`
// The value to match on (e.g. `refs/heads/{Branch}`). See [AWS docs](https://docs.aws.amazon.com/codepipeline/latest/APIReference/API_WebhookFilterRule.html) for details.
MatchEquals string `pulumi:"matchEquals"`
}
// WebhookFilterInput is an input type that accepts WebhookFilterArgs and WebhookFilterOutput values.
// You can construct a concrete instance of `WebhookFilterInput` via:
//
// WebhookFilterArgs{...}
type WebhookFilterInput interface {
pulumi.Input
ToWebhookFilterOutput() WebhookFilterOutput
ToWebhookFilterOutputWithContext(context.Context) WebhookFilterOutput
}
type WebhookFilterArgs struct {
// The [JSON path](https://github.com/json-path/JsonPath) to filter on.
JsonPath pulumi.StringInput `pulumi:"jsonPath"`
// The value to match on (e.g. `refs/heads/{Branch}`). See [AWS docs](https://docs.aws.amazon.com/codepipeline/latest/APIReference/API_WebhookFilterRule.html) for details.
MatchEquals pulumi.StringInput `pulumi:"matchEquals"`
}
func (WebhookFilterArgs) ElementType() reflect.Type {
return reflect.TypeOf((*WebhookFilter)(nil)).Elem()
}
func (i WebhookFilterArgs) ToWebhookFilterOutput() WebhookFilterOutput {
return i.ToWebhookFilterOutputWithContext(context.Background())
}
func (i WebhookFilterArgs) ToWebhookFilterOutputWithContext(ctx context.Context) WebhookFilterOutput {
return pulumi.ToOutputWithContext(ctx, i).(WebhookFilterOutput)
}
// WebhookFilterArrayInput is an input type that accepts WebhookFilterArray and WebhookFilterArrayOutput values.
// You can construct a concrete instance of `WebhookFilterArrayInput` via:
//
// WebhookFilterArray{ WebhookFilterArgs{...} }
type WebhookFilterArrayInput interface {
pulumi.Input
ToWebhookFilterArrayOutput() WebhookFilterArrayOutput
ToWebhookFilterArrayOutputWithContext(context.Context) WebhookFilterArrayOutput
}
type WebhookFilterArray []WebhookFilterInput
func (WebhookFilterArray) ElementType() reflect.Type {
return reflect.TypeOf((*[]WebhookFilter)(nil)).Elem()
}
func (i WebhookFilterArray) ToWebhookFilterArrayOutput() WebhookFilterArrayOutput {
return i.ToWebhookFilterArrayOutputWithContext(context.Background())
}
func (i WebhookFilterArray) ToWebhookFilterArrayOutputWithContext(ctx context.Context) WebhookFilterArrayOutput {
return pulumi.ToOutputWithContext(ctx, i).(WebhookFilterArrayOutput)
}
type WebhookFilterOutput struct{ *pulumi.OutputState }
func (WebhookFilterOutput) ElementType() reflect.Type {
return reflect.TypeOf((*WebhookFilter)(nil)).Elem()
}
func (o WebhookFilterOutput) ToWebhookFilterOutput() WebhookFilterOutput {
return o
}
func (o WebhookFilterOutput) ToWebhookFilterOutputWithContext(ctx context.Context) WebhookFilterOutput {
return o
}
// The [JSON path](https://github.com/json-path/JsonPath) to filter on.
func (o WebhookFilterOutput) JsonPath() pulumi.StringOutput {
return o.ApplyT(func(v WebhookFilter) string { return v.JsonPath }).(pulumi.StringOutput)
}
// The value to match on (e.g. `refs/heads/{Branch}`). See [AWS docs](https://docs.aws.amazon.com/codepipeline/latest/APIReference/API_WebhookFilterRule.html) for details.
func (o WebhookFilterOutput) MatchEquals() pulumi.StringOutput {
return o.ApplyT(func(v WebhookFilter) string { return v.MatchEquals }).(pulumi.StringOutput)
}
type WebhookFilterArrayOutput struct{ *pulumi.OutputState }
func (WebhookFilterArrayOutput) ElementType() reflect.Type {
return reflect.TypeOf((*[]WebhookFilter)(nil)).Elem()
}
func (o WebhookFilterArrayOutput) ToWebhookFilterArrayOutput() WebhookFilterArrayOutput {
return o
}
func (o WebhookFilterArrayOutput) ToWebhookFilterArrayOutputWithContext(ctx context.Context) WebhookFilterArrayOutput {
return o
}
func (o WebhookFilterArrayOutput) Index(i pulumi.IntInput) WebhookFilterOutput {
return pulumi.All(o, i).ApplyT(func(vs []interface{}) WebhookFilter {
return vs[0].([]WebhookFilter)[vs[1].(int)]
}).(WebhookFilterOutput)
}
func init() {
pulumi.RegisterInputType(reflect.TypeOf((*PipelineArtifactStoreInput)(nil)).Elem(), PipelineArtifactStoreArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*PipelineArtifactStorePtrInput)(nil)).Elem(), PipelineArtifactStoreArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*PipelineArtifactStoreEncryptionKeyInput)(nil)).Elem(), PipelineArtifactStoreEncryptionKeyArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*PipelineArtifactStoreEncryptionKeyPtrInput)(nil)).Elem(), PipelineArtifactStoreEncryptionKeyArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*PipelineStageInput)(nil)).Elem(), PipelineStageArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*PipelineStageArrayInput)(nil)).Elem(), PipelineStageArray{})
pulumi.RegisterInputType(reflect.TypeOf((*PipelineStageActionInput)(nil)).Elem(), PipelineStageActionArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*PipelineStageActionArrayInput)(nil)).Elem(), PipelineStageActionArray{})
pulumi.RegisterInputType(reflect.TypeOf((*WebhookAuthenticationConfigurationInput)(nil)).Elem(), WebhookAuthenticationConfigurationArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*WebhookAuthenticationConfigurationPtrInput)(nil)).Elem(), WebhookAuthenticationConfigurationArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*WebhookFilterInput)(nil)).Elem(), WebhookFilterArgs{})
pulumi.RegisterInputType(reflect.TypeOf((*WebhookFilterArrayInput)(nil)).Elem(), WebhookFilterArray{})
pulumi.RegisterOutputType(PipelineArtifactStoreOutput{})
pulumi.RegisterOutputType(PipelineArtifactStorePtrOutput{})
pulumi.RegisterOutputType(PipelineArtifactStoreEncryptionKeyOutput{})
pulumi.RegisterOutputType(PipelineArtifactStoreEncryptionKeyPtrOutput{})
pulumi.RegisterOutputType(PipelineStageOutput{})
pulumi.RegisterOutputType(PipelineStageArrayOutput{})
pulumi.RegisterOutputType(PipelineStageActionOutput{})
pulumi.RegisterOutputType(PipelineStageActionArrayOutput{})
pulumi.RegisterOutputType(WebhookAuthenticationConfigurationOutput{})
pulumi.RegisterOutputType(WebhookAuthenticationConfigurationPtrOutput{})
pulumi.RegisterOutputType(WebhookFilterOutput{})
pulumi.RegisterOutputType(WebhookFilterArrayOutput{})
}
| PipelineArtifactStorePtr |
VGG19_pytorch.py | import torch
import torch.nn.functional as F
import numpy as np
from collections import OrderedDict
from easydict import EasyDict
from _main_base import main
import os
#---
# config
#---
cfg = EasyDict()
# class
cfg.CLASS_LABEL = ['akahara', 'madara']
cfg.CLASS_NUM = len(cfg.CLASS_LABEL)
# model
cfg.INPUT_HEIGHT = 64
cfg.INPUT_WIDTH = 64
cfg.INPUT_CHANNEL = 3
cfg.GPU = False
cfg.DEVICE = torch.device("cuda" if cfg.GPU and torch.cuda.is_available() else "cpu")
cfg.MODEL_SAVE_PATH = 'models/VGG16_{}.pt'
cfg.MODEL_SAVE_INTERVAL = 200
cfg.ITERATION = 1000
cfg.MINIBATCH = 8
cfg.OPTIMIZER = torch.optim.SGD
cfg.LEARNING_RATE = 0.1
cfg.MOMENTUM = 0.9
cfg.LOSS_FUNCTION = loss_fn = torch.nn.NLLLoss()
cfg.TRAIN = EasyDict()
cfg.TRAIN.DISPAY_ITERATION_INTERVAL = 50
cfg.TRAIN.DATA_PATH = '../Dataset/train/images/'
cfg.TRAIN.DATA_HORIZONTAL_FLIP = True
cfg.TRAIN.DATA_VERTICAL_FLIP = True
cfg.TRAIN.DATA_ROTATION = False
cfg.TEST = EasyDict()
cfg.TEST.MODEL_PATH = cfg.MODEL_SAVE_PATH.format('final')
cfg.TEST.DATA_PATH = '../Dataset/test/images/'
cfg.TEST.MINIBATCH = 2
# random seed
torch.manual_seed(0)
class VGG19(torch.nn.Module):
def __init__(self):
|
def forward(self, x):
# block conv1
x = self.conv1(x)
x = F.max_pool2d(x, 2, stride=2, padding=0)
# block conv2
x = self.conv2(x)
x = F.max_pool2d(x, 2, stride=2, padding=0)
# block conv3
x = self.conv3(x)
x = F.max_pool2d(x, 2, stride=2, padding=0)
# block conv4
x = self.conv4(x)
x = F.max_pool2d(x, 2, stride=2, padding=0)
# block conv5
x = self.conv5(x)
x = F.max_pool2d(x, 2, stride=2, padding=0)
x = x.view(x.shape[0], -1)
x = self.top(x)
x = self.fc_out(x)
x = F.softmax(x, dim=1)
return x
# main
if __name__ == '__main__':
model_save_dir = '/'.join(cfg.MODEL_SAVE_PATH.split('/')[:-1])
os.makedirs(model_save_dir, exist_ok=True)
main(cfg, VGG19()) | super(VGG19, self).__init__()
self.conv1 = torch.nn.Sequential(OrderedDict({
'conv1_1' : torch.nn.Conv2d(cfg.INPUT_CHANNEL, 64, kernel_size=3, padding=1, stride=1),
'conv1_1_relu' : torch.nn.ReLU(),
'conv1_1_bn' : torch.nn.BatchNorm2d(64),
'conv1_2' : torch.nn.Conv2d(64, 64, kernel_size=3, padding=1, stride=1),
'conv1_2_relu' : torch.nn.ReLU(),
'conv1_2_bn' : torch.nn.BatchNorm2d(64),
}))
self.conv2 = torch.nn.Sequential(OrderedDict({
'conv2_1' : torch.nn.Conv2d(64, 128, kernel_size=3, padding=1, stride=1),
'conv2_1_relu' : torch.nn.ReLU(),
'conv2_1_bn' : torch.nn.BatchNorm2d(128),
'conv2_2' : torch.nn.Conv2d(128, 128, kernel_size=3, padding=1, stride=1),
'conv2_2_relu' : torch.nn.ReLU(),
'conv2_2_bn' : torch.nn.BatchNorm2d(128),
}))
self.conv3 = torch.nn.Sequential(OrderedDict({
'conv3_1' : torch.nn.Conv2d(128, 256, kernel_size=3, padding=1, stride=1),
'conv3_1_relu' : torch.nn.ReLU(),
'conv3_1_bn' : torch.nn.BatchNorm2d(256),
'conv3_2' : torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1),
'conv3_2_relu' : torch.nn.ReLU(),
'conv3_2_bn' : torch.nn.BatchNorm2d(256),
'conv3_3' : torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1),
'conv3_3_relu' : torch.nn.ReLU(),
'conv3_3_bn' : torch.nn.BatchNorm2d(256),
'conv3_4' : torch.nn.Conv2d(256, 256, kernel_size=3, padding=1, stride=1),
'conv3_4_relu' : torch.nn.ReLU(),
'conv3_4_bn' : torch.nn.BatchNorm2d(256),
}))
self.conv4 = torch.nn.Sequential(OrderedDict({
'conv4_1' : torch.nn.Conv2d(256, 512, kernel_size=3, padding=1, stride=1),
'conv4_1_relu' : torch.nn.ReLU(),
'conv4_1_bn' : torch.nn.BatchNorm2d(512),
'conv4_2' : torch.nn.Conv2d(512, 512, kernel_size=3, padding=1, stride=1),
'conv4_2_relu' : torch.nn.ReLU(),
'conv4_2_bn' : torch.nn.BatchNorm2d(512),
'conv4_3' : torch.nn.Conv2d(512, 512, kernel_size=3, padding=1, stride=1),
'conv4_3_relu' : torch.nn.ReLU(),
'conv4_3_bn' : torch.nn.BatchNorm2d(512),
'conv4_4' : torch.nn.Conv2d(512, 512, kernel_size=3, padding=1, stride=1),
'conv4_4_relu' : torch.nn.ReLU(),
'conv4_4_bn' : torch.nn.BatchNorm2d(512),
}))
self.conv5 = torch.nn.Sequential(OrderedDict({
'conv5_1' : torch.nn.Conv2d(512, 512, kernel_size=3, padding=1, stride=1),
'conv5_1_relu' : torch.nn.ReLU(),
'conv5_1_bn' : torch.nn.BatchNorm2d(512),
'conv5_2' : torch.nn.Conv2d(512, 512, kernel_size=3, padding=1, stride=1),
'conv5_2_relu' : torch.nn.ReLU(),
'conv5_2_bn' : torch.nn.BatchNorm2d(512),
'conv5_3' : torch.nn.Conv2d(512, 512, kernel_size=3, padding=1, stride=1),
'conv5_3_relu' : torch.nn.ReLU(),
'conv5_3_bn' : torch.nn.BatchNorm2d(512),
'conv5_3' : torch.nn.Conv2d(512, 512, kernel_size=3, padding=1, stride=1),
'conv5_3_relu' : torch.nn.ReLU(),
'conv5_3_bn' : torch.nn.BatchNorm2d(512),
}))
self.top = torch.nn.Sequential(OrderedDict({
'Dense1' : torch.nn.Linear(512 * (cfg.INPUT_HEIGHT // 32) * (cfg.INPUT_WIDTH // 32), 256),
'Dense1_relu' : torch.nn.ReLU(),
'Dense1_dropout' : torch.nn.Dropout(p=0.5),
'Dense2' : torch.nn.Linear(256, 256),
'Dense2_relu' : torch.nn.ReLU(),
'Dense2_dropout' : torch.nn.Dropout(p=0.5),
}))
self.fc_out = torch.nn.Linear(256, cfg.CLASS_NUM) |
clean.py | import sys
import re
import string
# dictionary to store clean data.
cleaned_data = {}
# list of professors
professors = []
# list of courses
courses = []
def createdict(profname, course_list):
new_course_list = []
is_course_match = False
profname = profname.title()
prof_courses = course_list.split('|')
prof_courses = [course.strip() for course in prof_courses]
if profname not in cleaned_data:
cleaned_data.setdefault(profname, [])
for c in prof_courses:
# replace & with and
if '&' in c:
c = c.replace('&', 'and ')
# replace intro. or intro with introduction
matcher = re.match("intro\.?", c)
if matcher:
c = re.sub("intro\.?", "introduction ", c)
# replace or make all roman numerals capitals.
matcher = re.match(r"\bi+\b", c.lower())
if matcher:
c = c.lower()
c = re.sub(r"\bi\b", "I", c)
c = re.sub(r"\bii\b", "II", c)
c = re.sub(r"\biii\b", "III", c)
# remove all punctuation marks.
punctuation_regex = re.compile('[%s]' % re.escape(string.punctuation))
c = punctuation_regex.sub('', c)
c_split = c.split()
c_word_array = []
# make only non roman numeral words as "title", roman numerals as "uppercase".
for c_split_constituent in c_split:
matcher = re.match(r"\bi+\b", c_split_constituent.lower())
if matcher:
c_split_constituent = c_split_constituent.upper()
else:
c_split_constituent = c_split_constituent.title()
c_word_array.append(c_split_constituent)
c = (" ".join(c_word for c_word in c_word_array))
if not courses:
courses.append(c)
is_course_match = False
if c in courses:
is_course_match = True
else:
# calculate courses similarity using edit distance using DP.
for c2 in courses:
c_length = len(c)
c2_length = len(c2)
table = [[0 for x in range(c2_length + 1)] for x in range(c_length + 1)]
for i in range(c_length + 1):
table[i][0] = i
for j in range(c2_length + 1):
table[0][j] = j
for i in range(1, c_length + 1):
for j in range(1, c2_length + 1):
if c[i - 1] == c2[j - 1]:
table[i][j] = table[i - 1][j - 1]
else:
table[i][j] = 1 + min(table[i][j - 1], table[i - 1][j], table[i - 1][j - 1])
distance = table[i][j]
if distance <= 2:
is_course_match = True
c = c2 | courses.append(c)
new_course_list.append(c)
cleaned_data[profname] = cleaned_data[profname] + new_course_list
return
# output file where the cleaned data is stored.
out = open("cleaned.txt", "w")
# input file to read the data.
inFile = sys.argv[1]
file_buffer = open(inFile, "r").read().splitlines()
for line in file_buffer:
if not line.strip():
continue
# separate the prof names and course lists.
separator = line.split('-', 1)
# if the professor name has comma, since we only need last name, we take only that.
prof = separator[0].strip()
if ',' in prof:
prof = (prof.split(',')[0]).strip()
# if professor name has a space in the last name, take only the last part from it.
if ' ' in prof:
prof = (prof.split()[-1]).strip()
# if professor name has a '.' in the last name, take only the last part form it.
elif '.' in prof:
prof = (prof.split('.')[-1]).strip()
# if professor name is in firstName.lastName format, take only lastName.
elif '.' in prof:
prof = ((prof.split('.')[-1]).split()[-1]).strip()
# if professor name is in firstName lastName format, take only lastName.
elif ' ' in prof:
prof = (prof.split()[-1]).strip()
else:
prof = prof.strip()
# create a dictionary of professor to their courses.
createdict(prof, separator[1].strip())
for key, value in cleaned_data.items():
professors.append(key)
# sort the courses list
value = list(set(value))
value.sort()
cleaned_data[key] = value
professors.sort()
for name in professors:
out.write(name + " - " + ("|".join(cleaned_data[name]))+"\n") | break
if not is_course_match: |
app.py | import flask
from devices import devices
from models import JsonEncoder
from pins import pins
from settings import settings
app = flask.Flask(__name__)
app.json_encoder = JsonEncoder
app.register_blueprint(devices, url_prefix="/devices")
app.register_blueprint(settings, url_prefix="/settings")
app.register_blueprint(pins, url_prefix="/pins")
@app.route("/", defaults={"path": None})
@app.route("/<path:path>")
def index(path: str):
|
if __name__ == "__main__":
import __init__
app.run()
| return open("static/index.html", "r").read() |
EntitySchemaTransformer.js | import { MetadataArgsStorage } from "../metadata-args/MetadataArgsStorage";
/**
* Transforms entity schema into metadata args storage.
* The result will be just like entities read from decorators.
*/
var EntitySchemaTransformer = /** @class */ (function () {
function EntitySchemaTransformer() {
}
// -------------------------------------------------------------------------
// Public Methods
// -------------------------------------------------------------------------
/**
* Transforms entity schema into new metadata args storage object.
*/
EntitySchemaTransformer.prototype.transform = function (schemas) {
var metadataArgsStorage = new MetadataArgsStorage();
schemas.forEach(function (entitySchema) {
var options = entitySchema.options;
// add table metadata args from the schema
var tableMetadata = {
target: options.target || options.name,
name: options.tableName,
database: options.database,
schema: options.schema,
type: options.type || "regular",
orderBy: options.orderBy,
synchronize: options.synchronize
};
metadataArgsStorage.tables.push(tableMetadata);
// add columns metadata args from the schema
Object.keys(options.columns).forEach(function (columnName) {
var column = options.columns[columnName];
var mode = "regular";
if (column.createDate)
mode = "createDate";
if (column.updateDate)
mode = "updateDate";
if (column.version)
mode = "version";
if (column.treeChildrenCount)
mode = "treeChildrenCount";
if (column.treeLevel)
mode = "treeLevel";
if (column.objectId)
mode = "objectId";
var columnAgrs = {
target: options.target || options.name,
mode: mode,
propertyName: columnName,
options: {
type: column.type,
name: column.objectId ? "_id" : column.name,
length: column.length,
width: column.width,
nullable: column.nullable,
readonly: column.readonly,
select: column.select,
primary: column.primary,
unique: column.unique,
comment: column.comment,
default: column.default,
onUpdate: column.onUpdate,
precision: column.precision,
scale: column.scale,
zerofill: column.zerofill,
unsigned: column.unsigned,
charset: column.charset,
collation: column.collation,
enum: column.enum,
asExpression: column.asExpression,
generatedType: column.generatedType,
hstoreType: column.hstoreType,
array: column.array,
transformer: column.transformer
}
};
metadataArgsStorage.columns.push(columnAgrs);
if (column.generated) {
var generationArgs = {
target: options.target || options.name,
propertyName: columnName,
strategy: typeof column.generated === "string" ? column.generated : "increment"
};
metadataArgsStorage.generations.push(generationArgs);
}
if (column.unique)
metadataArgsStorage.uniques.push({ target: options.target || options.name, columns: [columnName] });
});
// add relation metadata args from the schema
if (options.relations) {
Object.keys(options.relations).forEach(function (relationName) {
var relationSchema = options.relations[relationName];
var relation = {
target: options.target || options.name,
propertyName: relationName,
relationType: relationSchema.type,
isLazy: relationSchema.lazy || false,
type: relationSchema.target,
inverseSideProperty: relationSchema.inverseSide,
isTreeParent: relationSchema.treeParent,
isTreeChildren: relationSchema.treeChildren,
options: {
eager: relationSchema.eager || false,
cascade: relationSchema.cascade,
nullable: relationSchema.nullable,
onDelete: relationSchema.onDelete,
onUpdate: relationSchema.onUpdate,
primary: relationSchema.primary,
persistence: relationSchema.persistence
}
};
metadataArgsStorage.relations.push(relation);
// add join column
if (relationSchema.joinColumn) {
if (typeof relationSchema.joinColumn === "boolean") {
var joinColumn = {
target: options.target || options.name,
propertyName: relationName
};
metadataArgsStorage.joinColumns.push(joinColumn);
}
else {
var joinColumn = {
target: options.target || options.name,
propertyName: relationName,
name: relationSchema.joinColumn.name,
referencedColumnName: relationSchema.joinColumn.referencedColumnName
};
metadataArgsStorage.joinColumns.push(joinColumn);
}
}
// add join table
if (relationSchema.joinTable) { | propertyName: relationName
};
metadataArgsStorage.joinTables.push(joinTable);
}
else {
var joinTable = {
target: options.target || options.name,
propertyName: relationName,
name: relationSchema.joinTable.name,
database: relationSchema.joinTable.database,
schema: relationSchema.joinTable.schema,
joinColumns: (relationSchema.joinTable.joinColumn ? [relationSchema.joinTable.joinColumn] : relationSchema.joinTable.joinColumns),
inverseJoinColumns: (relationSchema.joinTable.inverseJoinColumn ? [relationSchema.joinTable.inverseJoinColumn] : relationSchema.joinTable.inverseJoinColumns),
};
metadataArgsStorage.joinTables.push(joinTable);
}
}
});
}
// add index metadata args from the schema
if (options.indices) {
options.indices.forEach(function (index) {
var indexAgrs = {
target: options.target || options.name,
name: index.name,
unique: index.unique === true ? true : false,
spatial: index.spatial === true ? true : false,
fulltext: index.fulltext === true ? true : false,
synchronize: index.synchronize === false ? false : true,
where: index.where,
sparse: index.sparse,
columns: index.columns
};
metadataArgsStorage.indices.push(indexAgrs);
});
}
// add unique metadata args from the schema
if (options.uniques) {
options.uniques.forEach(function (unique) {
var uniqueAgrs = {
target: options.target || options.name,
name: unique.name,
columns: unique.columns
};
metadataArgsStorage.uniques.push(uniqueAgrs);
});
}
// add check metadata args from the schema
if (options.checks) {
options.checks.forEach(function (check) {
var checkAgrs = {
target: options.target || options.name,
name: check.name,
expression: check.expression
};
metadataArgsStorage.checks.push(checkAgrs);
});
}
// add exclusion metadata args from the schema
if (options.exclusions) {
options.exclusions.forEach(function (exclusion) {
var exclusionArgs = {
target: options.target || options.name,
name: exclusion.name,
expression: exclusion.expression
};
metadataArgsStorage.exclusions.push(exclusionArgs);
});
}
});
return metadataArgsStorage;
};
return EntitySchemaTransformer;
}());
export { EntitySchemaTransformer };
//# sourceMappingURL=EntitySchemaTransformer.js.map | if (typeof relationSchema.joinTable === "boolean") {
var joinTable = {
target: options.target || options.name, |
manage_data.go | package build
import (
"github.com/stellar/go/support/errors"
"github.com/stellar/go/xdr"
)
// ClearData removes a key/value pair associated with the source account
func ClearData(name string, muts ...interface{}) (result ManageDataBuilder) {
result.MD.DataName = xdr.String64(name)
result.MD.DataValue = nil
result.validateName()
result.Mutate(muts...)
return
}
// SetData sets a key/value pair associated with the source account, updating it
// if one already exists.
func SetData(name string, value []byte, muts ...interface{}) (result ManageDataBuilder) {
result.MD.DataName = xdr.String64(name)
v := xdr.DataValue(value)
result.MD.DataValue = &v
result.validateName()
result.validateValue()
result.Mutate(muts...)
return
}
// ManageDataBuilder helps to build ManageDataOp structs.
type ManageDataBuilder struct {
O xdr.Operation |
// Mutate applies the provided mutators to this builder's payment or operation.
func (b *ManageDataBuilder) Mutate(muts ...interface{}) {
for _, m := range muts {
var err error
switch mut := m.(type) {
case OperationMutator:
err = mut.MutateOperation(&b.O)
default:
err = errors.New("Mutator type not allowed")
}
if err != nil {
b.Err = errors.Wrap(err, "ManageDataBuilder error")
return
}
}
}
func (b *ManageDataBuilder) validateName() {
if len(b.MD.DataName) > 64 {
b.Err = errors.New("Name too long: must be less than 64 bytes")
return
}
if b.MD.DataName == "" {
b.Err = errors.New("Invalid name: empty string")
return
}
}
func (b *ManageDataBuilder) validateValue() {
if *b.MD.DataValue == nil {
b.Err = errors.New("Invalid value: cannot set a nil value")
}
if len(*b.MD.DataValue) > 64 {
b.Err = errors.New("Value too long: must be less than 64 bytes")
}
} | MD xdr.ManageDataOp
Err error
} |
lib.rs | extern crate xml;
#[macro_use]
extern crate failure;
type Result<T> = std::result::Result<T, failure::Error>;
use std::fmt;
use std::fmt::Write;
use xml::attribute::OwnedAttribute;
use xml::reader::{EventReader, XmlEvent};
const TAB1: &str = " ";
const TAB2: &str = " ";
const TAB3: &str = " ";
const TAB4: &str = " ";
#[derive(Debug)]
struct FdbScope {
name: String,
options: Vec<FdbOption>,
}
impl FdbScope {
fn gen_ty<W: fmt::Write>(&self, w: &mut W) -> fmt::Result {
let with_ty = self.with_ty();
if with_ty {
writeln!(w, "#[derive(Clone, Debug)]")?;
} else {
writeln!(w, "#[derive(Clone, Copy, Debug)]")?;
}
writeln!(w, "#[non_exhaustive]")?;
writeln!(w, "pub enum {name} {{", name = self.name)?;
let with_ty = self.with_ty();
for option in self.options.iter() {
option.gen_ty(w, with_ty)?;
}
writeln!(w, "}}")
}
fn gen_impl<W: fmt::Write>(&self, w: &mut W) -> fmt::Result {
writeln!(w, "impl {name} {{", name = self.name)?;
self.gen_code(w)?;
self.gen_apply(w)?;
writeln!(w, "}}")
}
fn gen_code<W: fmt::Write>(&self, w: &mut W) -> fmt::Result {
writeln!(
w,
"{t}pub fn code(&self) -> fdb_sys::FDB{name} {{",
t = TAB1,
name = self.name,
)?;
writeln!(w, "{t}match *self {{", t = TAB2)?;
let enum_prefix = self.c_enum_prefix();
let with_ty = self.with_ty();
for option in self.options.iter() {
writeln!(
w,
"{t}{scope}::{name}{param} => fdb_sys::{enum_prefix}{code},",
t = TAB3,
scope = self.name,
name = option.name,
param = if let (true, Some(..)) = (with_ty, option.get_ty()) {
"(..)"
} else {
""
},
enum_prefix = enum_prefix,
code = option.c_name,
)?;
}
writeln!(w, "{t}}}", t = TAB2)?;
writeln!(w, "{t}}}", t = TAB1)
}
fn gen_apply<W: fmt::Write>(&self, w: &mut W) -> fmt::Result {
let fn_name = match self.apply_fn_name() {
Some(name) => name,
_ => return Ok(()),
};
let first_arg = match self.apply_arg_name() {
Some(name) => format!(", target: *mut fdb_sys::{}", name),
None => String::new(),
};
writeln!(
w,
"{t}pub unsafe fn apply(&self{args}) -> FdbResult<()> {{",
t = TAB1,
args = first_arg
)?;
writeln!(w, "{t}let code = self.code();", t = TAB2)?;
writeln!(w, "{t}let err = match *self {{", t = TAB2)?;
let args = if first_arg.is_empty() {
"code"
} else {
"target, code"
};
for option in self.options.iter() {
write!(w, "{}{}::{}", TAB3, self.name, option.name)?;
match option.param_type {
FdbOptionTy::Empty => {
writeln!(
w,
" => fdb_sys::{}({}, std::ptr::null(), 0),",
fn_name, args
)?;
}
FdbOptionTy::Int => {
writeln!(w, "(v) => {{")?;
writeln!(
w,
"{}let data: [u8;8] = std::mem::transmute(v as i64);",
TAB4,
)?;
writeln!(
w,
"{}fdb_sys::{}({}, data.as_ptr() as *const u8, 8)",
TAB4, fn_name, args
)?;
writeln!(w, "{t}}}", t = TAB3)?;
}
FdbOptionTy::Bytes => {
writeln!(w, "(ref v) => {{")?;
writeln!(
w,
"{}fdb_sys::{}({}, v.as_ptr() as *const u8, \
i32::try_from(v.len()).expect(\"len to fit in i32\"))\n",
TAB4, fn_name, args
)?;
writeln!(w, "{t}}}", t = TAB3)?;
}
FdbOptionTy::Str => {
writeln!(w, "(ref v) => {{")?;
writeln!(
w,
"{}fdb_sys::{}({}, v.as_ptr() as *const u8, \
i32::try_from(v.len()).expect(\"len to fit in i32\"))\n",
TAB4, fn_name, args
)?;
writeln!(w, "{t}}}", t = TAB3)?;
}
}
}
writeln!(w, "{t}}};", t = TAB2)?;
writeln!(
w,
"{t}if err != 0 {{ Err(FdbError::from_code(err)) }} else {{ Ok(()) }}",
t = TAB2,
)?;
writeln!(w, "{t}}}", t = TAB1)
}
fn with_ty(&self) -> bool {
self.apply_fn_name().is_some()
}
fn c_enum_prefix(&self) -> &'static str {
match self.name.as_str() {
"NetworkOption" => "FDBNetworkOption_FDB_NET_OPTION_",
"ClusterOption" => "FDBClusterOption_FDB_CLUSTER_OPTION_",
"DatabaseOption" => "FDBDatabaseOption_FDB_DB_OPTION_",
"TransactionOption" => "FDBTransactionOption_FDB_TR_OPTION_",
"StreamingMode" => "FDBStreamingMode_FDB_STREAMING_MODE_",
"MutationType" => "FDBMutationType_FDB_MUTATION_TYPE_",
"ConflictRangeType" => "FDBConflictRangeType_FDB_CONFLICT_RANGE_TYPE_",
"ErrorPredicate" => "FDBErrorPredicate_FDB_ERROR_PREDICATE_",
ty => panic!("unknown Scope name: `{}`", ty),
}
}
fn apply_arg_name(&self) -> Option<&'static str> {
let s = match self.name.as_str() {
"ClusterOption" => "FDBCluster",
"DatabaseOption" => "FDBDatabase",
"TransactionOption" => "FDBTransaction",
_ => return None,
};
Some(s)
}
fn apply_fn_name(&self) -> Option<&'static str> {
let s = match self.name.as_str() {
"NetworkOption" => "fdb_network_set_option",
"ClusterOption" => "fdb_cluster_set_option",
"DatabaseOption" => "fdb_database_set_option",
"TransactionOption" => "fdb_transaction_set_option",
_ => return None,
};
Some(s)
}
}
#[derive(Clone, Copy, Debug)]
enum FdbOptionTy {
Empty,
Int,
Str,
Bytes,
}
impl std::default::Default for FdbOptionTy {
fn default() -> Self {
FdbOptionTy::Empty
}
}
#[derive(Default, Debug)]
struct FdbOption {
name: String,
c_name: String,
code: i32,
param_type: FdbOptionTy,
param_description: String,
description: String,
hidden: bool,
default_for: Option<i32>,
persistent: bool,
}
impl FdbOption {
fn gen_ty<W: fmt::Write>(&self, w: &mut W, with_ty: bool) -> fmt::Result {
if !self.param_description.is_empty() {
writeln!(w, "{t}/// {desc}", t = TAB1, desc = self.param_description)?;
writeln!(w, "{t}///", t = TAB1)?;
}
if !self.description.is_empty() {
writeln!(w, "{t}/// {desc}", t = TAB1, desc = self.description)?;
}
if let (true, Some(ty)) = (with_ty, self.get_ty()) {
writeln!(w, "{t}{name}({ty}),", t = TAB1, name = self.name, ty = ty)?;
} else {
writeln!(w, "{t}{name},", t = TAB1, name = self.name)?;
}
Ok(())
}
fn get_ty(&self) -> Option<&'static str> {
match self.param_type {
FdbOptionTy::Int => Some("i32"),
FdbOptionTy::Str => Some("String"),
FdbOptionTy::Bytes => Some("Vec<u8>"),
FdbOptionTy::Empty => None,
}
}
}
fn to_rs_enum_name(v: &str) -> String {
let mut is_start_of_word = true;
v.chars()
.filter_map(|c| {
if c == '_' {
is_start_of_word = true;
None
} else if is_start_of_word {
is_start_of_word = false;
Some(c.to_ascii_uppercase())
} else {
Some(c)
}
})
.collect()
}
impl From<Vec<OwnedAttribute>> for FdbOption {
fn from(attrs: Vec<OwnedAttribute>) -> Self {
let mut opt = Self::default();
for attr in attrs {
let v = attr.value;
match attr.name.local_name.as_str() {
"name" => {
opt.name = to_rs_enum_name(v.as_str());
opt.c_name = v.to_uppercase();
}
"code" => {
opt.code = v.parse().expect("code to be a i32");
}
"paramType" => {
opt.param_type = match v.as_str() {
"Int" => FdbOptionTy::Int,
"String" => FdbOptionTy::Str,
"Bytes" => FdbOptionTy::Bytes,
"" => FdbOptionTy::Empty,
ty => panic!("unexpected param_type: {}", ty),
};
}
"paramDescription" => {
opt.param_description = v;
}
"description" => {
opt.description = v;
}
"hidden" => match v.as_str() {
"true" => opt.hidden = true,
"false" => opt.hidden = false,
_ => panic!("unexpected boolean value in 'hidden': {}", v),
},
"defaultFor" => {
opt.default_for = Some(v.parse().expect("defaultFor to be a i32"));
}
"persistent" => match v.as_str() {
"true" => opt.persistent = true,
"false" => opt.persistent = false,
_ => panic!("unexpected boolean value in 'persistent': {}", v),
},
attr => {
panic!("unexpected option attribute: {}", attr);
} | }
}
fn on_scope<I>(parser: &mut I) -> Result<Vec<FdbOption>>
where
I: Iterator<Item = xml::reader::Result<XmlEvent>>,
{
let mut options = Vec::new();
for e in parser {
let e = e?;
match e {
XmlEvent::StartElement {
name, attributes, ..
} => {
ensure!(name.local_name == "Option", "unexpected token");
let option = FdbOption::from(attributes.clone());
if !option.hidden {
options.push(option);
}
}
XmlEvent::EndElement { name, .. } => {
if name.local_name == "Scope" {
return Ok(options);
}
}
_ => {}
}
}
bail!("unexpected end of token");
}
#[cfg(all(not(feature = "embedded-fdb-include"), target_os = "linux"))]
const OPTIONS_DATA: &[u8] = include_bytes!("/usr/include/foundationdb/fdb.options");
#[cfg(all(not(feature = "embedded-fdb-include"), target_os = "macos"))]
const OPTIONS_DATA: &[u8] = include_bytes!("/usr/local/include/foundationdb/fdb.options");
#[cfg(all(not(feature = "embedded-fdb-include"), target_os = "windows"))]
const OPTIONS_DATA: &[u8] =
include_bytes!("C:/Program Files/foundationdb/include/foundationdb/fdb.options");
#[cfg(all(feature = "embedded-fdb-include", feature = "fdb-5_1"))]
const OPTIONS_DATA: &[u8] = include_bytes!("../include/510/fdb.options");
#[cfg(all(feature = "embedded-fdb-include", feature = "fdb-5_2"))]
const OPTIONS_DATA: &[u8] = include_bytes!("../include/520/fdb.options");
#[cfg(all(feature = "embedded-fdb-include", feature = "fdb-6_0"))]
const OPTIONS_DATA: &[u8] = include_bytes!("../include/600/fdb.options");
#[cfg(all(feature = "embedded-fdb-include", feature = "fdb-6_1"))]
const OPTIONS_DATA: &[u8] = include_bytes!("../include/610/fdb.options");
#[cfg(all(feature = "embedded-fdb-include", feature = "fdb-6_2"))]
const OPTIONS_DATA: &[u8] = include_bytes!("../include/620/fdb.options");
pub fn emit() -> Result<String> {
let mut reader = OPTIONS_DATA;
let parser = EventReader::new(&mut reader);
let mut iter = parser.into_iter();
let mut scopes = Vec::new();
while let Some(e) = iter.next() {
match e.unwrap() {
XmlEvent::StartElement {
name, attributes, ..
} => {
if name.local_name == "Scope" {
let scope_name = attributes
.into_iter()
.find(|attr| attr.name.local_name == "name")
.unwrap();
let options = on_scope(&mut iter).unwrap();
scopes.push(FdbScope {
name: scope_name.value,
options,
});
}
}
XmlEvent::EndElement { .. } => {
//
}
_ => {}
}
}
let mut w = String::new();
writeln!(w, "use std::convert::TryFrom;")?;
writeln!(w, "use crate::{{FdbError, FdbResult}};")?;
writeln!(w, "use foundationdb_sys as fdb_sys;")?;
for scope in scopes.iter() {
scope.gen_ty(&mut w)?;
scope.gen_impl(&mut w)?;
}
Ok(w)
} | }
}
opt |
09.py | data = [("000060", 8.25), ("000020", 5.75), ("039490", 1.3)]
def | return x[1]
data.sort(key=정렬규칙)
print(data)
| 정렬규칙(x):
|
pgbart.py | # Copyright 2020 The PyMC Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from copy import copy
import aesara
import numpy as np
from aesara import function as aesara_function
from pymc.aesaraf import inputvars, join_nonshared_inputs, make_shared_replacements
from pymc.bart.bart import BARTRV
from pymc.bart.tree import LeafNode, SplitNode, Tree
from pymc.model import modelcontext
from pymc.step_methods.arraystep import ArrayStepShared, Competence
_log = logging.getLogger("pymc")
class PGBART(ArrayStepShared):
"""
Particle Gibss BART sampling step
Parameters
----------
vars: list
List of value variables for sampler
num_particles : int
Number of particles for the conditional SMC sampler. Defaults to 40
max_stages : int
Maximum number of iterations of the conditional SMC sampler. Defaults to 100.
batch : int or tuple
Number of trees fitted per step. Defaults to "auto", which is the 10% of the `m` trees
during tuning and after tuning. If a tuple is passed the first element is the batch size
during tuning and the second the batch size after tuning.
model: PyMC Model
Optional model for sampling step. Defaults to None (taken from context).
"""
name = "bartsampler"
default_blocked = False
generates_stats = True
stats_dtypes = [{"variable_inclusion": np.ndarray, "bart_trees": np.ndarray}]
def __init__(self, vars=None, num_particles=40, max_stages=100, batch="auto", model=None):
_log.warning("BART is experimental. Use with caution.")
model = modelcontext(model)
initial_values = model.compute_initial_point()
value_bart = inputvars(vars)[0]
self.bart = model.values_to_rvs[value_bart].owner.op
self.X = self.bart.X
self.Y = self.bart.Y
self.missing_data = np.any(np.isnan(self.X))
self.m = self.bart.m
self.alpha = self.bart.alpha | self.alpha_vec = np.ones(self.X.shape[1])
self.init_mean = self.Y.mean()
# if data is binary
Y_unique = np.unique(self.Y)
if Y_unique.size == 2 and np.all(Y_unique == [0, 1]):
self.mu_std = 6 / (self.k * self.m ** 0.5)
# maybe we need to check for count data
else:
self.mu_std = (2 * self.Y.std()) / (self.k * self.m ** 0.5)
self.num_observations = self.X.shape[0]
self.num_variates = self.X.shape[1]
self.available_predictors = list(range(self.num_variates))
self.sum_trees = np.full_like(self.Y, self.init_mean).astype(aesara.config.floatX)
self.a_tree = Tree.init_tree(
leaf_node_value=self.init_mean / self.m,
idx_data_points=np.arange(self.num_observations, dtype="int32"),
)
self.mean = fast_mean()
self.normal = NormalSampler()
self.prior_prob_leaf_node = compute_prior_probability(self.alpha)
self.ssv = SampleSplittingVariable(self.alpha_vec)
self.tune = True
if batch == "auto":
batch = max(1, int(self.m * 0.1))
self.batch = (batch, batch)
else:
if isinstance(batch, (tuple, list)):
self.batch = batch
else:
self.batch = (batch, batch)
self.log_num_particles = np.log(num_particles)
self.indices = list(range(2, num_particles))
self.len_indices = len(self.indices)
self.max_stages = max_stages
shared = make_shared_replacements(initial_values, vars, model)
self.likelihood_logp = logp(initial_values, [model.datalogpt], vars, shared)
self.all_particles = []
for i in range(self.m):
self.a_tree.leaf_node_value = self.init_mean / self.m
p = ParticleTree(self.a_tree)
self.all_particles.append(p)
self.all_trees = np.array([p.tree for p in self.all_particles])
super().__init__(vars, shared)
def astep(self, _):
variable_inclusion = np.zeros(self.num_variates, dtype="int")
tree_ids = np.random.choice(range(self.m), replace=False, size=self.batch[~self.tune])
for tree_id in tree_ids:
# Generate an initial set of SMC particles
# at the end of the algorithm we return one of these particles as the new tree
particles = self.init_particles(tree_id)
# Compute the sum of trees without the old tree, that we are attempting to replace
self.sum_trees_noi = self.sum_trees - particles[0].tree.predict_output()
# Resample leaf values for particle 1 which is a copy of the old tree
particles[1].sample_leafs(
self.sum_trees,
self.X,
self.mean,
self.m,
self.normal,
self.mu_std,
)
# The old tree and the one with new leafs do not grow so we update the weights only once
self.update_weight(particles[0], old=True)
self.update_weight(particles[1], old=True)
for _ in range(self.max_stages):
# Sample each particle (try to grow each tree), except for the first two
stop_growing = True
for p in particles[2:]:
tree_grew = p.sample_tree(
self.ssv,
self.available_predictors,
self.prior_prob_leaf_node,
self.X,
self.missing_data,
self.sum_trees,
self.mean,
self.m,
self.normal,
self.mu_std,
)
if tree_grew:
self.update_weight(p)
if p.expansion_nodes:
stop_growing = False
if stop_growing:
break
# Normalize weights
W_t, normalized_weights = self.normalize(particles[2:])
# Resample all but first two particles
new_indices = np.random.choice(
self.indices, size=self.len_indices, p=normalized_weights
)
particles[2:] = particles[new_indices]
# Set the new weights
for p in particles[2:]:
p.log_weight = W_t
for p in particles[2:]:
p.log_weight = p.old_likelihood_logp
_, normalized_weights = self.normalize(particles)
# Get the new tree and update
new_particle = np.random.choice(particles, p=normalized_weights)
new_tree = new_particle.tree
self.all_trees[tree_id] = new_tree
new_particle.log_weight = new_particle.old_likelihood_logp - self.log_num_particles
self.all_particles[tree_id] = new_particle
self.sum_trees = self.sum_trees_noi + new_tree.predict_output()
if self.tune:
self.ssv = SampleSplittingVariable(self.alpha_vec)
for index in new_particle.used_variates:
self.alpha_vec[index] += 1
else:
for index in new_particle.used_variates:
variable_inclusion[index] += 1
stats = {"variable_inclusion": variable_inclusion, "bart_trees": self.all_trees}
return self.sum_trees, [stats]
def normalize(self, particles):
"""
Use logsumexp trick to get W_t and softmax to get normalized_weights
"""
log_w = np.array([p.log_weight for p in particles])
log_w_max = log_w.max()
log_w_ = log_w - log_w_max
w_ = np.exp(log_w_)
w_sum = w_.sum()
W_t = log_w_max + np.log(w_sum) - self.log_num_particles
normalized_weights = w_ / w_sum
# stabilize weights to avoid assigning exactly zero probability to a particle
normalized_weights += 1e-12
return W_t, normalized_weights
def init_particles(self, tree_id: int) -> np.ndarray:
"""
Initialize particles
"""
p = self.all_particles[tree_id]
particles = [p]
particles.append(copy(p))
for _ in self.indices:
particles.append(ParticleTree(self.a_tree))
return np.array(particles)
def update_weight(self, particle, old=False):
"""
Update the weight of a particle
Since the prior is used as the proposal,the weights are updated additively as the ratio of
the new and old log-likelihoods.
"""
new_likelihood = self.likelihood_logp(self.sum_trees_noi + particle.tree.predict_output())
if old:
particle.log_weight = new_likelihood
particle.old_likelihood_logp = new_likelihood
else:
particle.log_weight += new_likelihood - particle.old_likelihood_logp
particle.old_likelihood_logp = new_likelihood
@staticmethod
def competence(var, has_grad):
"""
PGBART is only suitable for BART distributions
"""
dist = getattr(var.owner, "op", None)
if isinstance(dist, BARTRV):
return Competence.IDEAL
return Competence.INCOMPATIBLE
class ParticleTree:
"""
Particle tree
"""
def __init__(self, tree):
self.tree = tree.copy() # keeps the tree that we care at the moment
self.expansion_nodes = [0]
self.log_weight = 0
self.old_likelihood_logp = 0
self.used_variates = []
def sample_tree(
self,
ssv,
available_predictors,
prior_prob_leaf_node,
X,
missing_data,
sum_trees,
mean,
m,
normal,
mu_std,
):
tree_grew = False
if self.expansion_nodes:
index_leaf_node = self.expansion_nodes.pop(0)
# Probability that this node will remain a leaf node
prob_leaf = prior_prob_leaf_node[self.tree[index_leaf_node].depth]
if prob_leaf < np.random.random():
index_selected_predictor = grow_tree(
self.tree,
index_leaf_node,
ssv,
available_predictors,
X,
missing_data,
sum_trees,
mean,
m,
normal,
mu_std,
)
if index_selected_predictor is not None:
new_indexes = self.tree.idx_leaf_nodes[-2:]
self.expansion_nodes.extend(new_indexes)
self.used_variates.append(index_selected_predictor)
tree_grew = True
return tree_grew
def sample_leafs(self, sum_trees, X, mean, m, normal, mu_std):
sample_leaf_values(self.tree, sum_trees, X, mean, m, normal, mu_std)
class SampleSplittingVariable:
def __init__(self, alpha_vec):
"""
Sample splitting variables proportional to `alpha_vec`.
This is equivalent to compute the posterior mean of a Dirichlet-Multinomial model.
This enforce sparsity.
"""
self.enu = list(enumerate(np.cumsum(alpha_vec / alpha_vec.sum())))
def rvs(self):
r = np.random.random()
for i, v in self.enu:
if r <= v:
return i
def compute_prior_probability(alpha):
"""
Calculate the probability of the node being a LeafNode (1 - p(being SplitNode)).
Taken from equation 19 in [Rockova2018].
Parameters
----------
alpha : float
Returns
-------
list with probabilities for leaf nodes
References
----------
.. [Rockova2018] Veronika Rockova, Enakshi Saha (2018). On the theory of BART.
arXiv, `link <https://arxiv.org/abs/1810.00787>`__
"""
prior_leaf_prob = [0]
depth = 1
while prior_leaf_prob[-1] < 1:
prior_leaf_prob.append(1 - alpha ** depth)
depth += 1
return prior_leaf_prob
def grow_tree(
tree,
index_leaf_node,
ssv,
available_predictors,
X,
missing_data,
sum_trees,
mean,
m,
normal,
mu_std,
):
current_node = tree.get_node(index_leaf_node)
idx_data_points = current_node.idx_data_points
index_selected_predictor = ssv.rvs()
selected_predictor = available_predictors[index_selected_predictor]
available_splitting_values = X[idx_data_points, selected_predictor]
if missing_data:
idx_data_points = idx_data_points[~np.isnan(available_splitting_values)]
available_splitting_values = available_splitting_values[
~np.isnan(available_splitting_values)
]
if available_splitting_values.size > 0:
idx_selected_splitting_values = discrete_uniform_sampler(len(available_splitting_values))
split_value = available_splitting_values[idx_selected_splitting_values]
new_idx_data_points = get_new_idx_data_points(
split_value, idx_data_points, selected_predictor, X
)
current_node_children = (
current_node.get_idx_left_child(),
current_node.get_idx_right_child(),
)
new_nodes = []
for idx in range(2):
idx_data_point = new_idx_data_points[idx]
node_value = draw_leaf_value(
sum_trees[idx_data_point],
X[idx_data_point, selected_predictor],
mean,
m,
normal,
mu_std,
)
new_node = LeafNode(
index=current_node_children[idx],
value=node_value,
idx_data_points=idx_data_point,
)
new_nodes.append(new_node)
new_split_node = SplitNode(
index=index_leaf_node,
idx_split_variable=selected_predictor,
split_value=split_value,
)
# update tree nodes and indexes
tree.delete_node(index_leaf_node)
tree.set_node(index_leaf_node, new_split_node)
tree.set_node(new_nodes[0].index, new_nodes[0])
tree.set_node(new_nodes[1].index, new_nodes[1])
return index_selected_predictor
def sample_leaf_values(tree, sum_trees, X, mean, m, normal, mu_std):
for idx in tree.idx_leaf_nodes:
if idx > 0:
leaf = tree[idx]
idx_data_points = leaf.idx_data_points
parent_node = tree[leaf.get_idx_parent_node()]
selected_predictor = parent_node.idx_split_variable
node_value = draw_leaf_value(
sum_trees[idx_data_points],
X[idx_data_points, selected_predictor],
mean,
m,
normal,
mu_std,
)
leaf.value = node_value
def get_new_idx_data_points(split_value, idx_data_points, selected_predictor, X):
left_idx = X[idx_data_points, selected_predictor] <= split_value
left_node_idx_data_points = idx_data_points[left_idx]
right_node_idx_data_points = idx_data_points[~left_idx]
return left_node_idx_data_points, right_node_idx_data_points
def draw_leaf_value(Y_mu_pred, X_mu, mean, m, normal, mu_std):
"""Draw Gaussian distributed leaf values"""
if Y_mu_pred.size == 0:
return 0
else:
norm = normal.random() * mu_std
if Y_mu_pred.size == 1:
mu_mean = Y_mu_pred.item() / m
else:
mu_mean = mean(Y_mu_pred) / m
draw = norm + mu_mean
return draw
def fast_mean():
"""If available use Numba to speed up the computation of the mean."""
try:
from numba import jit
except ImportError:
return np.mean
@jit
def mean(a):
count = a.shape[0]
suma = 0
for i in range(count):
suma += a[i]
return suma / count
return mean
def discrete_uniform_sampler(upper_value):
"""Draw from the uniform distribution with bounds [0, upper_value).
This is the same and np.random.randit(upper_value) but faster.
"""
return int(np.random.random() * upper_value)
class NormalSampler:
"""
Cache samples from a standard normal distribution
"""
def __init__(self):
self.size = 1000
self.cache = []
def random(self):
if not self.cache:
self.update()
return self.cache.pop()
def update(self):
self.cache = np.random.normal(loc=0.0, scale=1, size=self.size).tolist()
def logp(point, out_vars, vars, shared):
"""Compile Aesara function of the model and the input and output variables.
Parameters
----------
out_vars: List
containing :class:`pymc.Distribution` for the output variables
vars: List
containing :class:`pymc.Distribution` for the input variables
shared: List
containing :class:`aesara.tensor.Tensor` for depended shared data
"""
out_list, inarray0 = join_nonshared_inputs(point, out_vars, vars, shared)
f = aesara_function([inarray0], out_list[0])
f.trust_input = True
return f | self.k = self.bart.k
self.alpha_vec = self.bart.split_prior
if self.alpha_vec is None: |
index.ts | #!/usr/bin/env node
import process from "process";
import { AllPackages, getDefinitelyTyped } from "@definitelytyped/definitions-parser";
import { loggerWithErrors, NpmPublishClient } from "@definitelytyped/utils";
import { graphql } from "@octokit/graphql";
import search from "libnpmsearch";
import yargs from "yargs";
main();
async function main() {
const { dry } = yargs.argv;
const [log] = loggerWithErrors();
const options = { definitelyTypedPath: undefined, progress: false, parseInParallel: false };
const dt = await getDefinitelyTyped(options, log);
const allPackages = await AllPackages.read(dt);
const client = await NpmPublishClient.create(process.env.NPM_TOKEN!);
// Loop over npm @types packages and mark as deprecated any that no longer exist in the DT repo.
let from = 0;
let results;
do {
const opts = { limit: 250, from };
// Won't return already-deprecated packages.
results = await search("@types", opts);
for (const result of results) {
const types = result.name.slice("@types/".length);
// Skip ones that exist, either in the types/ directory or notNeededPackages.json.
if (allPackages.tryGetLatestVersion(types) || allPackages.getNotNeededPackage(types)) continue;
const msg = await fetchMsg(types);
if (!msg) {
log.info(`Could not find the commit that removed types/${types}/.`);
continue;
}
log.info(`Deprecating ${result.name}: ${msg}`);
if (!dry) await client.deprecate(result.name, "*", msg);
}
from += results.length;
// The registry API clamps limit at 250 and from at 5,000, so we can only loop over 5,250 packages, for now.
} while (results.length >= 250 && from <= 5000);
}
/** Reference the commit/PR that removed the named types. */
async function fetchMsg(types: string) {
const {
repository: {
defaultBranchRef: { | history: {
nodes: [commit],
},
},
},
},
} = await graphql(
`
query ($path: String!) {
repository(name: "DefinitelyTyped", owner: "DefinitelyTyped") {
defaultBranchRef {
target {
... on Commit {
history(first: 1, path: $path) {
nodes {
associatedPullRequests(first: 1) {
nodes {
url
}
}
messageHeadline
}
}
}
}
}
}
}
`,
{
headers: { authorization: `token ${process.env.GITHUB_TOKEN}` },
path: `types/${types}/`,
}
);
if (!commit) return;
const {
associatedPullRequests: {
nodes: [pullRequest],
},
messageHeadline,
} = commit;
const subject = messageHeadline.replace(new RegExp(String.raw`^\[${types}] `), "").replace(/ \(#[0-9]+\)$/, "");
return pullRequest ? `${subject} ${pullRequest.url}` : subject;
} | target: { |
b_list_comprehension.py | def estrai_classico(lista, lettera):
output = []
for l in lista:
if l[0] == lettera:
output.append(l)
return output
def quadrati(val_massimo):
output = []
for v in range(val_massimo):
output.append(v ** 2)
return output
def quadrato(numero): |
def costruisci_pari(i):
return "{} è pari".format(i)
if __name__ == "__main__":
lista_nomi = ["Jax Teller", "Walter White", "Billy Butcher", "Luke Skywalker", "Bobby Singer", "Johnny Lawrence"]
# Trovo tutti i nomi che iniziano per "J" con il metodo classico
lista_nomi_j = estrai_classico(lista_nomi, "J")
# print(lista_nomi_j)
# Trovo tutti i nomi che iniziano per "B" con una list comprehension
lista_nomi_b = [nome for nome in lista_nomi if nome[0] == "B"]
# print(lista_nomi_b)
# print("Lista dei nomi: {}".format(lista_nomi))
# print("Lista dei nomi che iniziano con J (con metodo classico): {}".format(lista_nomi_j))
# print("Lista dei nomi che iniziano con B (con list comprehension): {}".format(lista_nomi_b))
# print("Lista dei quadrati fino a 10 calcolata col metodo classico: {}".format(quadrati(10)))
# print("Lista dei quadrati calcolata con list comprehension: {}".format([i ** 2 for i in range(10)]))
# # Mostro "pari" per i numeri pari, e "dispari" per i numeri dispari; utile per l'if/else
lista_pari_dispari = [costruisci_pari(i) if i % 2 == 0 else "{} è dispari".format(i) for i in range(1, 10)]
print("Lista pari e dispari: {}".format(lista_pari_dispari))
# # Esempio di assignment expression
# fib = [0, 1]
# fib += [(fib := [fib[1], fib[0] + fib[1]]) and fib[1] for i in range(10)]
# print(fib) | return numero ** 2 |
get.ts | import _ from "lodash";
import { GenericObject } from "../../types/Common";
import { IXFilterFunction } from "../../types/XFilter";
const get: IXFilterFunction<
any,
any,
IXFilterFunctionGetOpts, | GenericObject
> = function get(
payload: any,
opts?: IXFilterFunctionGetOpts,
ref?: GenericObject
): any {
if (!opts?.prop || typeof opts.prop !== "string") return payload;
return _.get({ $data: payload, ...ref }, opts.prop);
};
export default get;
export interface IXFilterFunctionGetOpts {
prop: string;
} | |
solution.rs | use num_traits::{AsPrimitive, FromPrimitive, NumAssign, PrimInt, Unsigned};
use std::fmt::{Debug, Display};
pub trait UnsignedInt:
PrimInt
+ Unsigned
+ Display
+ Debug
+ AsPrimitive<usize>
+ AsPrimitive<f64>
+ FromPrimitive
+ NumAssign
{
}
impl UnsignedInt for u16 {}
impl UnsignedInt for u32 {}
///
/// Solution of the linear assignment problem
///
#[derive(Debug, Clone)]
pub struct | <I>
where
I: UnsignedInt,
{
/// index i gives the object, j, owned by person i
///
/// Unassigned people are marked by MAX value of the integer type (u32::MAX for u32)
pub person_to_object: Vec<I>,
/// index j gives the person, i, who owns object j
///
/// Unassigned objects are marked by MAX value of the integer type (u32::MAX for u32)
pub object_to_person: Vec<I>,
/// number of unnassigned people in case perfect matching doesn't exist
pub num_unassigned: I,
/// found solution is ε-optimal if perfect matching exists. For integer weights small enough ε
/// gives optimum.
pub eps: f64,
}
impl<I> AuctionSolution<I>
where
I: UnsignedInt,
{
pub fn new(row_capacity: usize, column_capacity: usize) -> AuctionSolution<I> {
AuctionSolution::<I> {
person_to_object: Vec::with_capacity(row_capacity),
object_to_person: Vec::with_capacity(column_capacity),
eps: f64::NAN,
num_unassigned: I::max_value(),
}
}
}
| AuctionSolution |
test_write_sheet_format_pr.py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2021, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
class TestWriteSheetFormatPr(unittest.TestCase):
"""
Test the Worksheet _write_sheet_format_pr() method.
"""
def | (self):
self.fh = StringIO()
self.worksheet = Worksheet()
self.worksheet._set_filehandle(self.fh)
def test_write_sheet_format_pr(self):
"""Test the _write_sheet_format_pr() method"""
self.worksheet._write_sheet_format_pr()
exp = """<sheetFormatPr defaultRowHeight="15"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
| setUp |
views.py | import urllib
from contextlib import suppress
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import login
from django.core.exceptions import PermissionDenied, SuspiciousOperation
from django.http import FileResponse, Http404, HttpResponseServerError
from django.shortcuts import redirect
from django.template import TemplateDoesNotExist, loader
from django.urls import get_callable
from django.utils.http import url_has_allowed_host_and_scheme
from django.utils.timezone import now
from django.views.generic import FormView
from django.views.generic.detail import SingleObjectTemplateResponseMixin
from django.views.generic.edit import ModelFormMixin, ProcessFormView
from django_context_decorator import context
from pretalx.cfp.forms.auth import ResetForm
from pretalx.common.mail import SendMailException
from pretalx.common.phrases import phrases
from pretalx.person.forms import UserForm
from pretalx.person.models import User
class CreateOrUpdateView(
SingleObjectTemplateResponseMixin, ModelFormMixin, ProcessFormView
):
def set_object(self):
if getattr(self, "object", None) is None:
setattr(self, "object", None)
with suppress(self.model.DoesNotExist, AttributeError):
setattr(self, "object", self.get_object())
def get(self, request, *args, **kwargs):
self.set_object()
return super().get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.set_object()
return super().post(request, *args, **kwargs)
def is_form_bound(request, form_name, form_param="form"):
return request.method == "POST" and request.POST.get(form_param) == form_name
def get_static(request, path, content_type): # pragma: no cover
"""TODO: move to staticfiles usage as per https://gist.github.com/SmileyChris/8d472f2a67526e36f39f3c33520182bc
This would avoid potential directory traversal by … a malicious urlconfig, so not a huge attack vector."""
path = settings.BASE_DIR / "pretalx/static" / path
if not path.exists():
raise Http404()
return FileResponse(
open(path, "rb"), content_type=content_type, as_attachment=False
)
class GenericLoginView(FormView):
form_class = UserForm
@context
def password_reset_link(self):
return self.get_password_reset_link()
def dispatch(self, request, *args, **kwargs):
if | def get_success_url(self):
params = self.request.GET.copy()
url = urllib.parse.unquote(params.pop("next", [""])[0])
params = "?" + params.urlencode() if params else ""
if url and url_has_allowed_host_and_scheme(url, allowed_hosts=None):
return url + params
return self.success_url + params
def form_valid(self, form):
pk = form.save()
user = User.objects.filter(pk=pk).first()
login(self.request, user, backend="django.contrib.auth.backends.ModelBackend")
return redirect(self.get_success_url())
class GenericResetView(FormView):
form_class = ResetForm
def form_valid(self, form):
user = form.cleaned_data["user"]
if not user or (
user.pw_reset_time
and (now() - user.pw_reset_time).total_seconds() < 3600 * 24
):
messages.success(self.request, phrases.cfp.auth_password_reset)
return redirect(self.get_success_url())
try:
user.reset_password(
event=getattr(self.request, "event", None),
orga="orga" in self.request.resolver_match.namespaces,
)
except SendMailException: # pragma: no cover
messages.error(self.request, phrases.base.error_sending_mail)
return self.get(self.request, *self.args, **self.kwargs)
messages.success(self.request, phrases.cfp.auth_password_reset)
user.log_action("pretalx.user.password.reset")
return redirect(self.get_success_url())
def handle_500(request):
try:
template = loader.get_template("500.html")
except TemplateDoesNotExist: # pragma: no cover
return HttpResponseServerError(
"Internal server error. Please contact the administrator for details.",
content_type="text/html",
)
context = {}
try: # This should never fail, but can't be too cautious in error views
context["request_path"] = urllib.parse.quote(request.path)
except Exception: # pragma: no cover
pass
return HttpResponseServerError(template.render(context))
def error_view(status_code):
if status_code == 4031:
return get_callable(settings.CSRF_FAILURE_VIEW)
if status_code == 500:
return handle_500
exceptions = {
400: SuspiciousOperation,
403: PermissionDenied,
404: Http404,
}
exception = exceptions[status_code]
def error_view(request, *args, **kwargs):
raise exception
return error_view
| not self.request.user.is_anonymous:
return redirect(self.get_success_url())
return super().dispatch(request, *args, **kwargs)
|
main.rs | use std::collections::{HashMap, HashSet};
use std::io::{self, Read};
type Pos = (i32, i32);
enum Dir { Left, Right, Up, Down }
impl Into<Pos> for &Dir {
fn into(self) -> Pos {
match self {
Dir::Left => (-1, 0),
Dir::Right => (1, 0),
Dir::Up => (0, -1),
Dir::Down => (0, 1)
}
}
}
type Path = Vec<(Dir, i32)>;
fn parse(input: &str) -> [Path; 2] {
let mut ret = [Vec::new(), Vec::new()];
for (i, line) in input.lines().enumerate() {
let mut path = Vec::new();
for segment in line.split(",") {
let dir = match &segment[0..1] {
"L" => Dir::Left,
"R" => Dir::Right,
"U" => Dir::Up,
"D" => Dir::Down,
_ => panic!()
};
let steps = segment[1..].parse().unwrap();
path.push((dir, steps));
}
ret[i] = path;
}
ret
}
fn points_touched(path: &Path) -> HashSet<Pos> {
let mut ret = HashSet::new();
let mut pos = (0, 0);
ret.insert(pos);
for (dir, steps) in path {
let offset: Pos = dir.into();
for _ in 0 .. *steps {
pos.0 += offset.0;
pos.1 += offset.1;
ret.insert(pos);
}
}
ret
}
fn signal_delays(path: &Path) -> HashMap<Pos, i32> {
let mut ret = HashMap::new();
let mut pos = (0, 0);
let mut dist = 0;
ret.insert(pos, dist);
for (dir, steps) in path {
let offset: Pos = dir.into();
for _ in 0 .. *steps {
pos.0 += offset.0;
pos.1 += offset.1;
dist += 1;
if !ret.contains_key(&pos) { ret.insert(pos, dist); }
}
}
ret
}
fn part1(input: &str) -> i32 {
let [path1, path2] = parse(input);
let [points1, points2] = [points_touched(&path1), points_touched(&path2)];
points1.intersection(&points2)
.map(|p| p.0.abs() + p.1.abs())
.filter(|d| *d != 0)
.min().unwrap()
}
fn part2(input: &str) -> i32 {
let [path1, path2] = parse(input);
let [points1, points2] = [points_touched(&path1), points_touched(&path2)];
let [signal_delays1, signal_delays2] = [signal_delays(&path1), signal_delays(&path2)];
points1.intersection(&points2)
.map(|p| signal_delays1.get(p).unwrap() + signal_delays2.get(p).unwrap())
.filter(|d| *d != 0)
.min().unwrap()
}
fn main() { | println!("{}", part2(&input));
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_part1() {
assert_eq!(part1("R8,U5,L5,D3\nU7,R6,D4,L4"), 6);
assert_eq!(part1("R75,D30,R83,U83,L12,D49,R71,U7,L72\nU62,R66,U55,R34,D71,R55,D58,R83"), 159);
assert_eq!(part1("R98,U47,R26,D63,R33,U87,L62,D20,R33,U53,R51\nU98,R91,D20,R16,D67,R40,U7,R15,U6,R7"), 135);
}
#[test]
fn test_part2() {
assert_eq!(part2("R8,U5,L5,D3\nU7,R6,D4,L4"), 30);
assert_eq!(part2("R75,D30,R83,U83,L12,D49,R71,U7,L72\nU62,R66,U55,R34,D71,R55,D58,R83"), 610);
assert_eq!(part2("R98,U47,R26,D63,R33,U87,L62,D20,R33,U53,R51\nU98,R91,D20,R16,D67,R40,U7,R15,U6,R7"), 410);
}
} | let mut input = String::new();
io::stdin().read_to_string(&mut input).unwrap();
println!("{}", part1(&input)); |
build.rs | // Licensed under the MIT License <LICENSE.md>
fn main() {
println!("cargo:rustc-flags=-l appnotify");
} | // Copyright © 2015, Peter Atashian |
|
versionChecker.go | // The MIT License
//
// Copyright (c) 2020 Temporal Technologies Inc. All rights reserved.
//
// Copyright (c) 2020 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package frontend
import (
"runtime"
"sync"
"time"
enumsbp "go.temporal.io/api/enums/v1"
"go.temporal.io/api/serviceerror"
versionpb "go.temporal.io/api/version/v1"
"go.temporal.io/version/check"
"go.temporal.io/server/common/headers"
"go.temporal.io/server/common/metrics"
"go.temporal.io/server/common/persistence"
"go.temporal.io/server/common/primitives/timestamp"
"go.temporal.io/server/common/resource"
)
const VersionCheckInterval = 24 * time.Hour
type VersionChecker struct {
resource.Resource
config *Config
params *resource.BootstrapParams
shutdownChan chan struct{}
metricsScope metrics.Scope
startOnce sync.Once
stopOnce sync.Once
}
func NewVersionChecker(
resource resource.Resource,
params *resource.BootstrapParams,
config *Config,
) *VersionChecker {
return &VersionChecker{Resource: resource, config: config, params: params, shutdownChan: make(chan struct{}),
metricsScope: resource.GetMetricsClient().Scope(metrics.VersionCheckScope)}
}
func (vc *VersionChecker) Start() {
if vc.config.EnableServerVersionCheck() {
vc.startOnce.Do(func() {
go vc.versionCheckLoop()
})
}
}
func (vc *VersionChecker) Stop() {
if vc.config.EnableServerVersionCheck() {
vc.stopOnce.Do(func() {
close(vc.shutdownChan)
})
}
}
func (vc *VersionChecker) versionCheckLoop() {
timer := time.NewTicker(VersionCheckInterval)
defer timer.Stop()
vc.performVersionCheck()
for {
select {
case <-vc.shutdownChan:
return
case <-timer.C:
vc.performVersionCheck()
}
}
}
func (vc *VersionChecker) performVersionCheck() {
sw := vc.metricsScope.StartTimer(metrics.VersionCheckLatency)
defer sw.Stop()
clusterMetadataManager := vc.GetClusterMetadataManager()
metadata, err := clusterMetadataManager.GetClusterMetadata()
if err != nil {
vc.metricsScope.IncCounter(metrics.VersionCheckFailedCount)
return
}
if !isUpdateNeeded(metadata) {
return
}
req, err := vc.createVersionCheckRequest(metadata)
if err != nil {
vc.metricsScope.IncCounter(metrics.VersionCheckFailedCount)
return
}
resp, err := vc.getVersionInfo(req)
if err != nil {
vc.metricsScope.IncCounter(metrics.VersionCheckRequestFailedCount)
vc.metricsScope.IncCounter(metrics.VersionCheckFailedCount)
return
}
err = vc.saveVersionInfo(resp)
if err != nil {
vc.metricsScope.IncCounter(metrics.VersionCheckFailedCount)
return
}
vc.metricsScope.IncCounter(metrics.VersionCheckSuccessCount)
}
func isUpdateNeeded(metadata *persistence.GetClusterMetadataResponse) bool {
return metadata.VersionInfo == nil || (metadata.VersionInfo.LastUpdateTime != nil &&
metadata.VersionInfo.LastUpdateTime.Before(time.Now().Add(-time.Hour)))
}
func (vc *VersionChecker) createVersionCheckRequest(metadata *persistence.GetClusterMetadataResponse) (*check.VersionCheckRequest, error) {
return &check.VersionCheckRequest{
Product: headers.ClientNameServer,
Version: headers.ServerVersion,
Arch: runtime.GOARCH,
OS: runtime.GOOS,
DB: vc.GetClusterMetadataManager().GetName(),
ClusterID: metadata.ClusterId,
Timestamp: time.Now().UnixNano(),
}, nil
}
func (vc *VersionChecker) getVersionInfo(req *check.VersionCheckRequest) (*check.VersionCheckResponse, error) {
return check.NewCaller().Call(req)
}
func (vc *VersionChecker) saveVersionInfo(resp *check.VersionCheckResponse) error {
clusterMetadataManager := vc.GetClusterMetadataManager()
metadata, err := clusterMetadataManager.GetClusterMetadata()
if err != nil {
return err
}
metadata.VersionInfo = toVersionInfo(resp)
saved, err := clusterMetadataManager.SaveClusterMetadata(&persistence.SaveClusterMetadataRequest{
ClusterMetadata: metadata.ClusterMetadata, Version: metadata.Version})
if err != nil {
return err
}
if !saved {
return serviceerror.NewInternal("version info update hasn't been applied")
}
return nil
}
func toVersionInfo(resp *check.VersionCheckResponse) *versionpb.VersionInfo {
return &versionpb.VersionInfo{
Current: convertReleaseInfo(resp.Current),
Recommended: convertReleaseInfo(resp.Recommended),
Instructions: resp.Instructions,
Alerts: convertAlerts(resp.Alerts),
LastUpdateTime: timestamp.TimePtr(time.Now().UTC()),
}
}
func | (alerts []check.Alert) []*versionpb.Alert {
var result []*versionpb.Alert
for _, alert := range alerts {
result = append(result, &versionpb.Alert{
Message: alert.Message,
Severity: enumsbp.Severity(alert.Severity),
})
}
return result
}
func convertReleaseInfo(releaseInfo check.ReleaseInfo) *versionpb.ReleaseInfo {
return &versionpb.ReleaseInfo{
Version: releaseInfo.Version,
ReleaseTime: timestamp.UnixOrZeroTimePtr(releaseInfo.ReleaseTime),
Notes: releaseInfo.Notes,
}
}
| convertAlerts |
test_exact.py | # -*- coding: utf-8 -*-
import unittest
import freqerica.hamiltonian.exact
class BasicTestSuite(unittest.TestCase):
"""Basic test cases."""
def test_absolute_truth_and_meaning(self):
assert True
if __name__ == '__main__':
| unittest.main() |
|
lib.rs | #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(bool_to_option)]
#![feature(box_patterns)]
#![feature(try_blocks)]
#![feature(in_band_lifetimes)]
#![feature(let_else)]
#![feature(once_cell)]
#![feature(nll)]
#![feature(associated_type_bounds)]
#![recursion_limit = "256"]
//! This crate contains codegen code that is used by all codegen backends (LLVM and others).
//! The backend-agnostic functions of this crate use functions defined in various traits that
//! have to be implemented by each backends.
#[macro_use]
extern crate rustc_macros;
#[macro_use]
extern crate tracing;
#[macro_use]
extern crate rustc_middle;
use rustc_ast as ast;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync::Lrc;
use rustc_hir::def_id::CrateNum;
use rustc_hir::LangItem;
use rustc_middle::dep_graph::WorkProduct;
use rustc_middle::middle::dependency_format::Dependencies;
use rustc_middle::ty::query::Providers;
use rustc_session::config::{CrateType, OutputFilenames, OutputType, RUST_CGU_EXT};
use rustc_session::cstore::{self, CrateSource};
use rustc_session::utils::NativeLibKind;
use rustc_span::symbol::Symbol;
use std::path::{Path, PathBuf};
pub mod back;
pub mod base;
pub mod common;
pub mod coverageinfo;
pub mod debuginfo;
pub mod glue;
pub mod meth;
pub mod mir;
pub mod mono_item;
pub mod target_features;
pub mod traits;
pub struct ModuleCodegen<M> {
/// The name of the module. When the crate may be saved between
/// compilations, incremental compilation requires that name be
/// unique amongst **all** crates. Therefore, it should contain
/// something unique to this crate (e.g., a module path) as well
/// as the crate name and disambiguator.
/// We currently generate these names via CodegenUnit::build_cgu_name().
pub name: String,
pub module_llvm: M,
pub kind: ModuleKind,
}
// FIXME(eddyb) maybe include the crate name in this?
pub const METADATA_FILENAME: &str = "lib.rmeta";
impl<M> ModuleCodegen<M> {
pub fn into_compiled_module(
self,
emit_obj: bool,
emit_dwarf_obj: bool,
emit_bc: bool,
outputs: &OutputFilenames,
) -> CompiledModule |
}
#[derive(Debug, Encodable, Decodable)]
pub struct CompiledModule {
pub name: String,
pub kind: ModuleKind,
pub object: Option<PathBuf>,
pub dwarf_object: Option<PathBuf>,
pub bytecode: Option<PathBuf>,
}
pub struct CachedModuleCodegen {
pub name: String,
pub source: WorkProduct,
}
#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable)]
pub enum ModuleKind {
Regular,
Metadata,
Allocator,
}
bitflags::bitflags! {
pub struct MemFlags: u8 {
const VOLATILE = 1 << 0;
const NONTEMPORAL = 1 << 1;
const UNALIGNED = 1 << 2;
}
}
#[derive(Clone, Debug, Encodable, Decodable, HashStable)]
pub struct NativeLib {
pub kind: NativeLibKind,
pub name: Option<Symbol>,
pub cfg: Option<ast::MetaItem>,
pub verbatim: Option<bool>,
pub dll_imports: Vec<cstore::DllImport>,
}
impl From<&cstore::NativeLib> for NativeLib {
fn from(lib: &cstore::NativeLib) -> Self {
NativeLib {
kind: lib.kind,
name: lib.name,
cfg: lib.cfg.clone(),
verbatim: lib.verbatim,
dll_imports: lib.dll_imports.clone(),
}
}
}
/// Misc info we load from metadata to persist beyond the tcx.
///
/// Note: though `CrateNum` is only meaningful within the same tcx, information within `CrateInfo`
/// is self-contained. `CrateNum` can be viewed as a unique identifier within a `CrateInfo`, where
/// `used_crate_source` contains all `CrateSource` of the dependents, and maintains a mapping from
/// identifiers (`CrateNum`) to `CrateSource`. The other fields map `CrateNum` to the crate's own
/// additional properties, so that effectively we can retrieve each dependent crate's `CrateSource`
/// and the corresponding properties without referencing information outside of a `CrateInfo`.
#[derive(Debug, Encodable, Decodable)]
pub struct CrateInfo {
pub target_cpu: String,
pub exported_symbols: FxHashMap<CrateType, Vec<String>>,
pub local_crate_name: Symbol,
pub compiler_builtins: Option<CrateNum>,
pub profiler_runtime: Option<CrateNum>,
pub is_no_builtins: FxHashSet<CrateNum>,
pub native_libraries: FxHashMap<CrateNum, Vec<NativeLib>>,
pub crate_name: FxHashMap<CrateNum, String>,
pub used_libraries: Vec<NativeLib>,
pub used_crate_source: FxHashMap<CrateNum, Lrc<CrateSource>>,
pub used_crates: Vec<CrateNum>,
pub lang_item_to_crate: FxHashMap<LangItem, CrateNum>,
pub missing_lang_items: FxHashMap<CrateNum, Vec<LangItem>>,
pub dependency_formats: Lrc<Dependencies>,
pub windows_subsystem: Option<String>,
}
#[derive(Encodable, Decodable)]
pub struct CodegenResults {
pub modules: Vec<CompiledModule>,
pub allocator_module: Option<CompiledModule>,
pub metadata_module: Option<CompiledModule>,
pub metadata: rustc_metadata::EncodedMetadata,
pub crate_info: CrateInfo,
}
pub fn provide(providers: &mut Providers) {
crate::back::symbol_export::provide(providers);
crate::base::provide(providers);
crate::target_features::provide(providers);
}
pub fn provide_extern(providers: &mut Providers) {
crate::back::symbol_export::provide_extern(providers);
}
/// Checks if the given filename ends with the `.rcgu.o` extension that `rustc`
/// uses for the object files it generates.
pub fn looks_like_rust_object_file(filename: &str) -> bool {
let path = Path::new(filename);
let ext = path.extension().and_then(|s| s.to_str());
if ext != Some(OutputType::Object.extension()) {
// The file name does not end with ".o", so it can't be an object file.
return false;
}
// Strip the ".o" at the end
let ext2 = path.file_stem().and_then(|s| Path::new(s).extension()).and_then(|s| s.to_str());
// Check if the "inner" extension
ext2 == Some(RUST_CGU_EXT)
}
| {
let object = emit_obj.then(|| outputs.temp_path(OutputType::Object, Some(&self.name)));
let dwarf_object = emit_dwarf_obj.then(|| outputs.temp_path_dwo(Some(&self.name)));
let bytecode = emit_bc.then(|| outputs.temp_path(OutputType::Bitcode, Some(&self.name)));
CompiledModule { name: self.name.clone(), kind: self.kind, object, dwarf_object, bytecode }
} |
cancel.py | """Cancel an existing iSCSI account."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
@click.command()
@click.argument('identifier')
@click.option('--reason', help="An optional reason for cancellation")
@click.option('--immediate',
is_flag=True,
help="Cancels the iSCSI immediately instead of on the billing "
"anniversary")
@environment.pass_env
def cli(env, identifier, reason, immediate):
| """Cancel an existing iSCSI account."""
iscsi_mgr = SoftLayer.ISCSIManager(env.client)
iscsi_id = helpers.resolve_id(iscsi_mgr.resolve_ids, identifier, 'iSCSI')
if not (env.skip_confirmations or formatting.no_going_back(iscsi_id)):
raise exceptions.CLIAbort('Aborted')
iscsi_mgr.cancel_iscsi(iscsi_id, reason, immediate) |
|
manage.py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'SqlToModel.settings')
try: | "available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main() | from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and " |
parser.rs | // Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//! A parser for sqllogictest.
use std::borrow::ToOwned;
use anyhow::{anyhow, bail};
use lazy_static::lazy_static;
use mz_repr::ColumnName;
use regex::Regex;
use crate::ast::{Location, Mode, Output, QueryOutput, Record, Sort, Type};
#[derive(Debug, Clone)]
pub struct Parser<'a> {
contents: &'a str,
fname: String,
curline: usize,
mode: Mode,
}
impl<'a> Parser<'a> {
pub fn new(fname: &str, contents: &'a str) -> Self {
Parser {
contents,
fname: fname.to_string(),
curline: 1,
mode: Mode::Standard,
}
}
pub fn is_done(&self) -> bool {
self.contents.is_empty()
}
pub fn location(&self) -> Location {
Location {
file: self.fname.clone(),
line: self.curline,
}
}
fn consume(&mut self, upto: usize) {
for ch in self.contents[..upto].chars() {
if ch == '\n' {
self.curline += 1;
}
}
self.contents = &self.contents[upto..];
}
pub fn split_at(&mut self, sep: &Regex) -> Result<&'a str, anyhow::Error> {
match sep.find(self.contents) {
Some(found) => {
let result = &self.contents[..found.start()];
self.consume(found.end());
Ok(result)
}
None => bail!("Couldn't split {:?} at {:?}", self.contents, sep),
}
}
pub fn parse_record(&mut self) -> Result<Record<'a>, anyhow::Error> {
if self.is_done() {
return Ok(Record::Halt);
}
lazy_static! {
static ref COMMENT_AND_LINE_REGEX: Regex = Regex::new("(#[^\n]*)?\r?(\n|$)").unwrap();
}
let first_line = self.split_at(&COMMENT_AND_LINE_REGEX)?.trim();
if first_line.is_empty() {
// query starts on the next line
return self.parse_record();
}
let mut words = first_line.split(' ').peekable();
match words.next().unwrap() {
"statement" => self.parse_statement(words, first_line),
"query" => self.parse_query(words, first_line),
"simple" => self.parse_simple(words),
"hash-threshold" => {
let threshold = words
.next()
.ok_or_else(|| anyhow!("missing threshold in: {}", first_line))?
.parse::<u64>()
.map_err(|err| anyhow!("invalid threshold ({}) in: {}", err, first_line))?;
Ok(Record::HashThreshold { threshold })
}
// we'll follow the postgresql version of all these tests
"skipif" => {
match words.next().unwrap() {
"postgresql" => {
// discard next record
self.parse_record()?;
self.parse_record()
}
_ => self.parse_record(),
}
}
"onlyif" => {
match words.next().unwrap() {
"postgresql" => self.parse_record(),
_ => {
// discard next record
self.parse_record()?;
self.parse_record()
}
}
}
"halt" => Ok(Record::Halt),
// this is some cockroach-specific thing, we don't care
"subtest" | "user" | "kv-batch-size" => self.parse_record(),
"mode" => {
self.mode = match words.next() {
Some("cockroach") => Mode::Cockroach,
Some("standard") | Some("sqlite") => Mode::Standard,
other => bail!("unknown parse mode: {:?}", other),
};
self.parse_record()
}
"copy" => Ok(Record::Copy {
table_name: words
.next()
.ok_or_else(|| anyhow!("load directive missing table name"))?,
tsv_path: words
.next()
.ok_or_else(|| anyhow!("load directive missing TSV path"))?,
}),
other => bail!("Unexpected start of record: {}", other),
}
}
pub fn parse_records(&mut self) -> Result<Vec<Record<'a>>, anyhow::Error> {
let mut records = vec![];
loop {
match self.parse_record()? {
Record::Halt => break,
record => records.push(record),
}
}
Ok(records)
}
fn parse_statement(
&mut self,
mut words: impl Iterator<Item = &'a str>,
first_line: &'a str,
) -> Result<Record<'a>, anyhow::Error> {
let location = self.location();
let mut expected_error = None;
let mut rows_affected = None;
match words.next() {
Some("count") => {
rows_affected = Some(
words
.next()
.ok_or_else(|| anyhow!("missing count of rows affected"))?
.parse::<u64>()
.map_err(|err| anyhow!("parsing count of rows affected: {}", err))?,
);
}
Some("ok") | Some("OK") => (),
Some("error") => expected_error = Some(parse_expected_error(first_line)),
_ => bail!("invalid statement disposition: {}", first_line),
};
lazy_static! {
static ref DOUBLE_LINE_REGEX: Regex = Regex::new(r"(\n|\r\n|$)(\n|\r\n|$)").unwrap();
}
let sql = self.split_at(&DOUBLE_LINE_REGEX)?;
Ok(Record::Statement {
expected_error,
rows_affected,
sql,
location,
})
}
fn parse_query(
&mut self,
mut words: std::iter::Peekable<impl Iterator<Item = &'a str>>,
first_line: &'a str,
) -> Result<Record<'a>, anyhow::Error> {
let location = self.location();
if words.peek() == Some(&"error") {
let error = parse_expected_error(first_line);
lazy_static! {
static ref DOUBLE_LINE_REGEX: Regex =
Regex::new(r"(\n|\r\n|$)(\n|\r\n|$)").unwrap();
}
let sql = self.split_at(&DOUBLE_LINE_REGEX)?;
return Ok(Record::Query {
sql,
output: Err(error),
location,
});
}
let types = words.next().map_or(Ok(vec![]), parse_types)?;
let mut sort = Sort::No;
let mut check_column_names = false;
let mut multiline = false;
if let Some(options) = words.next() {
for option in options.split(',') {
match option {
"nosort" => sort = Sort::No,
"rowsort" => sort = Sort::Row,
"valuesort" => sort = Sort::Value,
"colnames" => check_column_names = true,
"multiline" => multiline = true,
other => {
if other.starts_with("partialsort") {
// TODO(jamii) https://github.com/cockroachdb/cockroach/blob/d2f7fbf5dd1fc1a099bbad790a2e1f7c60a66cc3/pkg/sql/logictest/logic.go#L153
// partialsort has comma-separated arguments so our parsing is totally broken
// luckily it always comes last in the existing tests, so we can just bail out for now
sort = Sort::Row;
break;
} else {
bail!("Unrecognized option {:?} in {:?}", other, options);
}
}
};
}
}
if multiline && (check_column_names || sort.yes()) {
bail!("multiline option is incompatible with all other options");
}
let label = words.next();
lazy_static! {
static ref LINE_REGEX: Regex = Regex::new("\r?(\n|$)").unwrap();
static ref HASH_REGEX: Regex = Regex::new(r"(\S+) values hashing to (\S+)").unwrap();
static ref QUERY_OUTPUT_REGEX: Regex = Regex::new(r"\r?\n----").unwrap();
}
let sql = self.split_at(&QUERY_OUTPUT_REGEX)?;
lazy_static! {
static ref EOF_REGEX: Regex = Regex::new(r"(\n|\r\n)EOF(\n|\r\n)").unwrap();
static ref DOUBLE_LINE_REGEX: Regex = Regex::new(r"(\n|\r\n|$)(\n|\r\n|$)").unwrap();
}
let mut output_str = self
.split_at(if multiline {
&EOF_REGEX
} else {
&DOUBLE_LINE_REGEX
})?
.trim_start();
let column_names = if check_column_names {
Some(
split_at(&mut output_str, &LINE_REGEX)?
.split(' ')
.filter(|s| !s.is_empty())
.map(|s| ColumnName::from(s.replace('␠', " ")))
.collect(),
)
} else {
None
};
let output = match HASH_REGEX.captures(output_str) {
Some(captures) => Output::Hashed {
num_values: captures.get(1).unwrap().as_str().parse::<usize>()?,
md5: captures.get(2).unwrap().as_str().to_owned(),
},
None => {
if multiline {
Output::Values(vec![output_str.to_owned()])
} else if output_str.starts_with('\r') || output_str.starts_with('\n') {
Output::Values(vec![])
} else {
let mut vals: Vec<String> = output_str.lines().map(|s| s.to_owned()).collect();
if let Mode::Cockroach = self.mode {
let mut rows: Vec<Vec<String>> = vec![];
for line in vals {
let cols = split_cols(&line, types.len());
if sort != Sort::No && cols.len() != types.len() {
// We can't check this condition for
// Sort::No, because some tests use strings
// with whitespace that look like extra
// columns. (Note that these tests never
// use any of the sorting options.)
bail!(
"col len ({}) did not match declared col len ({})",
cols.len(),
types.len()
);
}
rows.push(cols.into_iter().map(|col| col.replace('␠', " ")).collect());
}
if sort == Sort::Row {
rows.sort();
}
vals = rows.into_iter().flatten().collect();
if sort == Sort::Value {
vals.sort();
}
}
Output::Values(vals)
}
}
};
Ok(Record::Query {
sql,
output: Ok(QueryOutput {
types,
sort,
label,
column_names,
mode: self.mode,
output,
output_str,
}),
location,
})
}
fn parse_simple(
&mut self,
mut words: std::iter::Peekable<impl Iterator<Item = &'a str>>,
) -> Result<Record<'a>, anyhow::Error> {
| fn split_at<'a>(input: &mut &'a str, sep: &Regex) -> Result<&'a str, anyhow::Error> {
match sep.find(input) {
Some(found) => {
let result = &input[..found.start()];
*input = &input[found.end()..];
Ok(result)
}
None => bail!("Couldn't split {:?} at {:?}", input, sep),
}
}
/// Parse a query result type string into a vec of expected types
fn parse_types(input: &str) -> Result<Vec<Type>, anyhow::Error> {
input
.chars()
.map(|char| {
Ok(match char {
'T' => Type::Text,
'I' => Type::Integer,
'R' => Type::Real,
'B' => Type::Bool,
'O' => Type::Oid,
_ => bail!("Unexpected type char {} in: {}", char, input),
})
})
.collect()
}
lazy_static! {
static ref WHITESPACE_REGEX: Regex = Regex::new(r"\s+").unwrap();
}
fn parse_expected_error(line: &str) -> &str {
lazy_static! {
static ref PGCODE_RE: Regex =
Regex::new("(statement|query) error( pgcode [a-zA-Z0-9]{5})? ?").unwrap();
}
// TODO(benesch): one day this should record the expected pgcode, if
// specified.
let pos = PGCODE_RE.find(line).unwrap().end();
&line[pos..]
}
/// Split on whitespace to normalize multiple spaces to one space. This happens
/// unconditionally in Cockroach mode, regardless of the sort option.
///
/// TODO: this doesn't have the whitespace-collapsing behavior for
/// single-column values that cockroach relies on
pub(crate) fn split_cols(line: &str, expected_columns: usize) -> Vec<&str> {
if expected_columns == 1 {
vec![line.trim()]
} else {
line.split_whitespace().collect()
}
}
| let location = self.location();
let mut conn = None;
if let Some(options) = words.next() {
for option in options.split(',') {
if let Some(value) = option.strip_prefix("conn=") {
conn = Some(value);
} else {
bail!("Unrecognized option {:?} in {:?}", option, options);
}
}
}
lazy_static! {
static ref QUERY_OUTPUT_REGEX: Regex = Regex::new(r"\r?\n----").unwrap();
static ref DOUBLE_LINE_REGEX: Regex = Regex::new(r"(\n|\r\n|$)(\n|\r\n|$)").unwrap();
}
let sql = self.split_at(&QUERY_OUTPUT_REGEX)?;
let output_str = self.split_at(&DOUBLE_LINE_REGEX)?.trim_start();
let output = Output::Values(output_str.lines().map(String::from).collect());
Ok(Record::Simple {
location,
conn,
sql,
output,
output_str,
})
}
}
|
calcRect.js | /*
* Copyright 2016 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var assert = require('proclaim');
var sinon = require('sinon');
var calcRect = require('../../../src/util/calcRect');
suite('calcRect', function() {
test('null', function() {
var rect = calcRect(1, 1, null);
assert(rect.x === 0);
assert(rect.y === 0);
assert(rect.width === 1);
assert(rect.height === 1);
});
test('relative offset', function() {
var rect = calcRect(100, 200, { relativeX: 0.1, relativeY: 0.2 });
assert(rect.x === 0.1);
assert(rect.y === 0.2);
assert(rect.width === 1);
assert(rect.height === 1);
});
test('relative size', function() {
var rect = calcRect(100, 200, { relativeWidth: 0.25, relativeHeight: 0.75 });
assert(rect.x === 0);
assert(rect.y === 0);
assert(rect.width === 0.25);
assert(rect.height === 0.75);
});
test('relative offset and size', function() {
var rect = calcRect(100, 200,
{ relativeX: 0.1, relativeY: 0.2,
relativeWidth: 0.25, relativeHeight: 0.75 });
assert(rect.x === 0.1);
assert(rect.y === 0.2);
assert(rect.width === 0.25);
assert(rect.height === 0.75);
});
test('absolute offset', function() {
var rect = calcRect(100, 200, { absoluteX: 10, absoluteY: 40 });
assert(rect.x === 0.1);
assert(rect.y === 0.2);
assert(rect.width === 1);
assert(rect.height === 1);
});
test('absolute size', function() {
var rect = calcRect(100, 200, { absoluteWidth: 20, absoluteHeight: 50 });
assert(rect.x === 0);
assert(rect.y === 0);
assert(rect.width === 0.2);
assert(rect.height === 0.25);
});
test('absolute offset and size', function() {
var rect = calcRect(100, 200,
{ absoluteX: 10, absoluteY: 40,
absoluteWidth: 20, absoluteHeight: 50 });
assert(rect.x === 0.1);
assert(rect.y === 0.2);
assert(rect.width === 0.2);
assert(rect.height === 0.25);
});
| var rect = calcRect(100, 200,
{ relativeX: 0.1, relativeY: 0.2,
absoluteWidth: 20, absoluteHeight: 50 });
assert(rect.x === 0.1);
assert(rect.y === 0.2);
assert(rect.width === 0.2);
assert(rect.height === 0.25);
});
test('absolute offset relative size', function() {
var rect = calcRect(100, 200,
{ absoluteX: 10, absoluteY: 40,
relativeWidth: 0.25, relativeHeight: 0.75 });
assert(rect.x === 0.1);
assert(rect.y === 0.2);
assert(rect.width === 0.25);
assert(rect.height === 0.75);
});
test('absolute overrides relative', function() {
var rect = calcRect(100, 200,
{ absoluteX: 10, absoluteY: 40,
absoluteWidth: 20, absoluteHeight: 50,
relativeX: 0.12, relativeY: 0.34,
relativeWidth: 0.56, relativeHeight: 0.78 });
assert(rect.x === 0.1);
assert(rect.y === 0.2);
assert(rect.width === 0.2);
assert(rect.height === 0.25);
});
}); | test('relative offset absolute size', function() { |
resource_alicloud_mns_topic_test.go | package alicloud
import (
"fmt"
"log"
"strings"
"testing"
"github.com/alibaba/terraform-provider/alicloud/connectivity"
"github.com/dxh031/ali_mns"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
func init() |
func testSweepMnsTopics(region string) error {
rawClient, err := sharedClientForRegion(region)
if err != nil {
return fmt.Errorf("error getting Alicloud client: %s", err)
}
client := rawClient.(*connectivity.AliyunClient)
prefixes := []string{
"tf-testAcc",
"tf-testacc",
}
var topicAttrs []ali_mns.TopicAttribute
for _, namePrefix := range prefixes {
for {
var nextMaker string
raw, err := client.WithMnsTopicManager(func(topicManager ali_mns.AliTopicManager) (interface{}, error) {
return topicManager.ListTopicDetail(nextMaker, 1000, namePrefix)
})
if err != nil {
return fmt.Errorf("get topicDetails error: %#v", err)
}
topicDetails, _ := raw.(ali_mns.TopicDetails)
for _, attr := range topicDetails.Attrs {
topicAttrs = append(topicAttrs, attr)
}
nextMaker = topicDetails.NextMarker
if nextMaker == "" {
break
}
}
}
for _, topicAttr := range topicAttrs {
name := topicAttr.TopicName
skip := true
for _, prefix := range prefixes {
if strings.HasPrefix(strings.ToLower(name), strings.ToLower(prefix)) {
skip = false
break
}
}
if skip {
log.Printf("[INFO] Skipping mns topic : %s ", name)
continue
}
log.Printf("[INFO] delete mns topic : %s ", name)
_, err := client.WithMnsTopicManager(func(topicManager ali_mns.AliTopicManager) (interface{}, error) {
return nil, topicManager.DeleteTopic(name)
})
if err != nil {
log.Printf("[ERROR] Failed to delete mns topic (%s (%s)): %s", topicAttr.TopicName, topicAttr.TopicName, err)
}
}
return nil
}
func TestAccAlicloudMnsTopic_basic(t *testing.T) {
var attr ali_mns.TopicAttribute
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckMNSTopicDestroy,
Steps: []resource.TestStep{
{
Config: testAccMNSTopicConfig,
Check: resource.ComposeTestCheckFunc(
testAccMNSTopicExist("alicloud_mns_topic.topic", &attr),
resource.TestCheckResourceAttr("alicloud_mns_topic.topic", "name", "tf-testAccMNSTopicConfig"),
),
},
{
Config: testAccMNSTopicConfigUpdate,
Check: resource.ComposeTestCheckFunc(
testAccMNSTopicExist("alicloud_mns_topic.topic", &attr),
resource.TestCheckResourceAttr("alicloud_mns_topic.topic", "name", "tf-testAccMNSTopicConfig"),
resource.TestCheckResourceAttr("alicloud_mns_topic.topic", "maximum_message_size", "12357"),
resource.TestCheckResourceAttr("alicloud_mns_topic.topic", "logging_enabled", "true"),
),
},
},
})
}
func testAccMNSTopicExist(n string, attr *ali_mns.TopicAttribute) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("Not found: %s", n)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No MNSTopic ID is set")
}
client := testAccProvider.Meta().(*connectivity.AliyunClient)
raw, err := client.WithMnsTopicManager(func(topicManager ali_mns.AliTopicManager) (interface{}, error) {
return topicManager.GetTopicAttributes(rs.Primary.ID)
})
if err != nil {
return err
}
instance, _ := raw.(ali_mns.TopicAttribute)
if instance.TopicName != rs.Primary.ID {
return fmt.Errorf("mns topic %s not found", n)
}
*attr = instance
return nil
}
}
func testAccCheckMNSTopicDestroy(s *terraform.State) error {
client := testAccProvider.Meta().(*connectivity.AliyunClient)
mnsService := MnsService{}
for _, rs := range s.RootModule().Resources {
if rs.Type != "alicloud_mns_topic" {
continue
}
_, err := client.WithMnsTopicManager(func(topicManager ali_mns.AliTopicManager) (interface{}, error) {
return topicManager.GetTopicAttributes(rs.Primary.ID)
})
if err != nil {
if mnsService.TopicNotExistFunc(err) {
continue
}
return err
}
return fmt.Errorf("MNS Topic %s still exist", rs.Primary.ID)
}
return nil
}
const testAccMNSTopicConfig = `
variable "name" {
default = "tf-testAccMNSTopicConfig"
}
resource "alicloud_mns_topic" "topic"{
name="${var.name}"
}`
const testAccMNSTopicConfigUpdate = `
variable "name" {
default = "tf-testAccMNSTopicConfig"
}
resource "alicloud_mns_topic" "topic"{
name="${var.name}"
maximum_message_size=12357
logging_enabled=true
}`
| {
resource.AddTestSweepers("alicloud_mns_topic", &resource.Sweeper{
Name: "alicloud_mns_topic",
F: testSweepMnsTopics,
})
} |
goog.net.xhriopool.js | goog.provide("goog.net.XhrIoPool");
goog.require("goog.net.XhrIo"); | goog.net.XhrIoPool = function(opt_headers, opt_minCount, opt_maxCount, opt_withCredentials) {
this.headers_ = opt_headers;
this.withCredentials_ = !!opt_withCredentials;
goog.structs.PriorityPool.call(this, opt_minCount, opt_maxCount);
};
goog.inherits(goog.net.XhrIoPool, goog.structs.PriorityPool);
goog.net.XhrIoPool.prototype.createObject = function() {
const xhrIo = new goog.net.XhrIo();
const headers = this.headers_;
if (headers) {
headers.forEach(function(value, key) {
xhrIo.headers.set(key, value);
});
}
if (this.withCredentials_) {
xhrIo.setWithCredentials(true);
}
return xhrIo;
};
goog.net.XhrIoPool.prototype.objectCanBeReused = function(obj) {
const xhr = obj;
return !xhr.isDisposed() && !xhr.isActive();
};
//# sourceMappingURL=goog.net.xhriopool.js.map | goog.require("goog.structs.PriorityPool");
goog.requireType("goog.structs.Map"); |
WalletStandard.tsx | import React /* , { ReactNode } */ from 'react';
import { Trans } from '@lingui/macro';
import {
More,
Amount,
Fee,
Form,
TextField as MintTextField,
AlertDialog,
CopyToClipboard,
Flex,
Card,
ConfirmDialog,
} from '@mint/core';
import { makeStyles } from '@material-ui/core/styles';
import { useDispatch, useSelector } from 'react-redux';
import isNumeric from 'validator/es/lib/isNumeric';
import { useForm, useWatch } from 'react-hook-form';
import {
/*
Tooltip,
Accordion,
AccordionSummary,
AccordionDetails,
*/
Box,
Typography,
Button,
TextField,
InputAdornment,
Grid,
ListItemIcon,
MenuItem,
} from '@material-ui/core';
import {
// ExpandMore as ExpandMoreIcon,
// Help as HelpIcon,
Delete as DeleteIcon,
} from '@material-ui/icons';
import {
get_address,
send_transaction,
farm_block,
} from '../../../modules/message';
import { /* mojo_to_mint_string, */ mint_to_mojo } from '../../../util/mint';
import { openDialog } from '../../../modules/dialog';
import { get_transaction_result } from '../../../util/transaction_result';
import config from '../../../config/config';
import type { RootState } from '../../../modules/rootReducer';
import WalletHistory from '../WalletHistory';
// import useCurrencyCode from '../../../hooks/useCurrencyCode';
import { deleteUnconfirmedTransactions } from '../../../modules/incoming';
// import WalletGraph from '../WalletGraph';
import WalletCards from './WalletCards';
import WalletStatus from '../WalletStatus';
import useOpenDialog from '../../../hooks/useOpenDialog';
const drawerWidth = 240;
const useStyles = makeStyles((theme) => ({
front: {
zIndex: 100,
},
resultSuccess: {
color: '#3AAC59',
},
resultFailure: {
color: 'red',
},
root: {
display: 'flex',
paddingLeft: '0px',
},
toolbar: {
paddingRight: 24, // keep right padding when drawer closed
},
toolbarIcon: {
display: 'flex',
alignItems: 'center',
justifyContent: 'flex-end',
padding: '0 8px',
...theme.mixins.toolbar,
},
appBar: {
zIndex: theme.zIndex.drawer + 1,
transition: theme.transitions.create(['width', 'margin'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
},
appBarShift: {
marginLeft: drawerWidth,
width: `calc(100% - ${drawerWidth}px)`,
transition: theme.transitions.create(['width', 'margin'], {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
menuButton: {
marginRight: 36,
},
menuButtonHidden: {
display: 'none',
},
title: {
flexGrow: 1,
},
drawerPaper: {
position: 'relative',
whiteSpace: 'nowrap',
width: drawerWidth,
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
drawerPaperClose: {
overflowX: 'hidden',
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.leavingScreen,
}),
width: theme.spacing(7),
[theme.breakpoints.up('sm')]: {
width: theme.spacing(9),
},
},
appBarSpacer: theme.mixins.toolbar,
content: {
flexGrow: 1,
height: '100vh',
overflow: 'auto',
},
container: {
paddingTop: theme.spacing(0),
paddingBottom: theme.spacing(0),
paddingRight: theme.spacing(0),
},
paper: {
marginTop: theme.spacing(2),
padding: theme.spacing(2),
display: 'flex',
overflow: 'auto',
flexDirection: 'column',
},
fixedHeight: {
height: 240,
},
heading: {
fontSize: theme.typography.pxToRem(15),
fontWeight: theme.typography.fontWeightRegular,
},
drawerWallet: {
position: 'relative',
whiteSpace: 'nowrap',
width: drawerWidth,
height: '100%',
transition: theme.transitions.create('width', {
easing: theme.transitions.easing.sharp,
duration: theme.transitions.duration.enteringScreen,
}),
},
sendCard: {
marginTop: theme.spacing(2),
},
sendButton: {
marginTop: theme.spacing(2),
marginBottom: theme.spacing(2),
width: 150,
height: 50,
},
copyButton: {
marginTop: theme.spacing(0),
marginBottom: theme.spacing(0),
height: 56,
},
cardTitle: {
paddingLeft: theme.spacing(1),
paddingTop: theme.spacing(1),
marginBottom: theme.spacing(1),
},
cardSubSection: {
paddingLeft: theme.spacing(3),
paddingRight: theme.spacing(3),
paddingTop: theme.spacing(1),
},
walletContainer: {
marginBottom: theme.spacing(5),
},
table_root: {
width: '100%',
maxHeight: 600,
overflowY: 'scroll',
},
table: {
height: '100%',
overflowY: 'scroll',
},
tableBody: {
height: '100%',
overflowY: 'scroll',
},
row: {
width: 700,
},
cell_short: {
fontSize: '14px',
width: 50,
overflowWrap: 'break-word' /* Renamed property in CSS3 draft spec */,
},
amountField: {
paddingRight: 20,
},
}));
/*
type BalanceCardSubSectionProps = {
title: ReactNode;
tooltip?: ReactNode;
balance: number;
};
function BalanceCardSubSection(props: BalanceCardSubSectionProps) {
const currencyCode = useCurrencyCode();
return (
<Grid item xs={12}>
<Box display="flex">
<Box flexGrow={1}>
<Typography variant="subtitle1">
{props.title}
{props.tooltip && (
<Tooltip title={props.tooltip}>
<HelpIcon style={{ color: '#c8c8c8', fontSize: 12 }} />
</Tooltip>
)}
</Typography>
</Box>
<Box>
<Typography variant="subtitle1">
{mojo_to_mint_string(props.balance)} {currencyCode}
</Typography>
</Box>
</Box>
</Grid>
);
}
type BalanceCardProps = {
wallet_id: number;
};
function BalanceCard(props: BalanceCardProps) {
const { wallet_id } = props;
const wallet = useSelector((state: RootState) =>
state.wallet_state.wallets?.find((item) => item.id === wallet_id),
);
const balance = wallet?.wallet_balance?.confirmed_wallet_balance;
const balance_spendable = wallet?.wallet_balance?.spendable_balance;
const balance_pending = wallet?.wallet_balance?.pending_balance;
const pending_change = wallet?.wallet_balance?.pending_change;
const balance_ptotal = balance + balance_pending;
const classes = useStyles();
return (
<Card title={<Trans>Balance</Trans>}>
<BalanceCardSubSection
title={<Trans>Total Balance</Trans>}
balance={balance}
tooltip={
<Trans>
This is the total amount of mint in the blockchain at the current
peak sub block that is controlled by your private keys. It includes
frozen farming rewards, but not pending incoming and outgoing
transactions.
</Trans>
}
/>
<BalanceCardSubSection
title={<Trans>Spendable Balance</Trans>}
balance={balance_spendable}
tooltip={
<Trans>
This is the amount of Mint that you can currently use to make
transactions. It does not include pending farming rewards, pending
incoming transactions, and Mint that you have just spent but is not
yet in the blockchain.
</Trans>
}
/>
<Grid item xs={12}>
<Box display="flex">
<Box flexGrow={1}>
<Accordion className={classes.front}>
<AccordionSummary
expandIcon={<ExpandMoreIcon />}
aria-controls="panel1a-content"
id="panel1a-header"
>
<Typography className={classes.heading}>
<Trans>View pending balances</Trans>
</Typography>
</AccordionSummary>
<AccordionDetails>
<Grid container spacing={0}>
<BalanceCardSubSection
title={<Trans>Pending Total Balance</Trans>}
balance={balance_ptotal}
tooltip={
<Trans>
This is the total balance + pending balance: it is what
your balance will be after all pending transactions are
confirmed.
</Trans>
}
/>
<BalanceCardSubSection
title={<Trans>Pending Balance</Trans>}
balance={balance_pending}
tooltip={
<Trans>
This is the sum of the incoming and outgoing pending
transactions (not yet included into the blockchain).
This does not include farming rewards.
</Trans>
}
/>
<BalanceCardSubSection
title={<Trans>Pending Change</Trans>}
balance={pending_change}
tooltip={
<Trans>
This is the pending change, which are change coins which
you have sent to yourself, but have not been confirmed
yet.
</Trans>
}
/>
</Grid>
</AccordionDetails>
</Accordion>
</Box>
</Box>
</Grid>
<WalletGraph walletId={wallet_id} />
</Card>
);
}
*/
type SendCardProps = {
wallet_id: number;
};
type SendTransactionData = {
address: string;
amount: string;
fee: string;
};
function SendCard(props: SendCardProps) {
const { wallet_id } = props;
const classes = useStyles();
const dispatch = useDispatch();
const methods = useForm<SendTransactionData>({
shouldUnregister: false,
defaultValues: {
address: '',
amount: '',
fee: '',
},
});
const addressValue = useWatch<string>({
control: methods.control,
name: 'address',
});
const syncing = useSelector(
(state: RootState) => state.wallet_state.status.syncing,
);
const wallet = useSelector((state: RootState) =>
state.wallet_state.wallets?.find((item) => item.id === wallet_id),
);
if (!wallet) {
return null;
}
const { sending_transaction, send_transaction_result } = wallet;
const result = get_transaction_result(send_transaction_result);
const result_message = result.message;
const result_class = result.success
? classes.resultSuccess
: classes.resultFailure;
function | () {
if (addressValue) {
dispatch(farm_block(addressValue));
}
}
function handleSubmit(data: SendTransactionData) {
if (sending_transaction) {
return;
}
if (syncing) {
dispatch(
openDialog(
<AlertDialog>
<Trans>Please finish syncing before making a transaction</Trans>
</AlertDialog>,
),
);
return;
}
const amount = data.amount.trim();
if (!isNumeric(amount)) {
dispatch(
openDialog(
<AlertDialog>
<Trans>Please enter a valid numeric amount</Trans>
</AlertDialog>,
),
);
return;
}
const fee = data.fee.trim();
if (!isNumeric(fee)) {
dispatch(
openDialog(
<AlertDialog>
<Trans>Please enter a valid numeric fee</Trans>
</AlertDialog>,
),
);
return;
}
let address = data.address;
if (address.includes('colour')) {
dispatch(
openDialog(
<AlertDialog>
<Trans>
Error: Cannot send mint to coloured address. Please enter a mint
address.
</Trans>
</AlertDialog>,
),
);
return;
}
if (address.slice(0, 12) === 'mint_addr://') {
address = address.slice(12);
}
if (address.startsWith('0x') || address.startsWith('0X')) {
address = address.slice(2);
}
const amountValue = Number.parseFloat(mint_to_mojo(amount));
const feeValue = Number.parseFloat(mint_to_mojo(fee));
dispatch(send_transaction(wallet_id, amountValue, feeValue, address));
methods.reset();
}
return (
<Card
title={<Trans>Create Transaction</Trans>}
tooltip={
<Trans>
On average there is one minute between each transaction block. Unless
there is congestion you can expect your transaction to be included in
less than a minute.
</Trans>
}
>
{result_message && <p className={result_class}>{result_message}</p>}
<Form methods={methods} onSubmit={handleSubmit}>
<Grid spacing={2} container>
<Grid xs={12} item>
<MintTextField
name="address"
variant="filled"
color="secondary"
fullWidth
disabled={sending_transaction}
label={<Trans>Address / Puzzle hash</Trans>}
/>
</Grid>
<Grid xs={12} md={6} item>
<Amount
id="filled-secondary"
variant="filled"
color="secondary"
name="amount"
disabled={sending_transaction}
label={<Trans>Amount</Trans>}
fullWidth
/>
</Grid>
<Grid xs={12} md={6} item>
<Fee
id="filled-secondary"
variant="filled"
name="fee"
color="secondary"
disabled={sending_transaction}
label={<Trans>Fee</Trans>}
fullWidth
/>
</Grid>
<Grid xs={12} item>
<Flex justifyContent="flex-end" gap={1}>
{!!config.local_test && (
<Button onClick={farm} variant="outlined">
<Trans>Farm</Trans>
</Button>
)}
<Button
variant="contained"
color="primary"
type="submit"
disabled={sending_transaction}
>
<Trans>Send</Trans>
</Button>
</Flex>
</Grid>
</Grid>
</Form>
</Card>
);
}
type AddressCardProps = {
wallet_id: number;
};
function AddressCard(props: AddressCardProps) {
const { wallet_id } = props;
const dispatch = useDispatch();
const wallet = useSelector((state: RootState) =>
state.wallet_state.wallets?.find((item) => item.id === wallet_id),
);
if (!wallet) {
return null;
}
const { address } = wallet;
function newAddress() {
dispatch(get_address(wallet_id, true));
}
return (
<Card
title={<Trans>Receive Address</Trans>}
action={
<Button onClick={newAddress} variant="outlined">
<Trans>New Address</Trans>
</Button>
}
tooltip={
<Trans>
HD or Hierarchical Deterministic keys are a type of public key/private
key scheme where one private key can have a nearly infinite number of
different public keys (and therefor wallet receive addresses) that
will all ultimately come back to and be spendable by a single private
key.
</Trans>
}
>
<Grid item xs={12}>
<Box display="flex">
<Box flexGrow={1}>
<TextField
label={<Trans>Address</Trans>}
value={address}
variant="filled"
InputProps={{
readOnly: true,
endAdornment: (
<InputAdornment position="end">
<CopyToClipboard value={address} />
</InputAdornment>
),
}}
fullWidth
/>
</Box>
</Box>
</Grid>
</Card>
);
}
type StandardWalletProps = {
wallet_id: number;
showTitle?: boolean;
};
export default function StandardWallet(props: StandardWalletProps) {
const { wallet_id, showTitle } = props;
const dispatch = useDispatch();
const openDialog = useOpenDialog();
async function handleDeleteUnconfirmedTransactions() {
const deleteConfirmed = await openDialog(
<ConfirmDialog
title={<Trans>Confirmation</Trans>}
confirmTitle={<Trans>Delete</Trans>}
confirmColor="danger"
>
<Trans>Are you sure you want to delete unconfirmed transactions?</Trans>
</ConfirmDialog>,
);
// @ts-ignore
if (deleteConfirmed) {
dispatch(deleteUnconfirmedTransactions(wallet_id));
}
}
return (
<Flex flexDirection="column" gap={1}>
<Flex gap={1} alignItems="center">
<Flex flexGrow={1}>
{showTitle && (
<Typography variant="h5" gutterBottom>
<Trans>Mint Wallet</Trans>
</Typography>
)}
</Flex>
<Flex gap={1} alignItems="center">
<Flex alignItems="center">
<Typography variant="body1" color="textSecondary">
<Trans>Wallet Status:</Trans>
</Typography>
<WalletStatus height />
</Flex>
<More>
{({ onClose }) => (
<Box>
<MenuItem
onClick={() => {
onClose();
handleDeleteUnconfirmedTransactions();
}}
>
<ListItemIcon>
<DeleteIcon />
</ListItemIcon>
<Typography variant="inherit" noWrap>
<Trans>Delete Unconfirmed Transactions</Trans>
</Typography>
</MenuItem>
</Box>
)}
</More>
</Flex>
</Flex>
<Flex flexDirection="column" gap={3}>
<WalletCards wallet_id={wallet_id} />
<SendCard wallet_id={wallet_id} />
<AddressCard wallet_id={wallet_id} />
<WalletHistory walletId={wallet_id} />
</Flex>
</Flex>
);
}
| farm |
resources.go | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
package wire
import (
"fmt"
"regexp"
"strings"
)
// ValidateZone verifies that the `input` string has the format of a valid
// Google Cloud zone. An example zone is "europe-west1-b".
// See https://cloud.google.com/compute/docs/regions-zones for more information.
func ValidateZone(input string) error {
parts := strings.Split(input, "-")
if len(parts) != 3 {
return fmt.Errorf("pubsublite: invalid zone %q", input)
}
return nil
}
// ValidateRegion verifies that the `input` string has the format of a valid
// Google Cloud region. An example region is "europe-west1".
// See https://cloud.google.com/compute/docs/regions-zones for more information.
func ValidateRegion(input string) error {
parts := strings.Split(input, "-")
if len(parts) != 2 {
return fmt.Errorf("pubsublite: invalid region %q", input)
}
return nil
}
// ZoneToRegion returns the region that the given zone is in.
func ZoneToRegion(zone string) (string, error) {
if err := ValidateZone(zone); err != nil {
return "", err
}
return zone[0:strings.LastIndex(zone, "-")], nil
}
// LocationPath stores a path consisting of a project and zone/region.
type LocationPath struct {
// A Google Cloud project. The project ID (e.g. "my-project") or the project
// number (e.g. "987654321") can be provided.
Project string
// A Google Cloud zone or region, for example "us-central1-a", "us-central1".
Location string
}
func (l LocationPath) String() string {
return fmt.Sprintf("projects/%s/locations/%s", l.Project, l.Location)
}
var locPathRE = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)$`)
// ParseLocationPath parses a project/location path.
func ParseLocationPath(input string) (LocationPath, error) {
parts := locPathRE.FindStringSubmatch(input)
if len(parts) < 3 { | }
return LocationPath{Project: parts[1], Location: parts[2]}, nil
}
// TopicPath stores the full path of a Pub/Sub Lite topic.
type TopicPath struct {
// A Google Cloud project. The project ID (e.g. "my-project") or the project
// number (e.g. "987654321") can be provided.
Project string
// A Google Cloud zone, for example "us-central1-a".
Zone string
// The ID of the Pub/Sub Lite topic, for example "my-topic-name".
TopicID string
}
func (t TopicPath) String() string {
return fmt.Sprintf("projects/%s/locations/%s/topics/%s", t.Project, t.Zone, t.TopicID)
}
// Location returns the topic's location path.
func (t TopicPath) Location() LocationPath {
return LocationPath{Project: t.Project, Location: t.Zone}
}
var topicPathRE = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)/topics/([^/]+)$`)
// ParseTopicPath parses the full path of a Pub/Sub Lite topic.
func ParseTopicPath(input string) (TopicPath, error) {
parts := topicPathRE.FindStringSubmatch(input)
if len(parts) < 4 {
return TopicPath{}, fmt.Errorf("pubsublite: invalid topic path %q. valid format is %q",
input, "projects/PROJECT_ID/locations/ZONE/topics/TOPIC_ID")
}
return TopicPath{Project: parts[1], Zone: parts[2], TopicID: parts[3]}, nil
}
// SubscriptionPath stores the full path of a Pub/Sub Lite subscription.
type SubscriptionPath struct {
// A Google Cloud project. The project ID (e.g. "my-project") or the project
// number (e.g. "987654321") can be provided.
Project string
// A Google Cloud zone. An example zone is "us-central1-a".
Zone string
// The ID of the Pub/Sub Lite subscription, for example
// "my-subscription-name".
SubscriptionID string
}
func (s SubscriptionPath) String() string {
return fmt.Sprintf("projects/%s/locations/%s/subscriptions/%s", s.Project, s.Zone, s.SubscriptionID)
}
// Location returns the subscription's location path.
func (s SubscriptionPath) Location() LocationPath {
return LocationPath{Project: s.Project, Location: s.Zone}
}
var subsPathRE = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)/subscriptions/([^/]+)$`)
// ParseSubscriptionPath parses the full path of a Pub/Sub Lite subscription.
func ParseSubscriptionPath(input string) (SubscriptionPath, error) {
parts := subsPathRE.FindStringSubmatch(input)
if len(parts) < 4 {
return SubscriptionPath{}, fmt.Errorf("pubsublite: invalid subscription path %q. valid format is %q",
input, "projects/PROJECT_ID/locations/ZONE/subscriptions/SUBSCRIPTION_ID")
}
return SubscriptionPath{Project: parts[1], Zone: parts[2], SubscriptionID: parts[3]}, nil
}
// ReservationPath stores the full path of a Pub/Sub Lite reservation.
type ReservationPath struct {
// A Google Cloud project. The project ID (e.g. "my-project") or the project
// number (e.g. "987654321") can be provided.
Project string
// A Google Cloud region. An example region is "us-central1".
Region string
// The ID of the Pub/Sub Lite reservation, for example "my-reservation-name".
ReservationID string
}
func (r ReservationPath) String() string {
return fmt.Sprintf("projects/%s/locations/%s/reservations/%s", r.Project, r.Region, r.ReservationID)
}
// Location returns the reservation's location path.
func (r ReservationPath) Location() LocationPath {
return LocationPath{Project: r.Project, Location: r.Region}
}
var reservationPathRE = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)/reservations/([^/]+)$`)
// ParseReservationPath parses the full path of a Pub/Sub Lite reservation.
func ParseReservationPath(input string) (ReservationPath, error) {
parts := reservationPathRE.FindStringSubmatch(input)
if len(parts) < 4 {
return ReservationPath{}, fmt.Errorf("pubsublite: invalid reservation path %q. valid format is %q",
input, "projects/PROJECT_ID/locations/REGION/reservations/RESERVATION_ID")
}
return ReservationPath{Project: parts[1], Region: parts[2], ReservationID: parts[3]}, nil
}
type topicPartition struct {
Path string
Partition int
}
func (tp topicPartition) String() string {
return fmt.Sprintf("%s/partitions/%d", tp.Path, tp.Partition)
}
type subscriptionPartition struct {
Path string
Partition int
}
func (sp subscriptionPartition) String() string {
return fmt.Sprintf("%s/partitions/%d", sp.Path, sp.Partition)
}
// MessageMetadata holds properties of a message published to the Pub/Sub Lite
// service.
//
// NOTE: This is duplicated in the pscompat package in order to generate nicer
// docs and should be kept consistent.
type MessageMetadata struct {
// The topic partition the message was published to.
Partition int
// The offset the message was assigned.
Offset int64
}
func (m *MessageMetadata) String() string {
return fmt.Sprintf("%d:%d", m.Partition, m.Offset)
} | return LocationPath{}, fmt.Errorf("pubsublite: invalid location path %q. valid format is %q",
input, "projects/PROJECT_ID/locations/ZONE") |
main.rs | #![allow(unused)]
struct User {
username: String,
email: String,
sign_in_count: u64,
active: bool,
}
#[derive(Debug)]
struct Rectangle {
width: u32,
height: u32,
}
fn | () {
let mut user1 = User {
email: String::from("[email protected]"),
username: String::from("someusername123"),
active: true,
sign_in_count: 1,
};
user1.email = String::from("[email protected]");
let user2 = User {
email: String::from("[email protected]"),
username: String::from("anotherusername567"),
..user1
};
let rect1 = Rectangle { width: 2, height: 3 };
println!("rect1 is {:?}", rect1);
println!("rect1 is {:#?}", rect1);
let area = rect1.area();
let square = Rectangle::square(1);
let rect2 = Rectangle::new(1, 2);
println!("rect2 is {:?}", rect2);
let can_hold = rect1.can_hold(&rect2);
}
impl Rectangle {
fn area(&self) -> u32 {
self.width * self.height
}
fn can_hold(&self, other: &Rectangle) -> bool {
self.width >= other.width && self.height >= other.height
}
fn square(size: u32) -> Rectangle {
Rectangle { width: size, height: size }
}
fn new(width: u32, height: u32) -> Rectangle {
Rectangle { width, height }
}
}
| main |
dir-mixin.js | define(["exports", "./property-accessors.js", "../utils/mixin.js"], function (_exports, _propertyAccessors, _mixin) {
"use strict";
Object.defineProperty(_exports, "__esModule", {
value: true
});
_exports.DirMixin = void 0;
/**
* @fileoverview
* @suppress {checkPrototypalTypes}
* @license Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt The complete set of authors may be found
* at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
* be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
* Google as part of the polymer project is also subject to an additional IP
* rights grant found at http://polymer.github.io/PATENTS.txt
*/
var HOST_DIR = /:host\(:dir\((ltr|rtl)\)\)/g;
var HOST_DIR_REPLACMENT = ':host([dir="$1"])';
var EL_DIR = /([\s\w-#\.\[\]\*]*):dir\((ltr|rtl)\)/g;
var EL_DIR_REPLACMENT = ':host([dir="$2"]) $1';
var DIR_CHECK = /:dir\((?:ltr|rtl)\)/;
var SHIM_SHADOW = Boolean(window['ShadyDOM'] && window['ShadyDOM']['inUse']);
/**
* @type {!Array<!Polymer_DirMixin>}
*/
var DIR_INSTANCES = [];
/** @type {?MutationObserver} */
var observer = null;
var documentDir = '';
function getRTL() {
documentDir = document.documentElement.getAttribute('dir');
}
/**
* @param {!Polymer_DirMixin} instance Instance to set RTL status on
*/
function setRTL(instance) {
if (!instance.__autoDirOptOut) {
var el =
/** @type {!HTMLElement} */
instance;
el.setAttribute('dir', documentDir);
}
}
function updateDirection() {
getRTL();
documentDir = document.documentElement.getAttribute('dir');
for (var i = 0; i < DIR_INSTANCES.length; i++) {
setRTL(DIR_INSTANCES[i]);
}
}
function takeRecords() {
if (observer && observer.takeRecords().length) {
updateDirection();
}
}
/**
* Element class mixin that allows elements to use the `:dir` CSS Selector to
* have text direction specific styling.
*
* With this mixin, any stylesheet provided in the template will transform
* `:dir` into `:host([dir])` and sync direction with the page via the
* element's `dir` attribute.
*
* Elements can opt out of the global page text direction by setting the `dir`
* attribute directly in `ready()` or in HTML.
*
* Caveats:
* - Applications must set `<html dir="ltr">` or `<html dir="rtl">` to sync
* direction
* - Automatic left-to-right or right-to-left styling is sync'd with the
* `<html>` element only.
* - Changing `dir` at runtime is supported.
* - Opting out of the global direction styling is permanent
*
* @mixinFunction
* @polymer
* @appliesMixin PropertyAccessors
* @template T
* @param {function(new:T)} superClass Class to apply mixin to.
* @return {function(new:T)} superClass with mixin applied.
*/
var DirMixin = (0, _mixin.dedupingMixin)(function (base) {
if (!SHIM_SHADOW) {
if (!observer) {
getRTL();
observer = new MutationObserver(updateDirection);
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['dir']
});
}
}
/**
* @constructor
* @implements {Polymer_PropertyAccessors}
* @private
*/
var elementBase = (0, _propertyAccessors.PropertyAccessors)(base);
/**
* @polymer
* @mixinClass
* @implements {Polymer_DirMixin}
*/
var Dir =
/*#__PURE__*/
function (_elementBase) {
babelHelpers.inherits(Dir, _elementBase);
babelHelpers.createClass(Dir, null, [{
key: "_processStyleText",
/**
* @param {string} cssText .
* @param {string} baseURI .
* @return {string} .
* @suppress {missingProperties} Interfaces in closure do not inherit statics, but classes do
* @nocollapse
*/
value: function _processStyleText(cssText, baseURI) {
// TODO(https://github.com/google/closure-compiler/issues/3240):
// Change back to just super.methodCall()
cssText = elementBase._processStyleText.call(this, cssText, baseURI);
if (!SHIM_SHADOW && DIR_CHECK.test(cssText)) {
cssText = this._replaceDirInCssText(cssText);
this.__activateDir = true;
}
return cssText;
}
/**
* Replace `:dir` in the given CSS text
*
* @param {string} text CSS text to replace DIR
* @return {string} Modified CSS
* @nocollapse
*/
}, {
key: "_replaceDirInCssText",
value: function _replaceDirInCssText(text) {
var replacedText = text;
replacedText = replacedText.replace(HOST_DIR, HOST_DIR_REPLACMENT);
replacedText = replacedText.replace(EL_DIR, EL_DIR_REPLACMENT);
return replacedText;
}
}]);
function | () {
var _this;
babelHelpers.classCallCheck(this, Dir);
_this = babelHelpers.possibleConstructorReturn(this, babelHelpers.getPrototypeOf(Dir).call(this));
/** @type {boolean} */
_this.__autoDirOptOut = false;
return _this;
}
/**
* @override
* @suppress {invalidCasts} Closure doesn't understand that `this` is an
* HTMLElement
* @return {void}
*/
babelHelpers.createClass(Dir, [{
key: "ready",
value: function ready() {
babelHelpers.get(babelHelpers.getPrototypeOf(Dir.prototype), "ready", this).call(this);
this.__autoDirOptOut =
/** @type {!HTMLElement} */
this.hasAttribute('dir');
}
/**
* @override
* @suppress {missingProperties} If it exists on elementBase, it can be
* super'd
* @return {void}
*/
}, {
key: "connectedCallback",
value: function connectedCallback() {
if (elementBase.prototype.connectedCallback) {
babelHelpers.get(babelHelpers.getPrototypeOf(Dir.prototype), "connectedCallback", this).call(this);
}
if (this.constructor.__activateDir) {
takeRecords();
DIR_INSTANCES.push(this);
setRTL(this);
}
}
/**
* @override
* @suppress {missingProperties} If it exists on elementBase, it can be
* super'd
* @return {void}
*/
}, {
key: "disconnectedCallback",
value: function disconnectedCallback() {
if (elementBase.prototype.disconnectedCallback) {
babelHelpers.get(babelHelpers.getPrototypeOf(Dir.prototype), "disconnectedCallback", this).call(this);
}
if (this.constructor.__activateDir) {
var idx = DIR_INSTANCES.indexOf(this);
if (idx > -1) {
DIR_INSTANCES.splice(idx, 1);
}
}
}
}]);
return Dir;
}(elementBase);
Dir.__activateDir = false;
return Dir;
});
_exports.DirMixin = DirMixin;
}); | Dir |
jogoteca.py | from flask import Flask, render_template, request, redirect, session, flash, url_for
app = Flask(__name__)
app.secret_key = 'alura'
class | :
def __init__(self, nome, categoria, console):
self.nome = nome
self.categoria = categoria
self.console = console
class Usuario:
def __init__(self, id, nome, senha):
self.id = id
self.nome = nome
self.senha = senha
usuario1 = Usuario('luan', 'Luan Marques', '1234')
usuario2 = Usuario('nico', 'Nico Steppat', '7a1')
usuario3 = Usuario('flavio', 'Flávio', 'javascript')
usuarios = { usuario1.id: usuario1,
usuario2.id: usuario2,
usuario3.id: usuario3 }
jogo1 = Jogo('Super Mario', 'Acao', 'SNES')
jogo2 = Jogo('Pokemon Gold', 'RPG', 'GBA')
jogo3 = Jogo('Mortal Kombat', 'Luta', 'SNES')
lista = [jogo1, jogo2, jogo3]
@app.route('/')
def index():
return render_template('lista.html', titulo='Jogos',
jogos=lista)
@app.route('/novo')
def novo():
if 'usuario_logado' not in session or session['usuario_logado'] == None:
return redirect(url_for('login', proxima=url_for('novo')))
return render_template('novo.html', titulo='Novo jogo')
@app.route('/criar', methods=['POST',])
def criar():
nome = request.form['nome']
categoria = request.form['categoria']
console = request.form['console']
jogo = Jogo(nome, categoria, console)
lista.append(jogo)
return redirect(url_for('index'))
@app.route('/login')
def login():
proxima = request.args.get('proxima')
return render_template('login.html', proxima=proxima)
@app.route('/autenticar', methods=['POST',])
def autenticar():
if request.form['usuario'] in usuarios:
usuario = usuarios[request.form['usuario']]
if usuario.senha == request.form['senha']:
session['usuario_logado'] = usuario.id
flash(usuario.nome + ' logou com sucesso!')
proxima_pagina = request.form['proxima']
return redirect(proxima_pagina)
else :
flash('Não logado, tente de novo!')
return redirect(url_for('login'))
@app.route('/logout')
def logout():
session['usuario_logado'] = None
flash('Nenhum usuario logado!')
return redirect(url_for('index'))
app.run(debug=True)
| Jogo |
auth.go | package auth
import (
"context"
"errors"
"os"
"time"
"github.com/golang-jwt/jwt"
"github.com/google/uuid"
"github.com/varunamachi/libx/errx"
)
type AuthData map[string]interface{}
var (
ErrAuthentication = errors.New("auth.user.authenticationError")
ErrUserRetrieval = errors.New("auth.user.retrievalError")
ErrToken = errors.New("auth.user.authTokenError")
)
type Authenticator interface {
Authenticate(gtx context.Context, authData AuthData) error
}
type UserGetter interface {
GetUser(gtx context.Context, authData AuthData) (User, error)
}
type UserAuthenticator interface {
Authenticator
UserGetter
}
// Login - authenticates the user, get's the user information and generates a
// JWT token. The user and the token are then returned. And in case of error
// the error is returned
func Login(
gtx context.Context,
authr UserAuthenticator,
data AuthData) (User, string, error) {
if err := authr.Authenticate(gtx, data); err != nil {
return nil, "", errx.Errf(err, "failed to authenticate user")
}
user, err := authr.GetUser(gtx, data)
if err != nil {
return nil, "", errx.Errf(err, "failed to retrieve user")
}
token := jwt.New(jwt.SigningMethodHS256)
claims := token.Claims.(jwt.MapClaims)
claims["userId"] = user.Id()
claims["exp"] = time.Now().Add(time.Hour * 24).Unix()
signed, err := token.SignedString(GetJWTKey())
if err != nil |
return user, signed, nil
}
//GetJWTKey - gives a unique JWT key
func GetJWTKey() []byte {
jwtKey := os.Getenv("VLIBX_JWT_KEY")
if len(jwtKey) == 0 {
jwtKey = uuid.NewString()
// TODO - may be need to do something better
os.Setenv("VLIBX_JWT_KEY", jwtKey)
}
return []byte(jwtKey)
}
| {
return nil, "", errx.Errf(err, "failed to generate session token")
} |
nav-controller.ts | import { Injectable, Optional } from '@angular/core';
import { NavigationExtras, Router, UrlTree } from '@angular/router';
export const enum NavIntent {
Auto,
Forward,
Back,
Root
}
@Injectable()
export class NavController {
private intent: NavIntent = NavIntent.Auto;
private animated = true;
private stack: string[] = [];
constructor(
@Optional() private router?: Router
) {}
goForward(url: string | UrlTree, animated?: boolean, extras?: NavigationExtras) {
this.setIntent(NavIntent.Forward, animated);
return this.router.navigateByUrl(url, extras);
}
goBack(url: string | UrlTree, animated?: boolean, extras?: NavigationExtras) {
this.setIntent(NavIntent.Back, animated);
return this.router.navigateByUrl(url, extras);
}
goRoot(url: string | UrlTree, animated?: boolean, extras?: NavigationExtras) {
this.setIntent(NavIntent.Root, animated);
return this.router.navigateByUrl(url, extras);
}
setIntent(intent: NavIntent, animated?: boolean) {
this.intent = intent;
this.animated = (animated === undefined)
? intent !== NavIntent.Root
: animated;
}
consumeTransition() {
const guessDirection = this.guessDirection();
let direction = 0;
let animated = false;
if (this.intent === NavIntent.Auto) { | animated = this.animated;
direction = intentToDirection(this.intent);
}
this.intent = NavIntent.Auto;
this.animated = true;
return {
direction,
animated
};
}
private guessDirection() {
const index = this.stack.indexOf(document.location.href);
if (index === -1) {
this.stack.push(document.location.href);
return 1;
} else if (index < this.stack.length - 1) {
this.stack = this.stack.slice(0, index + 1);
return -1;
}
return 0;
}
}
function intentToDirection(intent: NavIntent): number {
switch (intent) {
case NavIntent.Forward: return 1;
case NavIntent.Back: return -1;
default: return 0;
}
} | direction = guessDirection;
animated = direction !== 0;
} else { |
Comp_05_0804.tsx | // Comp_05_0804
import React from 'react';
import { incModCount } from '../modCount'; | incModCount();
}, []);
return <div>
I'm component Comp_05_0804
<div>
</div>
</div>;
};
export default Comp_05_0804; |
const Comp_05_0804: React.FC = () => {
React.useEffect(() => { |
support-cta.tsx | import React from 'react';
import classNames from 'classnames';
import { PossibleSizes } from '@teambit/base-ui.theme.sizes';
import { Paragraph } from '@teambit/base-ui.text.paragraph';
import { alignItems, text } from '@teambit/base-ui.layout.align';
import { Grid } from '@teambit/base-ui.layout.grid-component';
import { H2 } from '@teambit/evangelist.elements.heading';
import { Link } from '@teambit/evangelist.elements.link';
import { Button } from '@teambit/evangelist.elements.button';
import styles from './support-cta.module.scss';
/**
* @name SupportCta
* @description
* A static section, calling users to contact us.
*/
export function | (props: React.HTMLAttributes<HTMLDivElement>) {
const { className, ...rest } = props;
return (
<Grid
colMd={2}
{...rest}
data-bit-id="teambit.evangelist/sections/support-page/support-cta"
className={classNames(className, text.center, text.md.left, styles.mainGrid)}
>
<div className={styles.topSection}>
<H2 size={PossibleSizes.xs}>Need support?</H2>
<Paragraph size={PossibleSizes.xl}>Reach out to our team.</Paragraph>
</div>
<div className={classNames(styles.buttons, alignItems.center)}>
<Link className={styles.link} href="/support">
<Button importance="cta">Contact support</Button>
</Link>
<div className={styles.spacer} />
<Link className={styles.link} href="/pricing">
<Button importance="normal">Upgrade support</Button>
</Link>
</div>
</Grid>
);
}
| SupportCta |
opt-frontend-common.ts | // Python Tutor: https://github.com/pgbovine/OnlinePythonTutor/
// Copyright (C) Philip Guo ([email protected])
// LICENSE: https://github.com/pgbovine/OnlinePythonTutor/blob/master/LICENSE.txt
/* TODO
- we're always referring to top-level CSS selectors on the page; maybe
use a this.domRoot pattern like in pytutor.ts?
*/
/// <reference path="_references.ts" />
// for TypeScript
declare var diff_match_patch: any;
declare var codeopticonUsername: string; // FIX later when porting Codeopticon
declare var codeopticonSession: string; // FIX later when porting Codeopticon
require('./lib/diff_match_patch.js');
require('./lib/jquery.ba-dotimeout.min.js');
// need to directly import the class for type checking to work
import {ExecutionVisualizer, assert, htmlspecialchars} from './pytutor';
import {nullTraceErrorLst,unsupportedFeaturesStr} from './footer-html';
// the main event!
//
// NB: this still relies on global state such as localStorage and the
// browser URL hash string, so you still can't have more than one of these
// objects per page; should still be instantiated as a SINGLETON
export abstract class AbstractBaseFrontend {
sessionUUID: string = generateUUID(); // remains constant throughout one page load ("session")
userUUID: string; // remains constant for a particular "user" throughout multiple page loads (stored in localStorage on a particular browser)
myVisualizer: ExecutionVisualizer;
originFrontendJsFile: string; // "abstract" -- must override in subclass
// a cache where each element is a pair:
// [appState, cached execution trace]
// that way, if you execute the same code with the same settings again and
// get a cache hit, then there's no need to make a server call
traceCache = [];
// 'edit' or 'display'. also support 'visualize' for backward
// compatibility (same as 'display')
appMode: string = 'edit';
// inputted by user for raw_input / mouse_input events
rawInputLst: string[] = [];
isExecutingCode: boolean = false;
// optional: not all frontends keep track of deltas
dmp = new diff_match_patch();
curCode = ''; // for dmp snapshots, kinda kludgy
deltaObj : {start: string, deltas: any[], v: number, startTime: number, executeTime?: number} = undefined;
num414Tries = 0;
// note that we use '2' and '3' instead of 'py2' and 'py3' due to legacy reasons
langSettingToBackendScript = {
// backend scripts to execute (Python 2 and 3 variants, if available)
// make two copies of ../web_exec.py and give them the following names,
// then change the first line (starting with #!) to the proper version
// of the Python interpreter (i.e., Python 2 or Python 3).
// Note that your hosting provider might have stringent rules for what
// kind of scripts are allowed to execute. For instance, my provider
// (Webfaction) seems to let scripts execute only if permissions are
// something like:
// -rwxr-xr-x 1 pgbovine pgbovine 2.5K Jul 5 22:46 web_exec_py2.py*
// (most notably, only the owner of the file should have write
// permissions)
'2': 'web_exec_py2.py',
'3': 'web_exec_py3.py',
// empty dummy scripts just to do logging on Apache server
'js': 'web_exec_js.py',
'ts': 'web_exec_ts.py',
'java': 'web_exec_java.py',
'ruby': 'web_exec_ruby.py',
'c': 'web_exec_c.py',
'cpp': 'web_exec_cpp.py',
// experimental!
'py3anaconda': 'web_exec_py3anaconda.py',
};
// these settings are all customized for my own server setup,
// so you will need to customize for your server:
serverRoot = (window.location.protocol === 'https:') ?
'https://cokapi.com/' : // my certificate for https is registered via cokapi.com, so use it for now
'http://cokapi.com/'; // try cokapi.com so that hopefully it works through firewalls better than directly using IP addr
// (but that's just an unsubstantiated hunch)
// randomly pick one backup server to load balance:
backupHttpServerRoot = 'http://142.93.219.188/';
// see ../../v4-cokapi/cokapi.js for details
langSettingToJsonpEndpoint = {
'2': null,
'3': null,
'js': this.serverRoot + 'exec_js_jsonp',
'ts': this.serverRoot + 'exec_ts_jsonp',
'java': this.serverRoot + 'exec_java_jsonp',
'ruby': this.serverRoot + 'exec_ruby_jsonp',
'c': this.serverRoot + 'exec_c_jsonp',
'cpp': this.serverRoot + 'exec_cpp_jsonp',
'py3anaconda': this.serverRoot + 'exec_pyanaconda_jsonp',
};
langSettingToJsonpEndpointBackup = {
'2': null,
'3': null,
'js': this.backupHttpServerRoot + 'exec_js_jsonp',
'ts': this.backupHttpServerRoot + 'exec_ts_jsonp',
'java': this.backupHttpServerRoot + 'exec_java_jsonp',
'ruby': this.backupHttpServerRoot + 'exec_ruby_jsonp',
'c': this.backupHttpServerRoot + 'exec_c_jsonp',
'cpp': this.backupHttpServerRoot + 'exec_cpp_jsonp',
'py3anaconda': this.backupHttpServerRoot + 'exec_pyanaconda_jsonp',
};
abstract executeCode(forceStartingInstr?: number, forceRawInputLst?: string[]) : any;
abstract finishSuccessfulExecution() : any; // called by executeCodeAndCreateViz
abstract handleUncaughtException(trace: any[]) : any; // called by executeCodeAndCreateViz
constructor(params: any = {}) {
if (supports_html5_storage()) {
// generate a unique UUID per "user" (as indicated by a single browser
// instance on a user's machine, which can be more precise than IP
// addresses due to sharing of IP addresses within, say, a school
// computer lab)
// added on 2015-01-27 for more precise user identification
if (!localStorage.getItem('opt_uuid')) {
localStorage.setItem('opt_uuid', generateUUID());
}
this.userUUID = localStorage.getItem('opt_uuid');
assert(this.userUUID);
} else {
this.userUUID = undefined;
}
// register a generic AJAX error handler
$(document).ajaxError((evt, jqxhr, settings, exception) => {
if (this.ignoreAjaxError(settings)) {
return; // early return!
}
// On my server ...
// This jqxhr.responseText might mean the URL is too long, since the error
// message returned by the server is something like this in nginx:
//
// <html>
// <head><title>414 Request-URI Too Large</title></head>
// <body bgcolor="white">
// <center><h1>414 Request-URI Too Large</h1></center>
// <hr><center>nginx</center>
// </body>
// </html>
//
// Note that you'll probably need to customize this check for your server.
if (jqxhr && jqxhr.responseText && jqxhr.responseText.indexOf('414') >= 0) {
// ok this is an UBER UBER hack. If this happens just once, then
// force click the "Visualize Execution" button again and re-try.
// why? what's the difference the second time around? the diffs_json
// parameter (derived from deltaObj) will be *empty* the second time
// around since it gets reset on every execution. if diffs_json is
// HUGE, then that might force the URL to be too big without your
// code necessarily being too big, so give it a second shot with an
// empty diffs_json. if it STILL fails, then display the error
// message and give up.
if (this.num414Tries === 0) {
this.num414Tries++;
$("#executeBtn").click();
} else {
this.setFronendError(["Server error! Your code might be too long for this tool. Shorten your code and re-try. [#CodeTooLong]"]);
this.num414Tries = 0; // reset this to 0 AFTER setFronendError so that in setFronendError we can know that it's a 414 error (super hacky!)
}
} else {
this.setFronendError(
["Server error! Your code might have an INFINITE LOOP or be running for too long.",
"The server may also be OVERLOADED. Or you're behind a FIREWALL that blocks access.",
"Try again later. This site is free with NO technical support. [#UnknownServerError]"]);
}
this.doneExecutingCode();
});
this.clearFrontendError();
$("#embedLinkDiv").hide();
$("#executeBtn")
.attr('disabled', false)
.click(this.executeCodeFromScratch.bind(this));
}
ignoreAjaxError(settings) {return false;} // subclasses should override
// empty stub so that our code doesn't crash.
// TODO: override this with a version in codeopticon-learner.js if needed
logEventCodeopticon(obj) { } // NOP
getAppState() {return {};} // NOP -- subclasses need to override
setFronendError(lines, ignoreLog=false) {
$("#frontendErrorOutput").html(lines.map(htmlspecialchars).join('<br/>') +
(ignoreLog ? '' : '<p/>(' + unsupportedFeaturesStr + ')'));
// log it to the server as well (unless ignoreLog is on)
if (!ignoreLog) {
var errorStr = lines.join();
var myArgs = this.getAppState();
(myArgs as any).opt_uuid = this.userUUID;
(myArgs as any).session_uuid = this.sessionUUID;
(myArgs as any).error_msg = errorStr;
// very subtle! if you have a 414 error, that means your original
// code was too long to fit in the URL, so CLEAR THE FULL CODE from
// myArgs, or else it will generate a URL that will give a 414 again
// when you run error_log.py!!! this relies on this.num414Tries not
// being reset yet at this point:
if (this.num414Tries > 0) {
(myArgs as any).code = '#CodeTooLong: ' + String((myArgs as any).code.length) + ' bytes';
}
$.get('error_log.py', myArgs, function(dat) {}); // added this logging feature on 2018-02-18
}
}
clearFrontendError() {
$("#frontendErrorOutput").html('');
}
// parsing the URL query string hash
getQueryStringOptions() {
var ril = $.bbq.getState('rawInputLstJSON');
var testCasesLstJSON = $.bbq.getState('testCasesJSON');
// note that any of these can be 'undefined'
return {preseededCode: $.bbq.getState('code'),
preseededCurInstr: Number($.bbq.getState('curInstr')),
verticalStack: $.bbq.getState('verticalStack'),
appMode: $.bbq.getState('mode'),
py: $.bbq.getState('py'),
cumulative: $.bbq.getState('cumulative'),
heapPrimitives: $.bbq.getState('heapPrimitives'),
textReferences: $.bbq.getState('textReferences'),
rawInputLst: ril ? $.parseJSON(ril) : undefined,
demoMode: $.bbq.getState('demo'), // is 'demo mode' on? if so, hide a lot of excess stuff
codcastFile: $.bbq.getState('codcast'), // load a codcast file created using ../recorder.html
codeopticonSession: $.bbq.getState('cosession'),
codeopticonUsername: $.bbq.getState('couser'),
testCasesLst: testCasesLstJSON ? $.parseJSON(testCasesLstJSON) : undefined
};
}
redrawConnectors() {
if (this.myVisualizer &&
(this.appMode == 'display' ||
this.appMode == 'visualize' /* deprecated */)) {
this.myVisualizer.redrawConnectors();
}
}
getBaseBackendOptionsObj() {
var ret = {cumulative_mode: ($('#cumulativeModeSelector').val() == 'true'),
heap_primitives: ($('#heapPrimitivesSelector').val() == 'true'),
show_only_outputs: false, // necessary for legacy reasons, ergh!
origin: this.originFrontendJsFile};
return ret;
}
getBaseFrontendOptionsObj() {
var ret = {// tricky: selector 'true' and 'false' values are strings!
disableHeapNesting: (($('#heapPrimitivesSelector').val() == 'true') ||
($('#heapPrimitivesSelector').val() == 'nevernest')),
textualMemoryLabels: ($('#textualMemoryLabelsSelector').val() == 'true'),
executeCodeWithRawInputFunc: this.executeCodeWithRawInput.bind(this),
// always use the same visualizer ID for all
// instantiated ExecutionVisualizer objects,
// so that they can sync properly across
// multiple clients using TogetherJS in shared sessions.
// this shouldn't lead to problems since only ONE
// ExecutionVisualizer will be shown at a time
visualizerIdOverride: '1',
updateOutputCallback: this.updateOutputCallbackFunc.bind(this),
startingInstruction: 0,
};
return ret;
}
updateOutputCallbackFunc() {
$('#urlOutput,#urlOutputShortened,#embedCodeOutput').val('');
}
executeCodeFromScratch() {
this.rawInputLst = []; // reset!
this.executeCode();
}
executeCodeWithRawInput(rawInputStr, curInstr) {
this.rawInputLst.push(rawInputStr);
this.executeCode(curInstr);
}
startExecutingCode(startingInstruction=0) {
$('#executeBtn').html("Please wait ... executing (takes up to 10 seconds)");
$('#executeBtn').attr('disabled', true);
this.isExecutingCode = true;
}
doneExecutingCode() {
$('#executeBtn').html("Visualize Execution");
$('#executeBtn').attr('disabled', false);
this.isExecutingCode = false;
}
// execute codeToExec and create a new ExecutionVisualizer
// object with outputDiv as its DOM parent
executeCodeAndCreateViz(codeToExec,
pyState,
backendOptionsObj, frontendOptionsObj,
outputDiv) {
var vizCallback = (dataFromBackend) => {
var trace = dataFromBackend.trace;
// don't enter visualize mode if there are killer errors:
if (!trace ||
(trace.length == 0) ||
(trace[trace.length - 1].event == 'uncaught_exception')) {
this.handleUncaughtException(trace);
if (trace.length == 1) {
this.setFronendError([trace[0].exception_msg]);
} else if (trace.length > 0 && trace[trace.length - 1].exception_msg) {
this.setFronendError([trace[trace.length - 1].exception_msg]);
} else {
this.setFronendError(nullTraceErrorLst);
}
} else {
// fail-soft to prevent running off of the end of trace
if (frontendOptionsObj.startingInstruction >= trace.length) {
frontendOptionsObj.startingInstruction = 0;
}
if (frontendOptionsObj.runTestCaseCallback) {
// hacky! DO NOT actually create a visualization! instead call:
frontendOptionsObj.runTestCaseCallback(trace);
} else {
// success!
this.myVisualizer = new ExecutionVisualizer(outputDiv, dataFromBackend, frontendOptionsObj);
// SUPER HACK -- slip in backendOptionsObj as an extra field
// NB: why do we do this? for more detailed logging?
(this.myVisualizer as any).backendOptionsObj = backendOptionsObj;
this.finishSuccessfulExecution(); // TODO: should we also run this if we're calling runTestCaseCallback?
}
}
}
this.executeCodeAndRunCallback(codeToExec,
pyState,
backendOptionsObj, frontendOptionsObj,
vizCallback.bind(this));
}
// execute code and call the execCallback function when the server
// returns data via Ajax
executeCodeAndRunCallback(codeToExec,
pyState,
backendOptionsObj, frontendOptionsObj,
execCallback) {
var callbackWrapper = (dataFromBackend) => {
this.clearFrontendError(); // clear old errors first; execCallback may put in a new error:
execCallback(dataFromBackend); // call the main event first
// run this at the VERY END after all the dust has settled
this.doneExecutingCode(); // rain or shine, we're done executing!
// tricky hacky reset
this.num414Tries = 0;
};
var backendScript = this.langSettingToBackendScript[pyState];
assert(backendScript);
var jsonp_endpoint = this.langSettingToJsonpEndpoint[pyState]; // maybe null
if (!backendScript) {
this.setFronendError(["Server configuration error: No backend script"]);
return;
}
this.clearFrontendError();
this.startExecutingCode(frontendOptionsObj.startingInstruction);
frontendOptionsObj.lang = pyState;
// kludgy exceptions
if (pyState === '2') {
frontendOptionsObj.lang = 'py2';
} else if (pyState === '3') {
frontendOptionsObj.lang = 'py3';
} else if (pyState === 'java') {
// TODO: should we still keep this exceptional case?
frontendOptionsObj.disableHeapNesting = true; // never nest Java objects, seems like a good default
}
// if we don't have any deltas, then don't bother sending deltaObj:
// NB: not all subclasses will initialize this.deltaObj
var deltaObjStringified = (this.deltaObj && (this.deltaObj.deltas.length > 0)) ? JSON.stringify(this.deltaObj) : null;
if (deltaObjStringified) {
// if deltaObjStringified is too long, then that will likely make
// the URL way too long. in that case, just make it null and don't
// send a delta (NB: actually set it to a canary value "overflow").
// we'll lose some info but at least the URL will hopefully not overflow:
if (deltaObjStringified.length > 4096) {
deltaObjStringified = "overflow"; // set a canary to overflow
}
} else {
// if we got here due to the num414Tries retries hack, set
// canary to "overflow"
if (this.num414Tries > 0) {
deltaObjStringified = "overflow_414";
}
}
// if we can find a matching cache entry, then use it!!!
if (this.traceCache) {
var appState = this.getAppState();
var cachedTrace = this.traceCacheGet(appState);
if (cachedTrace) {
//console.log("CACHE HIT!", appState);
callbackWrapper({code: (appState as any).code, trace: cachedTrace});
return; // return early without going to the server at all!
}
}
// everything below here is an ajax (async) call to the server ...
if (jsonp_endpoint) {
assert (pyState !== '2' && pyState !== '3');
// 2018-08-19: this is an uncommon use case (only used for https iframe embedding)
if (jsonp_endpoint.indexOf('https:') == 0) {
this.setFronendError(["Error: https execution of non-Python code is not currently supported. [#nonPythonHttps]"]);
this.doneExecutingCode();
return;
}
var retryOnBackupServer = () => {
// first log a #TryBackup error entry:
this.setFronendError(["Main server is busy or has errors; re-trying using backup server " + this.backupHttpServerRoot + " ... [#TryBackup]"]);
// now re-try the query using the backup server:
var backup_jsonp_endpoint = this.langSettingToJsonpEndpointBackup[pyState];
assert(backup_jsonp_endpoint);
$.ajax({
url: backup_jsonp_endpoint,
// The name of the callback parameter, as specified by the YQL service
jsonp: "callback",
dataType: "jsonp",
data: {user_script : codeToExec,
options_json: JSON.stringify(backendOptionsObj),
raw_input_json: this.rawInputLst.length > 0 ? JSON.stringify(this.rawInputLst) : null,
},
success: callbackWrapper
});
}
// for non-python, this should be a dummy script for logging
// only, and to check whether there's a 414 error for #CodeTooLong
$.get(backendScript,
{user_script : codeToExec,
options_json: JSON.stringify(backendOptionsObj),
user_uuid: this.userUUID,
session_uuid: this.sessionUUID,
diffs_json: deltaObjStringified},
(dat) => {
// this is super important! only if this first call is a
// SUCCESS do we actually make the REAL call using JSONP.
// the reason why is that we might get a 414 error for
// #CodeTooLong if we try to execute this code, in which
// case we want to either re-try or bail out. this also
// keeps the control flow synchronous. we always try
// the original backendScript, and then we try
// jsonp_endpoint only if that's successful:
// the REAL call uses JSONP
// http://learn.jquery.com/ajax/working-with-jsonp/
$.ajax({
url: jsonp_endpoint,
// for testing
//url: 'http://cokapi.com/test_failure_jsonp',
//url: 'http://cokapi.com/unknown_url',
// The name of the callback parameter, as specified by the YQL service
jsonp: "callback",
dataType: "jsonp",
data: {user_script : codeToExec,
options_json: JSON.stringify(backendOptionsObj),
raw_input_json: this.rawInputLst.length > 0 ? JSON.stringify(this.rawInputLst) : null,
},
success: (dataFromBackend) => {
var trace = dataFromBackend.trace;
var shouldRetry = false;
// the cokapi backend responded successfully, but the
// backend may have issued an error. if so, then
// RETRY with backupHttpServerRoot. otherwise let it
// through to callbackWrapper
if (!trace ||
(trace.length == 0) ||
(trace[trace.length - 1].event == 'uncaught_exception')) {
if (trace.length == 1) {
// we should only retry if there's a legit
// backend error and not just a syntax error:
var msg = trace[0].exception_msg;
if (msg.indexOf('#BackendError') >= 0) {
shouldRetry = true;
}
} else {
shouldRetry = true;
}
}
// don't bother re-trying for https since we don't
// currently have an https backup server
if (window.location.protocol === 'https:') {
shouldRetry = false;
}
if (shouldRetry) {
retryOnBackupServer();
} else {
// accept our fate without retrying
callbackWrapper(dataFromBackend);
}
},
// if there's a server error, then ALWAYS retry:
error: (jqXHR, textStatus, errorThrown) => {
retryOnBackupServer();
// use 'global: false;' below to NOT run the generic ajaxError() function
},
global: false, // VERY IMPORTANT! do not call the generic ajaxError() function when there's an error;
// only call our error handler above; http://api.jquery.com/ajaxerror/
});
}, "text");
} else {
// for Python 2 or 3, directly execute backendScript
assert (pyState === '2' || pyState === '3');
$.get(backendScript,
{user_script : codeToExec,
raw_input_json: this.rawInputLst.length > 0 ? JSON.stringify(this.rawInputLst) : '',
options_json: JSON.stringify(backendOptionsObj),
user_uuid: this.userUUID,
session_uuid: this.sessionUUID,
diffs_json: deltaObjStringified},
callbackWrapper, "json");
}
}
// manage traceCache
// return whether two states match, except don't worry about mode or curInstr
static appStateEqForCache(s1, s2) {
// NB: this isn't always true if we're recording and replaying
// in different frontend files ...
//assert(s1.origin == s2.origin); // sanity check!
return (s1.code == s2.code &&
s1.cumulative == s2.cumulative &&
s1.heapPrimitives == s1.heapPrimitives &&
s1.textReferences == s2.textReferences &&
s1.py == s2.py &&
s1.rawInputLstJSON == s2.rawInputLstJSON);
}
// SILENTLY fail without doing anything if the current app state is
// already in the cache
traceCacheAdd() {
// should only be called if you currently have a working trace;
// otherwise it's useless
assert(this.myVisualizer && this.myVisualizer.curTrace);
var appState = this.getAppState();
// make sure nothing in the cache currently matches appState
for (var i = 0; i < this.traceCache.length; i++) {
var e = this.traceCache[i];
if (AbstractBaseFrontend.appStateEqForCache(e[0], appState)) {
console.warn("traceCacheAdd silently failed, entry already in cache");
return;
}
}
this.traceCache.push([appState, this.myVisualizer.curTrace]);
console.log('traceCacheAdd', this.traceCache);
}
traceCacheGet(appState) {
for (var i = 0; i < this.traceCache.length; i++) {
var e = this.traceCache[i];
if (AbstractBaseFrontend.appStateEqForCache(e[0], appState)) {
return e[1];
}
}
return null;
}
traceCacheClear() {
this.traceCache = [];
}
setSurveyHTML() {
// use ${this.userUUID} within the string ...
var survey_v14 = `
<p style="font-size: 9pt; margin-top: 12px; margin-bottom: 15px; line-height: 150%;">
Help improve this tool by completing a <a style="font-size: 10pt; font-weight: bold;" href="https://docs.google.com/forms/d/e/1FAIpQLSfQJP1ojlv8XzXAvHz0al-J_Hs3GQu4XeblxT8EzS8dIzuaYA/viewform?entry.956368502=${this.userUUID}" target="_blank">short user survey</a>
</p>`;
$('#surveyPane').html(survey_v14);
}
} // END class AbstractBaseFrontend
// removed donation link on 2019-03-26
// Keep this tool free by making a <a style="font-size: 10pt; font-weight: bold;" href="http://pgbovine.net/support.htm" target="_blank">small donation</a> (PayPal, Patreon, credit/debit card)
/* For survey questions. Versions of survey wording:
[see ../../v3/js/opt-frontend-common.js for older versions of survey wording - v1 to v7]
v8: (deployed on 2016-06-20) - like v7 except emphasize the main usage survey more, and have the over-60 survey as auxiliary
const survey_v8 = '\n\
<p style="font-size: 10pt; margin-top: 10px; margin-bottom: 15px; line-height: 175%;">\n\
<span>Support our research and keep this tool free by <a href="https://docs.google.com/forms/d/1-aKilu0PECHZVRSIXHv8vJpEuKUO9uG3MrH864uX56U/viewform" target="_blank">filling out this user survey</a>.</span>\n\
<br/>\n\
<span style="font-size: 9pt;">If you are <b>at least 60 years old</b>, please also fill out <a href="https://docs.google.com/forms/d/1lrXsE04ghfX9wNzTVwm1Wc6gQ5I-B4uw91ACrbDhJs8/viewform" target="_blank">our survey about learning programming</a>.</span>\n\
</p>'
v9: (deployed on 2016-08-14, taken down 2016-12-05) - only put up the "older adults" survey except generalize it to ALL ages, take down the OPT usage survey for now
const survey_v9 = '\n\
<p style="font-size: 10pt; margin-top: 10px; margin-bottom: 15px; line-height: 175%;">\n\
<span>Support our research and keep this tool free by <a href="https://docs.google.com/forms/d/1lrXsE04ghfX9wNzTVwm1Wc6gQ5I-B4uw91ACrbDhJs8/viewform" target="_blank"><b>filling out this user survey</b></a>.</span>\n\
</p>'
v10: (deployed on 2016-12-05) - survey of how native languages affects learning programming
(taken down on 2017-07-28)
[see survey_v10 variable above]
// use ${this.userUUID} within the string ...
var survey_v10 = '\n\
<p style="font-size: 11pt; margin-top: 12px; margin-bottom: 15px; line-height: 150%;">\n\
<span><span style="color: #e93f34;">Support our research and keep this tool free</span> by filling out this <a href="https://docs.google.com/forms/d/e/1FAIpQLSe48NsBZPvu1hrTBwc8-aSic7nPSxpsxFqpUxV5AN4LwnyJWg/viewform?entry.956368502=';
survey_v10 += this.userUUID;
survey_v10 += '" target="_blank">survey on how your native spoken language affects how you learn programming</a>.</span></p>';
$('#surveyPane').html(survey_v10); |
v11: labinthewild python debugging experiment (deployed on 2017-07-28, taken down on 2017-09-12)
var survey_v11 = `<p style="font-size: 10pt; margin-top: 12px; margin-bottom: 15px; line-height: 150%;">
<span>
<span style="color: #e93f34;">Support our research and practice Python</span>
by trying our new
<a target="_blank" href="http://www.labinthewild.org/studies/python_tutor/">debugging skill test</a>!`;
v12: simplified demographic survey which is a simplified hybrid of the v8 general usage survey and the v10 native language survey (deployed on 2017-09-12)
// use ${this.userUUID} within the string ...
var survey_v12 = '\n\
<p style="font-size: 10pt; margin-top: 12px; margin-bottom: 15px; line-height: 150%;">\n\
<span>Support our research and keep this tool free by <a href="https://docs.google.com/forms/d/e/1FAIpQLSfQJP1ojlv8XzXAvHz0al-J_Hs3GQu4XeblxT8EzS8dIzuaYA/viewform?entry.956368502=';
survey_v12 += this.userUUID;
survey_v12 += '" target="_blank"><b>filling out this short user survey</b></a>.</span></p>';
v13: same as v12 except with slightly different wording, and adding a
call for donations (deployed on 2017-12-27)
// use ${this.userUUID} within the string ...
var survey_v13 = '\n\
<p style="font-size: 10pt; margin-top: 12px; margin-bottom: 15px; line-height: 150%;">\n\
<div style="margin-bottom: 12px;">Keep this tool free for everyone by <a href="http://pgbovine.net/support.htm" target="_blank"><b>making a small donation</b></a> <span style="font-size: 8pt;">(PayPal, Patreon, credit/debit card)</span></div>\
<span>Support our research by completing a <a href="https://docs.google.com/forms/d/e/1FAIpQLSfQJP1ojlv8XzXAvHz0al-J_Hs3GQu4XeblxT8EzS8dIzuaYA/viewform?entry.956368502=';
survey_v13 += this.userUUID;
survey_v13 += '" target="_blank"><b>short user survey</b></a></span></p>';
v14: very similar to v13 (deployed on 2018-03-11)
[see the survey_v14 variable]
*/
// misc utilities:
// From http://stackoverflow.com/a/8809472
export function generateUUID(){
var d = new Date().getTime();
var uuid = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = (d + Math.random()*16)%16 | 0;
d = Math.floor(d/16);
return (c=='x' ? r : (r&0x7|0x8)).toString(16);
});
return uuid;
};
// Adapted from http://diveintohtml5.info/storage.html
export function supports_html5_storage() {
try {
if ('localStorage' in window && window['localStorage'] !== null) {
// From: http://stackoverflow.com/questions/21159301/
// Safari before v11, in Private Browsing Mode, looks like it supports localStorage but all calls to
// setItem throw QuotaExceededError. Making these calls in the try block will detect that situation
// with the catch below, returning false.
localStorage.setItem('_localStorage_test', '1');
localStorage.removeItem('_localStorage_test');
return true;
}
else {
return false;
}
} catch (e) {
return false;
}
} | |
styled.rs | use crate::{
draw_target::DrawTarget,
drawable::{Drawable, Pixel},
pixel_iterator::IntoPixels,
pixelcolor::PixelColor,
primitives::{polyline, polyline::Polyline, Primitive},
style::{PrimitiveStyle, Styled},
};
/// Pixel iterator for each pixel in the line
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub struct StyledPixels<'a, C>
where
C: PixelColor,
{
stroke_color: Option<C>,
line_iter: polyline::Points<'a>,
}
impl<'a, C> StyledPixels<'a, C>
where
C: PixelColor,
{
pub(in crate::primitives) fn new(styled: &Styled<Polyline<'a>, PrimitiveStyle<C>>) -> Self |
}
impl<'a, C> Iterator for StyledPixels<'a, C>
where
C: PixelColor,
{
type Item = Pixel<C>;
fn next(&mut self) -> Option<Self::Item> {
// Return none if stroke color is none
let stroke_color = self.stroke_color?;
self.line_iter
.next()
.map(|point| Pixel(point, stroke_color))
}
}
impl<'a, C> IntoPixels for &Styled<Polyline<'a>, PrimitiveStyle<C>>
where
C: PixelColor,
{
type Color = C;
type Iter = StyledPixels<'a, C>;
fn into_pixels(self) -> Self::Iter {
StyledPixels::new(self)
}
}
impl<'a, C> Drawable<C> for Styled<Polyline<'a>, PrimitiveStyle<C>>
where
C: PixelColor,
{
fn draw<D>(&self, display: &mut D) -> Result<(), D::Error>
where
D: DrawTarget<Color = C>,
{
display.draw_iter(self.into_pixels())
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::primitives::polyline::tests::HEARTBEAT;
use crate::primitives::polyline::tests::SMALL;
use crate::{
drawable::Drawable,
geometry::Point,
mock_display::MockDisplay,
pixelcolor::{BinaryColor, Rgb565, RgbColor},
primitives::Primitive,
style::{PrimitiveStyle, PrimitiveStyleBuilder},
};
// Ensure that polylines only draw 1px wide due to lack of support for line joiners. This test
// should fail when joiners are supported and should be removed then.
#[test]
fn one_px_wide_only() {
let polyline = Polyline::new(&HEARTBEAT);
let thick = polyline.into_styled(PrimitiveStyle::with_stroke(Rgb565::RED, 10));
let thin = polyline.into_styled(PrimitiveStyle::with_stroke(Rgb565::RED, 1));
assert!(thick.into_pixels().eq(thin.into_pixels()));
}
#[test]
fn mock_display() {
let mut display: MockDisplay<BinaryColor> = MockDisplay::new();
Polyline::new(&SMALL)
.into_styled(PrimitiveStyle::with_stroke(BinaryColor::On, 1))
.draw(&mut display)
.unwrap();
assert_eq!(
display,
MockDisplay::from_pattern(&[
" ",
" ",
" # #",
" # ## ## ",
" # ## ## ",
" # # ",
])
);
}
#[test]
fn empty_styled_iterators() {
let points: [Point; 3] = [Point::new(2, 5), Point::new(3, 4), Point::new(4, 3)];
// No stroke width = no pixels
assert!(Polyline::new(&points)
.into_styled(PrimitiveStyle::with_stroke(Rgb565::BLUE, 0))
.into_pixels()
.eq(core::iter::empty()));
// No stroke color = no pixels
assert!(Polyline::new(&points)
.into_styled::<Rgb565>(PrimitiveStyleBuilder::new().stroke_width(1).build())
.into_pixels()
.eq(core::iter::empty()));
}
}
| {
StyledPixels {
stroke_color: styled.style.effective_stroke_color(),
line_iter: styled.primitive.points(),
}
} |
__main__.py | from server.server import Server
def main():
server = Server(__file__)
server.run()
if __name__ == '__main__': | main() | |
MerchSaleViewModel.ts | import * as ko from 'knockout';
import $ from 'jquery';
import * as countries from './data/countries.json';
import { loadStripe, Stripe, StripeCardElement } from '@stripe/stripe-js';
import { TShirtCartItem } from './TShirtCartItem';
export type TShirtType = {
id: string,
htmlClass: TShirtColor,
name: string,
price: number,
style: TShirtStyle,
sizes: TShirtSize[],
};
type Country = {
country_code: string,
country_name: string,
phone_code: string,
}
type PaymentMethod = 'cc' | 'sepa';
export type TShirtSize = 'XS' | 'S' | 'M' | 'L' | 'XL' | 'XXL' | '3XL';
type TShirtColor = 'bg-white' | 'bg-blue-800';
type TShirtStyle = 'Straight' | 'Fitted';
type TShirtView = 'front' | 'back';
export class | {
countries: Country[];
tShirtTypes: TShirtType[];
colors: TShirtColor[];
styles: TShirtStyle[];
sizes: ko.PureComputed<TShirtSize[]>;
selectedTShirtColor: ko.Observable<TShirtColor>;
selectedTShirtStyle: ko.Observable<TShirtStyle>;
selectedTShirtSize: ko.Observable<TShirtSize | null>;
selectedTShirtType: ko.PureComputed<TShirtType>;
tShirtErrorMessage: ko.Observable<string | null>;
tShirtSuccessMessage: ko.Observable<string | null>;
badgeSetSuccessMessage: ko.Observable<string | null>;
selectedTShirtView: ko.Observable<TShirtView>;
cartTShirts: ko.ObservableArray<TShirtCartItem>;
cartBadgeSetAmount: ko.Observable<number | string>;
badgeSetAmountOptions: ko.PureComputed<(number | string)[]>;
isCartEmpty: ko.PureComputed<boolean>;
subtotalPrice: ko.PureComputed<number>;
shippingPrice: ko.Observable<number>;
shippingPriceXhr: ko.Observable<JQueryXHR | null>;
shippingPriceLoading: ko.PureComputed<boolean>;
totalPrice: ko.PureComputed<number>;
billingName: ko.Observable<string>;
billingSurname: ko.Observable<string>;
billingCompany: ko.Observable<string>;
billingStreet: ko.Observable<string>;
billingHouseNumber: ko.Observable<string>;
billingCity: ko.Observable<string>;
billingZip: ko.Observable<string>;
billingCountry: ko.Observable<string>;
billingRegistrationNumber: ko.Observable<string>;
billingVatId: ko.Observable<string>;
deliveryAddressSameAsBillingAddress: ko.Observable<boolean>;
deliveryName: ko.Observable<string>;
deliverySurname: ko.Observable<string>;
deliveryCompany: ko.Observable<string>;
deliveryStreet: ko.Observable<string>;
deliveryHouseNumber: ko.Observable<string>;
deliveryCity: ko.Observable<string>;
deliveryZip: ko.Observable<string>;
deliveryCountry: ko.Observable<string>;
email: ko.Observable<string>;
phonePrefix: ko.Observable<string>;
phoneNumber: ko.Observable<string>;
isBillingCountryInSepa: ko.PureComputed<boolean>;
selectedPaymentMethod: ko.Observable<PaymentMethod>;
stripeLoading: ko.Observable<boolean>;
stripePromise: Promise<Stripe | null> | null;
cardElement: StripeCardElement | null;
registrationNumberLabel: ko.PureComputed<'IČO' | 'Registration number'>;
vatIdLabel: ko.PureComputed<'DIČ' | 'IČ DPH' | 'VAT ID'>;
isConfirmingOrder: ko.Observable<boolean>;
confirmOrderErrors: ko.ObservableArray<string>;
agreeToPrivacyPolicy: ko.Observable<boolean>;
agreeToTerms: ko.Observable<boolean>;
successfulOrder: ko.Observable<boolean>;
constructor() {
countries.sort((a: Country, b: Country) => {
return a.country_name.localeCompare(b.country_name);
});
this.countries = countries;
this.tShirtTypes = [
{id: 'be46f470-235d-4522-87fa-6df97da342f7', htmlClass: 'bg-blue-800', name: 'Blue T-Shirt (straight cut)', price: 25, style: 'Straight', sizes: ['S', 'M', 'L', 'XL', 'XXL', '3XL']},
{id: 'e0582148-4155-4a70-bfb2-bc87c40c63f3', htmlClass: 'bg-white', name: 'White T-Shirt (straight cut)', price: 30, style: 'Straight', sizes: ['S', 'M', 'L', 'XL', 'XXL', '3XL']},
{id: '196a52d2-d31a-44a7-8b43-9bba5743315a', htmlClass: 'bg-blue-800', name: 'Blue T-Shirt (fitted cut)', price: 25, style: 'Fitted', sizes: ['XS', 'S', 'M', 'L', 'XL']},
{id: '86a2bb63-c8cd-4862-ad7a-5e5f389795d6', htmlClass: 'bg-white', name: 'White T-Shirt (fitted cut)', price: 30, style: 'Fitted', sizes: ['XS', 'S', 'M', 'L', 'XL', 'XXL']},
];
this.colors = ['bg-blue-800', 'bg-white'];
this.styles = this.tShirtTypes.map((type) => {
return type.style;
});
this.selectedTShirtColor = ko.observable('bg-blue-800');
this.selectedTShirtStyle = ko.observable('Straight');
this.selectedTShirtSize = ko.observable(null);
this.selectedTShirtType = ko.pureComputed(() => {
const color = this.selectedTShirtColor();
const style = this.selectedTShirtStyle();
for (const type of this.tShirtTypes) {
if (type.htmlClass === color && type.style === style) {
return type;
}
}
throw new Error('Undefined t-shirt');
});
this.sizes = ko.pureComputed(() => {
const type = this.selectedTShirtType();
return type.sizes;
});
this.sizes.subscribe((sizes) => {
const selected = this.selectedTShirtSize();
if (selected === null) {
return;
}
if (sizes.indexOf(selected) === -1) {
this.selectedTShirtSize(null);
}
});
this.tShirtErrorMessage = ko.observable(null);
this.tShirtSuccessMessage = ko.observable(null);
this.badgeSetSuccessMessage = ko.observable(null);
this.selectedTShirtView = ko.observable('front');
this.cartTShirts = ko.observableArray();
this.cartTShirts.subscribe(() => {
this.updateShippingPrice();
this.updateLocalStorage();
this.getStripe();
});
this.cartBadgeSetAmount = ko.observable(0);
this.cartBadgeSetAmount.subscribe((value) => {
if (typeof value === 'string') {
const newAmount = window.prompt('Please enter the number of badge sets:');
if (newAmount === null || newAmount === '') {
this.cartBadgeSetAmount(1);
return;
}
const parsedAmount = parseInt(newAmount, 10);
if (typeof parsedAmount !== 'number' || parsedAmount < 1 || isNaN(parsedAmount)) {
this.cartBadgeSetAmount(1);
return;
}
this.cartBadgeSetAmount(parsedAmount);
}
this.updateShippingPrice();
this.updateLocalStorage();
this.getStripe();
});
this.badgeSetAmountOptions = ko.pureComputed(() => {
const currentAmount = this.cartBadgeSetAmount();
let maxAmount = 10;
if (typeof currentAmount === 'number') {
maxAmount = Math.max(currentAmount, 10);
}
const options = [];
for (let i = 1; i <= maxAmount; i++) {
options.push(i);
}
options.push('More…');
return options;
});
this.isCartEmpty = ko.pureComputed(() => {
return this.cartTShirts().length === 0 && this.cartBadgeSetAmount() === 0;
});
this.subtotalPrice = ko.pureComputed(() => {
let price = 0;
for (const item of this.cartTShirts()) {
const itemAmount = item.amount();
if (typeof itemAmount !== 'number') {
continue;
}
price += itemAmount * item.tShirtType.price;
}
const badgeSetAmount = this.cartBadgeSetAmount();
if (typeof badgeSetAmount === 'number') {
price += badgeSetAmount * 9.0;
}
return Math.round((price + Number.EPSILON) * 100) / 100;
});
this.shippingPrice = ko.observable(0.0);
this.shippingPriceXhr = ko.observable(null);
this.shippingPriceLoading = ko.pureComputed(() => {
return this.shippingPriceXhr() !== null;
});
this.totalPrice = ko.pureComputed(() => {
return Math.round((this.subtotalPrice() + this.shippingPrice() + Number.EPSILON) * 100) / 100;
});
this.billingName = ko.observable('');
this.billingName.subscribe(() => {
this.updateLocalStorage();
});
this.billingSurname = ko.observable('');
this.billingSurname.subscribe(() => {
this.updateLocalStorage();
});
this.billingCompany = ko.observable('');
this.billingCompany.subscribe(() => {
this.updateLocalStorage();
});
this.billingStreet = ko.observable('');
this.billingStreet.subscribe(() => {
this.updateLocalStorage();
});
this.billingHouseNumber = ko.observable('');
this.billingHouseNumber.subscribe(() => {
this.updateLocalStorage();
});
this.billingCity = ko.observable('');
this.billingCity.subscribe(() => {
this.updateLocalStorage();
});
this.billingZip = ko.observable('');
this.billingZip.subscribe(() => {
this.updateLocalStorage();
});
this.billingCountry = ko.observable('');
this.billingCountry.subscribe((value) => {
this.updateLocalStorage();
if (this.deliveryAddressSameAsBillingAddress()) {
this.deliveryCountry(this.billingCountry());
}
if (this.phoneNumber() === '') {
this.phonePrefix(value);
}
if (!this.isBillingCountryInSepa()) {
this.selectedPaymentMethod('cc');
}
});
this.billingRegistrationNumber = ko.observable('');
this.billingRegistrationNumber.subscribe(() => {
this.updateLocalStorage();
});
this.billingVatId = ko.observable('');
this.billingVatId.subscribe(() => {
this.updateLocalStorage();
});
this.deliveryAddressSameAsBillingAddress = ko.observable(true);
this.deliveryAddressSameAsBillingAddress.subscribe((value) => {
this.updateLocalStorage();
if (!value) {
return;
}
this.deliveryName('');
this.deliverySurname('');
this.deliveryCompany('');
this.deliveryStreet('');
this.deliveryHouseNumber('');
this.deliveryCity('');
this.deliveryZip('');
this.deliveryCountry(this.billingCountry());
});
this.deliveryName = ko.observable('');
this.deliveryName.subscribe(() => {
this.updateLocalStorage();
});
this.deliverySurname = ko.observable('');
this.deliverySurname.subscribe(() => {
this.updateLocalStorage();
});
this.deliveryCompany = ko.observable('');
this.deliveryCompany.subscribe(() => {
this.updateLocalStorage();
});
this.deliveryStreet = ko.observable('');
this.deliveryStreet.subscribe(() => {
this.updateLocalStorage();
});
this.deliveryHouseNumber = ko.observable('');
this.deliveryHouseNumber.subscribe(() => {
this.updateLocalStorage();
});
this.deliveryCity = ko.observable('');
this.deliveryCity.subscribe(() => {
this.updateLocalStorage();
});
this.deliveryZip = ko.observable('');
this.deliveryZip.subscribe(() => {
this.updateLocalStorage();
});
this.deliveryCountry = ko.observable('');
this.deliveryCountry.subscribe(() => {
this.updateLocalStorage();
this.updateShippingPrice();
});
this.email = ko.observable('');
this.email.subscribe(() => {
this.updateLocalStorage();
});
this.phonePrefix = ko.observable('');
this.phonePrefix.subscribe(() => {
this.updateLocalStorage();
});
this.phoneNumber = ko.observable('');
this.phoneNumber.subscribe(() => {
this.updateLocalStorage();
});
this.isBillingCountryInSepa = ko.pureComputed(() => {
const euCountries = ['AT', 'BE', 'BG', 'CY', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GR', 'HU', 'HR', 'IE', 'IT', 'LT', 'LU', 'LV', 'MT', 'NL', 'PL', 'PT', 'RO', 'SE', 'SI', 'SK'];
euCountries.push('NO'); // Norway
euCountries.push('LI'); // Liechtenstein
euCountries.push('IS'); // Iceland
euCountries.push('CH'); // Switzerland
euCountries.push('MC'); // Monaco
euCountries.push('GB'); // United Kingdom
euCountries.push('AD'); // Andorra
euCountries.push('VA'); // Vatican
euCountries.push('SM'); // San Marino
euCountries.push('GI'); // Gibraltar
euCountries.push('GG'); // Guernsey
euCountries.push('JE'); // Jersey
euCountries.push('IM'); // Isle of Man
return euCountries.indexOf(this.billingCountry()) !== -1;
});
this.selectedPaymentMethod = ko.observable('cc');
this.selectedPaymentMethod.subscribe(() => {
this.updateLocalStorage();
this.confirmOrderErrors([]);
});
this.stripeLoading = ko.observable(true);
this.stripePromise = null;
this.cardElement = null;
this.registrationNumberLabel = ko.pureComputed(() => {
const country = this.billingCountry();
if (country === 'CZ' || country === 'SK') {
return 'IČO';
}
return 'Registration number';
});
this.vatIdLabel = ko.pureComputed(() => {
const country = this.billingCountry();
if (country === 'CZ') {
return 'DIČ';
}
if (country === 'SK') {
return 'IČ DPH';
}
return 'VAT ID';
});
this.isConfirmingOrder = ko.observable(false);
this.confirmOrderErrors = ko.observableArray();
this.agreeToPrivacyPolicy = ko.observable(false);
this.agreeToPrivacyPolicy.subscribe(() => {
this.updateLocalStorage();
});
this.agreeToTerms = ko.observable(false);
this.agreeToTerms.subscribe(() => {
this.updateLocalStorage();
});
this.successfulOrder = ko.observable(false);
this.restoreLocalStorage();
}
selectTShirtColor(color: TShirtColor): void {
this.selectedTShirtColor(color);
this.selectedTShirtView('front');
}
selectTShirtStyle(style: TShirtStyle): void {
this.selectedTShirtStyle(style);
this.selectedTShirtView('front');
}
selectTShirtSize(size: TShirtSize): void {
this.tShirtErrorMessage(null);
this.selectedTShirtSize(size);
};
addTShirtToCart(): void {
const selectedTShirtSize = this.selectedTShirtSize();
if (selectedTShirtSize === null) {
this.tShirtErrorMessage('Please select your T-shirt size first.');
this.tShirtSuccessMessage(null);
return;
}
let existingItem = null;
for (const item of this.cartTShirts()) {
if (
item.tShirtType.id === this.selectedTShirtType().id
&& item.size === selectedTShirtSize
) {
existingItem = item;
break;
}
}
this.tShirtSuccessMessage('T-shirt added! Scroll to bottom to finish the order.');
this.tShirtErrorMessage(null);
if (existingItem !== null) {
existingItem.increaseAmount();
return;
}
const newItem = new TShirtCartItem(
this.selectedTShirtType(),
selectedTShirtSize,
1,
this,
);
this.cartTShirts.push(newItem);
};
addBadgeSetToCart(): void {
const currentAmount = this.cartBadgeSetAmount();
if (typeof currentAmount === 'string') {
return;
}
this.cartBadgeSetAmount(currentAmount + 1);
this.badgeSetSuccessMessage('Badges added! Scroll to bottom to finish the order.');
}
removeBadgeSetsFromCart(): void {
this.badgeSetSuccessMessage(null);
this.cartBadgeSetAmount(0);
}
removeTShirtFromCart(index: number): void {
this.cartTShirts.splice(index, 1);
this.tShirtSuccessMessage(null);
}
updateShippingPrice(): void {
const oldXhr = this.shippingPriceXhr();
if (oldXhr !== null) {
oldXhr.abort();
}
if (this.cartTShirts().length === 0 && this.cartBadgeSetAmount() === 0) {
this.shippingPriceXhr(null);
this.shippingPrice(0.0);
return;
}
const badgeSetAmount = this.cartBadgeSetAmount();
const xhr = $.ajax({
type: 'POST',
url: 'https://merch-api.phpstan.org/shipping-price',
contentType: 'application/json; charset=utf-8',
dataType: 'json',
data: JSON.stringify({
country: this.deliveryCountry(),
items: this.cartTShirts().map((item) => {
const itemAmount = item.amount();
return {
id: item.tShirtType.id,
size: item.size,
amount: typeof itemAmount === 'number' ? itemAmount : 10,
};
}),
badge_set_amount: typeof badgeSetAmount === 'number' ? badgeSetAmount : 10,
}),
});
this.shippingPriceXhr(xhr);
xhr.done((result) => {
this.shippingPrice(Math.round((result.price + Number.EPSILON) * 100) / 100);
this.shippingPriceXhr(null);
});
xhr.fail((reason) => {
if (reason.statusText === 'abort') {
return;
}
this.confirmOrderErrors(['Error occured while fetching the shipping price.']);
});
}
updateLocalStorage(): void {
try {
const badgeSetAmount = this.cartBadgeSetAmount();
const json = {
items: this.cartTShirts().map((item) => {
const itemAmount = item.amount();
return {
id: item.tShirtType.id,
amount: typeof itemAmount === 'number' ? itemAmount : 10,
size: item.size,
};
}),
badgeSetAmount: typeof badgeSetAmount === 'number' ? badgeSetAmount : 10,
billing: {
name: this.billingName(),
surname: this.billingSurname(),
company: this.billingCompany(),
street: this.billingStreet(),
houseNumber: this.billingHouseNumber(),
city: this.billingCity(),
zip: this.billingZip(),
country: this.billingCountry(),
registrationNumber: this.billingRegistrationNumber(),
vatId: this.billingVatId(),
},
deliveryAddressSameAsBillingAddress: this.deliveryAddressSameAsBillingAddress(),
delivery: {
name: this.deliveryName(),
surname: this.deliverySurname(),
company: this.deliveryCompany(),
street: this.deliveryStreet(),
houseNumber: this.deliveryHouseNumber(),
city: this.deliveryCity(),
zip: this.deliveryZip(),
country: this.deliveryCountry(),
},
email: this.email(),
phonePrefix: this.phonePrefix(),
phoneNumber: this.phoneNumber(),
paymentMethod: this.selectedPaymentMethod(),
agreeToPrivacyPolicy: this.agreeToPrivacyPolicy(),
agreeToTerms: this.agreeToTerms(),
};
window.localStorage.setItem('phpstan-merch', JSON.stringify(json));
} catch (e) {
// pass
}
}
restoreLocalStorage(): void {
try {
const jsonString = window.localStorage.getItem('phpstan-merch');
if (jsonString === null) {
$.ajax({
type: 'POST',
url: 'https://merch-api.phpstan.org/user-country',
contentType: 'application/json; charset=utf-8',
dataType: 'json',
}).done((result) => {
for (const country of this.countries) {
if (result.country !== country.country_code) {
continue;
}
this.billingCountry(result.country);
return;
}
});
return;
}
const findTypeById = (id: string): TShirtType => {
for (const type of this.tShirtTypes) {
if (type.id === id) {
return type;
}
}
throw new Error('Undefined t-shirt');
}
const json = JSON.parse(jsonString);
const items = json.items;
const cartItems: TShirtCartItem[] = [];
for (const item of items) {
const type = findTypeById(item.id);
cartItems.push(new TShirtCartItem(type, item.size, item.amount, this));
}
this.cartTShirts(cartItems);
this.cartBadgeSetAmount(json.badgeSetAmount);
this.deliveryAddressSameAsBillingAddress(json.deliveryAddressSameAsBillingAddress);
this.billingName(json.billing.name);
this.billingSurname(json.billing.surname);
this.billingCompany(json.billing.company);
this.billingStreet(json.billing.street);
this.billingHouseNumber(json.billing.houseNumber);
this.billingCity(json.billing.city);
this.billingZip(json.billing.zip);
this.billingCountry(json.billing.country);
this.billingRegistrationNumber(json.billing.registrationNumber);
this.billingVatId(json.billing.vatId);
this.deliveryName(json.delivery.name);
this.deliverySurname(json.delivery.surname);
this.deliveryCompany(json.delivery.company);
this.deliveryStreet(json.delivery.street);
this.deliveryHouseNumber(json.delivery.houseNumber);
this.deliveryCity(json.delivery.city);
this.deliveryZip(json.delivery.zip);
this.deliveryCountry(json.delivery.country);
this.email(json.email);
this.phonePrefix(json.phonePrefix);
this.phoneNumber(json.phoneNumber);
this.selectedPaymentMethod(json.paymentMethod);
this.agreeToPrivacyPolicy(json.agreeToPrivacyPolicy);
this.agreeToTerms(json.agreeToTerms);
} catch (e) {
// pass
}
}
switchToBack(): void {
this.selectedTShirtView('back');
}
switchToFront(): void {
this.selectedTShirtView('front');
}
phonePrefixOptionText(value: Country): string {
if (value.country_code === this.phonePrefix()) {
return '+' + value.phone_code;
}
return value.country_name + ' (+' + value.phone_code + ')';
}
getStripe(): Promise<Stripe | null> {
if (this.stripePromise === null) {
return this.stripePromise = loadStripe('pk_live_51HKgooEsLWYRGjLPxKwP8MAV1zO9d1FqMjINH4m3G1DDhIhZbVbE0T1gpDI3yUUnf618OUjbTCLZwBnQUyKTav7M00SE7777dg').then((stripe) => {
if (stripe === null) {
this.stripeLoading(false);
return null;
}
const style = {
base: {
color: '#32325d',
fontFamily: 'Arial, sans-serif',
fontSmoothing: 'antialiased',
fontSize: '16px',
'::placeholder': {
color: '#32325d'
}
},
invalid: {
fontFamily: 'Arial, sans-serif',
color: '#fa755a',
iconColor: '#fa755a'
}
};
const elements = stripe.elements({locale: 'en-GB'});
const card = elements.create('card', { style });
card.mount('#card-element');
/* card.on("change", function (event) {
// Disable the Pay button if there are no card details in the Element
document.querySelector("button").disabled = event.empty;
document.querySelector("#card-error").textContent = event.error ? event.error.message : "";
}); */
this.cardElement = card;
this.stripeLoading(false);
return stripe;
});
}
return this.stripePromise;
}
getDataPayload(): any {
let deliveryName = this.deliveryName();
let deliverySurname = this.deliverySurname();
let deliveryCompany = this.deliveryCompany() !== '' ? this.deliveryCompany() : null;
let deliveryStreet = this.deliveryStreet();
let deliveryStreetNumber = this.deliveryHouseNumber();
let deliveryCity = this.deliveryCity();
let deliveryZip = this.deliveryZip();
let deliveryCountry = this.deliveryCountry();
if (this.deliveryAddressSameAsBillingAddress()) {
deliveryName = this.billingName();
deliverySurname = this.billingSurname();
deliveryCompany = this.billingCompany() !== '' ? this.billingCompany() : null;
deliveryStreet = this.billingStreet();
deliveryStreetNumber = this.billingHouseNumber();
deliveryCity = this.billingCity();
deliveryZip = this.billingZip();
deliveryCountry = this.billingCountry();
}
const phoneCountry = this.phonePrefix();
let phonePrefix = null;
for (const country of this.countries) {
if (phoneCountry === country.country_code) {
phonePrefix = country.phone_code;
break;
}
}
if (phonePrefix === null) {
throw new Error('Undefined phone prefix');
}
return {
email: this.email(),
billing_name: this.billingName(),
billing_surname: this.billingSurname(),
billing_company: this.billingCompany() !== '' ? this.billingCompany() : null,
billing_street: this.billingStreet(),
billing_street_number: this.billingHouseNumber(),
billing_city: this.billingCity(),
billing_zip: this.billingZip(),
billing_country: this.billingCountry(),
billing_registration_number: this.billingRegistrationNumber() !== '' ? this.billingRegistrationNumber() : null,
billing_vat_id: this.billingVatId() !== '' ? this.billingVatId() : null,
delivery_name: deliveryName,
delivery_surname: deliverySurname,
delivery_company: deliveryCompany,
delivery_street: deliveryStreet,
delivery_street_number: deliveryStreetNumber,
delivery_city: deliveryCity,
delivery_zip: deliveryZip,
delivery_country: deliveryCountry,
delivery_phone: '+' + phonePrefix + this.phoneNumber(),
items: this.cartTShirts().map((item) => {
return {
id: item.tShirtType.id,
size: item.size,
amount: item.amount(),
};
}),
badge_set_amount: this.cartBadgeSetAmount(),
total_price: this.totalPrice(),
};
}
confirmOrder(): void {
if (this.selectedPaymentMethod() === 'cc') {
this.confirmCreditCardOrder();
return;
}
this.confirmSepaOrder();
}
async confirmCreditCardOrder(): Promise<void> {
if (this.shippingPriceLoading()) {
return;
}
const stripe = await this.getStripe();
if (stripe === null) {
return;
}
if (this.cardElement === null) {
return;
}
this.confirmOrderErrors([]);
if (!this.validateForm()) {
return;
}
const card = this.cardElement;
this.isConfirmingOrder(true);
$.ajax({
type: 'POST',
url: 'https://merch-api.phpstan.org/create-payment-intent',
contentType: 'application/json; charset=utf-8',
dataType: 'json',
data: JSON.stringify(this.getDataPayload()),
}).done((paymentIntentResult) => {
stripe.confirmCardPayment(paymentIntentResult.clientSecret, {
payment_method: {
card,
}
}).then((confirmResult) => {
this.isConfirmingOrder(false);
if (confirmResult.error) {
if (typeof confirmResult.error.message !== 'undefined' && confirmResult.error.message !== null) {
this.confirmOrderErrors([confirmResult.error.message]);
return;
}
this.confirmOrderErrors(['Error occured while finishing the order.']);
return;
}
this.markOrderAsSuccessful();
});
}).fail((response) => {
this.isConfirmingOrder(false);
if (typeof response.responseJSON !== 'undefined') {
this.confirmOrderErrors(response.responseJSON.errors);
return;
}
this.confirmOrderErrors(['Error occured while finishing the order.']);
});
}
confirmSepaOrder(): void {
if (this.shippingPriceLoading()) {
return;
}
this.confirmOrderErrors([]);
if (!this.validateForm()) {
return;
}
this.isConfirmingOrder(true);
$.ajax({
type: 'POST',
url: 'https://merch-api.phpstan.org/sepa',
contentType: 'application/json; charset=utf-8',
dataType: 'json',
data: JSON.stringify(this.getDataPayload()),
}).done((result) => {
this.markOrderAsSuccessful();
}).fail((response) => {
if (typeof response.responseJSON !== 'undefined') {
this.confirmOrderErrors(response.responseJSON.errors);
return;
}
this.confirmOrderErrors(['Error occured while finishing the order.']);
}).always(() => {
this.isConfirmingOrder(false);
});
}
markOrderAsSuccessful(): void {
this.successfulOrder(true);
const anyWindow = (window as any);
if (typeof anyWindow.fathom !== 'undefined') {
anyWindow.fathom.trackGoal('DPFY11RI', this.totalPrice() * 100);
}
try {
window.localStorage.removeItem('phpstan-merch');
} catch (e) {
// pass
}
}
validateForm(): boolean {
const errors = [];
if (this.billingName().trim().length === 0) {
errors.push('Please fill in your first name.');
}
if (this.billingSurname().trim().length === 0) {
errors.push('Please fill in your last name.');
}
if (this.billingStreet().trim().length === 0) {
errors.push('Please fill in your street.');
}
if (this.billingHouseNumber().trim().length === 0) {
errors.push('Please fill in your house number.');
}
if (this.billingCity().trim().length === 0) {
errors.push('Please fill in your city.');
}
if (this.billingZip().trim().length === 0) {
errors.push('Please fill in postal code.');
}
if (!this.deliveryAddressSameAsBillingAddress()) {
if (this.deliveryName().trim().length === 0) {
errors.push('Please fill in your first name.');
}
if (this.deliverySurname().trim().length === 0) {
errors.push('Please fill in your last name.');
}
if (this.deliveryStreet().trim().length === 0) {
errors.push('Please fill in your street.');
}
if (this.deliveryHouseNumber().trim().length === 0) {
errors.push('Please fill in your house number.');
}
if (this.deliveryCity().trim().length === 0) {
errors.push('Please fill in your city.');
}
if (this.deliveryZip().trim().length === 0) {
errors.push('Please fill in postal code.');
}
}
if (this.email().trim().length === 0) {
errors.push('Please fill in your email address.');
}
if (this.phoneNumber().trim().length === 0) {
errors.push('Please fill in your phone number.');
}
if (!this.agreeToPrivacyPolicy()) {
errors.push('Agreement to Privacy Policy is required.');
}
if (!this.agreeToTerms()) {
errors.push('Agreement to Terms & Conditions is required.');
}
for (const item of this.cartTShirts()) {
const amount = item.amount();
if (typeof amount !== 'number') {
errors.push('An item has an invalid amount.');
}
}
this.confirmOrderErrors(errors);
return errors.length === 0;
}
}
| MerchSaleViewModel |
script.js | //Global variables
var timerEl = document.getElementById('countdown'); //grabs timer from index, location on page for timer
var welcomePage = document.getElementById('welcome-page'); //Welcome Page
var quizDiv1 = document.getElementById('quiz1'); //Question 1 Container
var quizDiv2 = document.getElementById('quiz2'); //Question 2 Container
var quizDiv3 = document.getElementById('quiz3'); //Question 3 Container
var quizDiv4 = document.getElementById('quiz4'); //Question 4 Container
var userVerdict = document.getElementById('verdict');
var finalPage = document.getElementById('final-page');
var scorePage = document.getElementById('score-page');
var highScores = document.getElementById("highScores");
var displayScore = document.getElementById("final-score");
var hsLink = document.getElementById("view-hs");
// grabbing buttons
var startBtn = document.getElementById('begin-btn');
var saveBtn = document.getElementById('save-btn');
var clearBtn = document.getElementById('clear-btn');
var repeatBtn = document.getElementById('repeat-btn');
var btnA = document.getElementById('btn-a');
var btnB = document.getElementById('btn-b');
var btnC = document.getElementById('btn-c');
var btnD = document.getElementById('btn-d');
var btnA2 = document.getElementById('btn-a2');
var btnB2 = document.getElementById('btn-b2');
var btnC2 = document.getElementById('btn-c2');
var btnD2 = document.getElementById('btn-d2');
var btnA3 = document.getElementById('btn-a3');
var btnB3 = document.getElementById('btn-b3');
var btnC3 = document.getElementById('btn-c3');
var btnD3 = document.getElementById('btn-d3');
var btnA4 = document.getElementById('btn-a4');
var btnB4 = document.getElementById('btn-b4');
var btnC4 = document.getElementById('btn-c4');
var btnD4 = document.getElementById('btn-d4');
var c = 40; //previously was timeLeft, changed to c to match w3 schools example (bless you, w3 schools ppl)
var t;
var timer_is_on = 0;
var userScore;
let isFinalP = false;
var timeDeduction = function(c){
if(c >= 10){
c-10;
} else {
c=0;
}
};
function timedCount(){
timerEl.textContent = "Time: " + c;
c = c-1;
t = setTimeout(timedCount, 1000); //i need a base case
if(c <= 0){
c = 0;
timerEl.textContent = "Time: " + c;
window.setTimeout(closeResult, 2000);
quizDiv1.setAttribute("style", "display: none;");
quizDiv2.setAttribute("style", "display: none;");
quizDiv3.setAttribute("style", "display: none;");
quizDiv4.setAttribute("style", "display: none;");
finalParaOnce();
};
};
function finalPageVisibility(){
finalPage.setAttribute("style", "visibility: visible;");
isFinalP = true;
};
function finalParaOnce(){
if (!isFinalP) finalPageVisibility();
};
function pauseTimer(){
clearTimeout(t);
timer_is_on = 0;
};
function wrongAnswer(){
c = c-10;
if(c <= 0){
c = 0;
timerEl.textContent = "Time: " + c;
};
pauseTimer(c);
timerEl.textContent = "Time: " + c; //changing display to reflect new counter after Q4 deducts point value
window.setTimeout(closeResult, 2000 );
localStorage.setItem("Final Score: ", c)
};
// make result text disappear
function closeResult(){
userVerdict.style.display=" none";
};
// Beginning main function that contains timer and quiz questions
var beginQuiz = function countdown() {
//Begin Timer on click event
if(!timer_is_on){
timer_is_on = 1;
timedCount();
};
// Welcome Page disappears on click (click event below)
function changeVisibility1(){
welcomePage.setAttribute("style", "display: none; ");
quizDiv1.setAttribute("style", "visibility: visible;");
};
changeVisibility1(); // hiding welcome page and shifting to questions
// now can i pull data answer so that if they click the button and it's true then x else if not y
};
//Setting all question containers to display none on main page, only display as click is initiated
quizDiv1.setAttribute("style", "display: none;");
quizDiv2.setAttribute("style", "display: none;");
quizDiv3.setAttribute("style", "display: none;");
quizDiv4.setAttribute("style", "display: none;");
scorePage.setAttribute("style", "display: none;");
finalPage.setAttribute("style", "display: none;");
//Q1
var choiceA = function(){
if(btnA.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv2.setAttribute("style", "visibility: visible;");
quizDiv1.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv2.setAttribute("style", "visibility: visible;");
quizDiv1.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceB = function(){
if(btnB.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv2.setAttribute("style", "visibility: visible;");
quizDiv1.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv2.setAttribute("style", "visibility: visible;");
quizDiv1.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceC = function(){
if(btnC.dataset.answer === "true"){
console.log("YAY"); | } else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv2.setAttribute("style", "visibility: visible;");
quizDiv1.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceD = function(){
if(btnD.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv2.setAttribute("style", "visibility: visible;");
quizDiv1.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv2.setAttribute("style", "visibility: visible;");
quizDiv1.setAttribute("style", "display: none;");
c=c-10;
}
};
//Q2
var choiceA2 = function(){
if(btnA2.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv3.setAttribute("style", "visibility: visible;");
quizDiv2.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv3.setAttribute("style", "visibility: visible;");
quizDiv2.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceB2 = function(){
if(btnB2.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv3.setAttribute("style", "visibility: visible;");
quizDiv2.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv3.setAttribute("style", "visibility: visible;");
quizDiv2.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceC2 = function(){
if(btnC2.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv3.setAttribute("style", "visibility: visible;");
quizDiv2.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv3.setAttribute("style", "visibility: visible;");
quizDiv2.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceD2 = function(){
if(btnD2.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv3.setAttribute("style", "visibility: visible;");
quizDiv2.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv3.setAttribute("style", "visibility: visible;");
quizDiv2.setAttribute("style", "display: none;");
c=c-10;
}
};
//Q3
var choiceA3 = function(){
if(btnA3.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv4.setAttribute("style", "visibility: visible;");
quizDiv3.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv4.setAttribute("style", "visibility: visible;");
quizDiv3.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceB3 = function(){
if(btnB3.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv4.setAttribute("style", "visibility: visible;");
quizDiv3.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv4.setAttribute("style", "visibility: visible;");
quizDiv3.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceC3 = function(){
if(btnC3.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv4.setAttribute("style", "visibility: visible;");
quizDiv3.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv4.setAttribute("style", "visibility: visible;");
quizDiv3.setAttribute("style", "display: none;");
c=c-10;
}
};
var choiceD3 = function(){
if(btnD3.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
quizDiv4.setAttribute("style", "visibility: visible;");
quizDiv3.setAttribute("style", "display: none;");
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
quizDiv4.setAttribute("style", "visibility: visible;");
quizDiv3.setAttribute("style", "display: none;");
c=c-10;
}
};
//Q4
var choiceA4 = function(){
if(btnA4.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
displayScore.innerText = "Your Score: " + c;
finalPage.setAttribute("style", "display: inline;");
quizDiv4.setAttribute("style", "display: none;");
pauseTimer();
window.setTimeout(closeResult, 2000 );
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
timeDeduction;
if (c<10){
c=10;
};
displayScore.innerText = "Your Score: " + (c-10);
wrongAnswer();
finalPage.setAttribute("style", "visibility: visible;");
quizDiv4.setAttribute("style", "display: none;");
}
};
var choiceB4 = function(){
if(btnB4.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
displayScore.innerText = "Your Score: " + c;
finalPage.setAttribute("style", "visibility: visible;");
quizDiv4.setAttribute("style", "display: none;");
pauseTimer();
window.setTimeout(closeResult, 2000 );
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
timeDeduction;
if (c<10){
c=10;
};
displayScore.innerText = "Your Score: " + (c-10);
wrongAnswer();
finalPage.setAttribute("style", "visibility: visible;");
quizDiv4.setAttribute("style", "display: none;");
}
};
var choiceC4 = function(){
if(btnC4.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
displayScore.innerText = "Your Score: " + (c-10);
finalPage.setAttribute("style", "visibility: visible;");
quizDiv4.setAttribute("style", "display: none;");
pauseTimer();
window.setTimeout(closeResult, 2000 );
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
timeDeduction;
if (c<10){
c=10;
};
displayScore.innerText = "Your Score: " + (c-10);
wrongAnswer();
finalPage.setAttribute("style", "visibility: visible;");
quizDiv4.setAttribute("style", "display: none;");
}
};
var choiceD4 = function(){
if(btnD4.dataset.answer === "true"){
console.log("YAY");
userVerdict.innerText = "Correct!";
displayScore.innerText = "Your Score: " + c;
finalPage.setAttribute("style", "visibility: visible;");
quizDiv4.setAttribute("style", "display: none;");
pauseTimer();
window.setTimeout(closeResult, 2000 );
} else {
console.log("BOO");
userVerdict.innerText = "Incorrect!";
timeDeduction;
if (c<10){
c=10;
};
displayScore.innerText = "Your Score: " + (c-10);
wrongAnswer();
finalPage.setAttribute("style", "visibility: visible;");
quizDiv4.setAttribute("style", "display: none;");
}
};
//Final Page
var saveScore = function(){
scorePage.setAttribute("style", "visibility: visible;");
finalPage.setAttribute("style", "display: none;");
userScore = c;
userName = document.getElementById("user-init").value;
const localStorageContent = localStorage.getItem('Initials: ');
var displayScores = function() {
for (var i = 0; i < myScore.length; i++){
var listItem = document.createElement("li");
listItem.textContent = myScore[i].name + ": " + myScore[i].score;
highScores.appendChild(listItem);
}
}
let myScore = JSON.parse(localStorageContent) || [];
myScore.push({name: userName, score: userScore});
displayScores();
// myScore.push({name: userName, score: userScore}); //can only call push of an array, pushes data into an existing array, spec. at the end
localStorage.setItem ("Initials: ", JSON.stringify(myScore));
};
//High Score Page
var playAgain = function(){
location.reload();
};
var eraseScores = function(){
window.localStorage.clear();
};
// View HS page link
hsLink.addEventListener("click", function(){
scorePage.setAttribute("style", "visibility: visible;");
welcomePage.setAttribute("style", "display: none; ");
finalPage.setAttribute("style", "display: none;");
quizDiv1.setAttribute("style", "display: none;");
quizDiv2.setAttribute("style", "display: none;");
quizDiv3.setAttribute("style", "display: none;");
quizDiv4.setAttribute("style", "display: none;");
hsLink.setAttribute("style", "pointer-events: none;");
const localStorageContent = localStorage.getItem('Initials: ');
var displayScores = function() {
for (var i = 0; i < myScore.length; i++){
var listItem = document.createElement("li");
listItem.textContent = myScore[i].name + ": " + myScore[i].score;
highScores.appendChild(listItem);
}
}
let myScore = JSON.parse(localStorageContent) || [];
// myScore.push({name: userName, score: userScore});
displayScores();
localStorage.setItem ("Initials: ", JSON.stringify(myScore));
});
//Quiz/timer starts when start quiz button is clicked
startBtn.addEventListener("click", beginQuiz);
btnA.addEventListener("click", choiceA);
btnB.addEventListener("click", choiceB);
btnC.addEventListener("click", choiceC);
btnD.addEventListener("click", choiceD);
btnA2.addEventListener("click", choiceA2);
btnB2.addEventListener("click", choiceB2);
btnC2.addEventListener("click", choiceC2);
btnD2.addEventListener("click", choiceD2);
btnA3.addEventListener("click", choiceA3);
btnB3.addEventListener("click", choiceB3);
btnC3.addEventListener("click", choiceC3);
btnD3.addEventListener("click", choiceD3);
btnA4.addEventListener("click", choiceA4);
btnB4.addEventListener("click", choiceB4);
btnC4.addEventListener("click", choiceC4);
btnD4.addEventListener("click", choiceD4);
saveBtn.addEventListener("click", saveScore);
repeatBtn.addEventListener("click", playAgain);
clearBtn.addEventListener("click", eraseScores); | userVerdict.innerText = "Correct!";
quizDiv2.setAttribute("style", "visibility: visible;");
quizDiv1.setAttribute("style", "display: none;"); |
float.rs | use crate::array::default_arrays::FromData;
use crate::prelude::*;
use arrow::array::{BooleanArray, PrimitiveArray};
use arrow::bitmap::Bitmap;
use arrow::types::NativeType;
use num::Float;
pub fn is_nan<T>(arr: &PrimitiveArray<T>) -> ArrayRef
where
T: NativeType + Float,
{
let values = Bitmap::from_trusted_len_iter(arr.values().iter().map(|v| v.is_nan()));
Box::new(BooleanArray::from_data_default(
values,
arr.validity().cloned(),
))
}
pub fn is_not_nan<T>(arr: &PrimitiveArray<T>) -> ArrayRef
where
T: NativeType + Float,
{
let values = Bitmap::from_trusted_len_iter(arr.values().iter().map(|v| !v.is_nan()));
Box::new(BooleanArray::from_data_default(
values,
arr.validity().cloned(),
))
}
pub fn is_finite<T>(arr: &PrimitiveArray<T>) -> ArrayRef
where
T: NativeType + Float,
{
let values = Bitmap::from_trusted_len_iter(arr.values().iter().map(|v| v.is_finite()));
Box::new(BooleanArray::from_data_default(
values,
arr.validity().cloned(),
))
}
pub fn is_infinite<T>(arr: &PrimitiveArray<T>) -> ArrayRef
where
T: NativeType + Float,
| {
let values = Bitmap::from_trusted_len_iter(arr.values().iter().map(|v| v.is_infinite()));
Box::new(BooleanArray::from_data_default(
values,
arr.validity().cloned(),
))
} |
|
Login.tsx | import React, { useState } from 'react' | import {
Row,
Col,
Container,
Form,
FormGroup,
Input,
FormText,
Button,
Alert,
Label,
} from 'reactstrap'
const RegistrationForm = ({ user }) => {
const [values, setValues] = useState({
nickname: user ? user.nickname : '',
firstName: user ? user.firstName : '',
lastName: user ? user.lastName : '',
phoneNumber: user && user.contacts ? user.contacts.phoneNumber : '',
address: user && user.contacts ? user.contacts.address : '',
error: null,
done: null,
})
const [agree, setAgree] = useState(false)
const handleChange = (prop) => (event) => {
setValues({
...values,
error: false,
done: false,
[prop]: event.target.value,
})
}
const update = useCommand(
{
type: 'update',
aggregateId: user ? user.id : null,
aggregateName: 'user-profile',
payload: values,
},
(error) => {
if (error) {
setValues({ ...values, error, done: false })
} else {
setValues({ ...values, error: false, done: true })
}
},
[user, values]
)
const { error, done } = values
return (
<React.Fragment>
<Form method="post" action="/api/register">
<FormGroup row>
<Col>{user ? <h4>Profile update</h4> : <h4>Registration</h4>}</Col>
</FormGroup>
<FormGroup row>
<Col>
<Input
name="nickname"
id="nickname"
placeholder="Nickname"
defaultValue={user ? user.nickname : ''}
disabled={!!user}
onChange={handleChange('nickname')}
/>
</Col>
</FormGroup>
<FormGroup row>
<Col>
<Input
name="firstName"
id="firstName"
placeholder="First name"
defaultValue={user ? user.firstName : ''}
onChange={handleChange('firstName')}
/>
</Col>
</FormGroup>
<FormGroup row>
<Col>
<Input
name="lastName"
id="lastName"
placeholder="Last name"
defaultValue={user ? user.lastName : ''}
onChange={handleChange('lastName')}
/>
</Col>
</FormGroup>
<FormGroup row>
<Col>
<Input
name="phoneNumber"
id="phoneNumber"
placeholder="Phone number"
defaultValue={
user && user.contacts ? user.contacts.phoneNumber : ''
}
onChange={handleChange('phoneNumber')}
/>
</Col>
</FormGroup>
<FormGroup row>
<Col>
<Input
type="textarea"
name="address"
id="address"
placeholder="Postal address"
defaultValue={user && user.contacts ? user.contacts.address : ''}
onChange={handleChange('address')}
/>
</Col>
</FormGroup>
<FormGroup row>
<Col>
<FormText color="muted">
Information in name related fields, phone number and address is
personal and will be encrypted before saving to eventstore
</FormText>
</Col>
</FormGroup>
{!user && (
<FormGroup check className="mb-3">
<Label check>
<Input
id="consent"
type="checkbox"
onChange={() => {
setAgree(!agree)
}}
/>{' '}
I give my consent to the processing of personal data
</Label>
</FormGroup>
)}
<FormGroup row>
<Col>
<div className="mb-3">
{user ? (
<Button onClick={() => update()}>Update</Button>
) : (
<Button disabled={!agree} type="submit">
Sign Up
</Button>
)}
</div>
{error && <Alert color="danger">An error occurred</Alert>}
{done && <Alert color="success">Successfully saved</Alert>}
</Col>
</FormGroup>
</Form>
</React.Fragment>
)
}
const Login = ({ user = null }) => {
return (
<Container>
<Row style={{ display: 'flex', justifyContent: 'center' }}>
<Col className="pt-3" xs="8" sm="6">
<RegistrationForm user={user} />
</Col>
</Row>
</Container>
)
}
export default Login | import { useCommand } from '@resolve-js/react-hooks' |
SWF.ts | import { SWFClient } from "./SWFClient";
import { CountClosedWorkflowExecutionsInput } from "./types/CountClosedWorkflowExecutionsInput";
import { CountClosedWorkflowExecutionsOutput } from "./types/CountClosedWorkflowExecutionsOutput";
import { CountOpenWorkflowExecutionsInput } from "./types/CountOpenWorkflowExecutionsInput";
import { CountOpenWorkflowExecutionsOutput } from "./types/CountOpenWorkflowExecutionsOutput";
import { CountPendingActivityTasksInput } from "./types/CountPendingActivityTasksInput";
import { CountPendingActivityTasksOutput } from "./types/CountPendingActivityTasksOutput";
import { CountPendingDecisionTasksInput } from "./types/CountPendingDecisionTasksInput";
import { CountPendingDecisionTasksOutput } from "./types/CountPendingDecisionTasksOutput";
import { DefaultUndefinedFault } from "./types/DefaultUndefinedFault";
import { DeprecateActivityTypeInput } from "./types/DeprecateActivityTypeInput";
import { DeprecateActivityTypeOutput } from "./types/DeprecateActivityTypeOutput";
import { DeprecateDomainInput } from "./types/DeprecateDomainInput";
import { DeprecateDomainOutput } from "./types/DeprecateDomainOutput";
import { DeprecateWorkflowTypeInput } from "./types/DeprecateWorkflowTypeInput";
import { DeprecateWorkflowTypeOutput } from "./types/DeprecateWorkflowTypeOutput";
import { DescribeActivityTypeInput } from "./types/DescribeActivityTypeInput";
import { DescribeActivityTypeOutput } from "./types/DescribeActivityTypeOutput";
import { DescribeDomainInput } from "./types/DescribeDomainInput";
import { DescribeDomainOutput } from "./types/DescribeDomainOutput";
import { DescribeWorkflowExecutionInput } from "./types/DescribeWorkflowExecutionInput";
import { DescribeWorkflowExecutionOutput } from "./types/DescribeWorkflowExecutionOutput";
import { DescribeWorkflowTypeInput } from "./types/DescribeWorkflowTypeInput";
import { DescribeWorkflowTypeOutput } from "./types/DescribeWorkflowTypeOutput";
import { DomainAlreadyExistsFault } from "./types/DomainAlreadyExistsFault";
import { DomainDeprecatedFault } from "./types/DomainDeprecatedFault";
import { GetWorkflowExecutionHistoryInput } from "./types/GetWorkflowExecutionHistoryInput";
import { GetWorkflowExecutionHistoryOutput } from "./types/GetWorkflowExecutionHistoryOutput";
import { LimitExceededFault } from "./types/LimitExceededFault";
import { ListActivityTypesInput } from "./types/ListActivityTypesInput";
import { ListActivityTypesOutput } from "./types/ListActivityTypesOutput";
import { ListClosedWorkflowExecutionsInput } from "./types/ListClosedWorkflowExecutionsInput";
import { ListClosedWorkflowExecutionsOutput } from "./types/ListClosedWorkflowExecutionsOutput";
import { ListDomainsInput } from "./types/ListDomainsInput";
import { ListDomainsOutput } from "./types/ListDomainsOutput";
import { ListOpenWorkflowExecutionsInput } from "./types/ListOpenWorkflowExecutionsInput";
import { ListOpenWorkflowExecutionsOutput } from "./types/ListOpenWorkflowExecutionsOutput";
import { ListTagsForResourceInput } from "./types/ListTagsForResourceInput";
import { ListTagsForResourceOutput } from "./types/ListTagsForResourceOutput";
import { ListWorkflowTypesInput } from "./types/ListWorkflowTypesInput";
import { ListWorkflowTypesOutput } from "./types/ListWorkflowTypesOutput";
import { OperationNotPermittedFault } from "./types/OperationNotPermittedFault";
import { PollForActivityTaskInput } from "./types/PollForActivityTaskInput";
import { PollForActivityTaskOutput } from "./types/PollForActivityTaskOutput";
import { PollForDecisionTaskInput } from "./types/PollForDecisionTaskInput";
import { PollForDecisionTaskOutput } from "./types/PollForDecisionTaskOutput";
import { RecordActivityTaskHeartbeatInput } from "./types/RecordActivityTaskHeartbeatInput";
import { RecordActivityTaskHeartbeatOutput } from "./types/RecordActivityTaskHeartbeatOutput";
import { RegisterActivityTypeInput } from "./types/RegisterActivityTypeInput";
import { RegisterActivityTypeOutput } from "./types/RegisterActivityTypeOutput";
import { RegisterDomainInput } from "./types/RegisterDomainInput";
import { RegisterDomainOutput } from "./types/RegisterDomainOutput";
import { RegisterWorkflowTypeInput } from "./types/RegisterWorkflowTypeInput";
import { RegisterWorkflowTypeOutput } from "./types/RegisterWorkflowTypeOutput";
import { RequestCancelWorkflowExecutionInput } from "./types/RequestCancelWorkflowExecutionInput";
import { RequestCancelWorkflowExecutionOutput } from "./types/RequestCancelWorkflowExecutionOutput";
import { RespondActivityTaskCanceledInput } from "./types/RespondActivityTaskCanceledInput";
import { RespondActivityTaskCanceledOutput } from "./types/RespondActivityTaskCanceledOutput";
import { RespondActivityTaskCompletedInput } from "./types/RespondActivityTaskCompletedInput";
import { RespondActivityTaskCompletedOutput } from "./types/RespondActivityTaskCompletedOutput";
import { RespondActivityTaskFailedInput } from "./types/RespondActivityTaskFailedInput";
import { RespondActivityTaskFailedOutput } from "./types/RespondActivityTaskFailedOutput";
import { RespondDecisionTaskCompletedInput } from "./types/RespondDecisionTaskCompletedInput";
import { RespondDecisionTaskCompletedOutput } from "./types/RespondDecisionTaskCompletedOutput";
import { SignalWorkflowExecutionInput } from "./types/SignalWorkflowExecutionInput";
import { SignalWorkflowExecutionOutput } from "./types/SignalWorkflowExecutionOutput";
import { StartWorkflowExecutionInput } from "./types/StartWorkflowExecutionInput";
import { StartWorkflowExecutionOutput } from "./types/StartWorkflowExecutionOutput";
import { TagResourceInput } from "./types/TagResourceInput";
import { TagResourceOutput } from "./types/TagResourceOutput";
import { TerminateWorkflowExecutionInput } from "./types/TerminateWorkflowExecutionInput";
import { TerminateWorkflowExecutionOutput } from "./types/TerminateWorkflowExecutionOutput";
import { TooManyTagsFault } from "./types/TooManyTagsFault";
import { TypeAlreadyExistsFault } from "./types/TypeAlreadyExistsFault";
import { TypeDeprecatedFault } from "./types/TypeDeprecatedFault";
import { UndeprecateActivityTypeInput } from "./types/UndeprecateActivityTypeInput";
import { UndeprecateActivityTypeOutput } from "./types/UndeprecateActivityTypeOutput";
import { UndeprecateDomainInput } from "./types/UndeprecateDomainInput";
import { UndeprecateDomainOutput } from "./types/UndeprecateDomainOutput";
import { UndeprecateWorkflowTypeInput } from "./types/UndeprecateWorkflowTypeInput";
import { UndeprecateWorkflowTypeOutput } from "./types/UndeprecateWorkflowTypeOutput";
import { UnknownResourceFault } from "./types/UnknownResourceFault";
import { UntagResourceInput } from "./types/UntagResourceInput";
import { UntagResourceOutput } from "./types/UntagResourceOutput";
import { WorkflowExecutionAlreadyStartedFault } from "./types/WorkflowExecutionAlreadyStartedFault";
import { CountClosedWorkflowExecutionsCommand } from "./commands/CountClosedWorkflowExecutionsCommand";
import { CountOpenWorkflowExecutionsCommand } from "./commands/CountOpenWorkflowExecutionsCommand";
import { CountPendingActivityTasksCommand } from "./commands/CountPendingActivityTasksCommand";
import { CountPendingDecisionTasksCommand } from "./commands/CountPendingDecisionTasksCommand";
import { DeprecateActivityTypeCommand } from "./commands/DeprecateActivityTypeCommand";
import { DeprecateDomainCommand } from "./commands/DeprecateDomainCommand";
import { DeprecateWorkflowTypeCommand } from "./commands/DeprecateWorkflowTypeCommand";
import { DescribeActivityTypeCommand } from "./commands/DescribeActivityTypeCommand";
import { DescribeDomainCommand } from "./commands/DescribeDomainCommand";
import { DescribeWorkflowExecutionCommand } from "./commands/DescribeWorkflowExecutionCommand";
import { DescribeWorkflowTypeCommand } from "./commands/DescribeWorkflowTypeCommand";
import { GetWorkflowExecutionHistoryCommand } from "./commands/GetWorkflowExecutionHistoryCommand";
import { ListActivityTypesCommand } from "./commands/ListActivityTypesCommand";
import { ListClosedWorkflowExecutionsCommand } from "./commands/ListClosedWorkflowExecutionsCommand";
import { ListDomainsCommand } from "./commands/ListDomainsCommand";
import { ListOpenWorkflowExecutionsCommand } from "./commands/ListOpenWorkflowExecutionsCommand";
import { ListTagsForResourceCommand } from "./commands/ListTagsForResourceCommand";
import { ListWorkflowTypesCommand } from "./commands/ListWorkflowTypesCommand";
import { PollForActivityTaskCommand } from "./commands/PollForActivityTaskCommand";
import { PollForDecisionTaskCommand } from "./commands/PollForDecisionTaskCommand";
import { RecordActivityTaskHeartbeatCommand } from "./commands/RecordActivityTaskHeartbeatCommand";
import { RegisterActivityTypeCommand } from "./commands/RegisterActivityTypeCommand";
import { RegisterDomainCommand } from "./commands/RegisterDomainCommand";
import { RegisterWorkflowTypeCommand } from "./commands/RegisterWorkflowTypeCommand";
import { RequestCancelWorkflowExecutionCommand } from "./commands/RequestCancelWorkflowExecutionCommand";
import { RespondActivityTaskCanceledCommand } from "./commands/RespondActivityTaskCanceledCommand";
import { RespondActivityTaskCompletedCommand } from "./commands/RespondActivityTaskCompletedCommand";
import { RespondActivityTaskFailedCommand } from "./commands/RespondActivityTaskFailedCommand";
import { RespondDecisionTaskCompletedCommand } from "./commands/RespondDecisionTaskCompletedCommand";
import { SignalWorkflowExecutionCommand } from "./commands/SignalWorkflowExecutionCommand";
import { StartWorkflowExecutionCommand } from "./commands/StartWorkflowExecutionCommand";
import { TagResourceCommand } from "./commands/TagResourceCommand";
import { TerminateWorkflowExecutionCommand } from "./commands/TerminateWorkflowExecutionCommand";
import { UndeprecateActivityTypeCommand } from "./commands/UndeprecateActivityTypeCommand";
import { UndeprecateDomainCommand } from "./commands/UndeprecateDomainCommand";
import { UndeprecateWorkflowTypeCommand } from "./commands/UndeprecateWorkflowTypeCommand";
import { UntagResourceCommand } from "./commands/UntagResourceCommand";
export class SWF extends SWFClient {
/**
* <p>Returns the number of closed workflow executions within the given domain that meet the specified filtering criteria.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>tagFilter.tag</code>: String constraint. The key is <code>swf:tagFilter.tag</code>.</p> </li> <li> <p> <code>typeFilter.name</code>: String constraint. The key is <code>swf:typeFilter.name</code>.</p> </li> <li> <p> <code>typeFilter.version</code>: String constraint. The key is <code>swf:typeFilter.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public countClosedWorkflowExecutions(
args: CountClosedWorkflowExecutionsInput
): Promise<CountClosedWorkflowExecutionsOutput>;
public countClosedWorkflowExecutions(
args: CountClosedWorkflowExecutionsInput,
cb: (err: any, data?: CountClosedWorkflowExecutionsOutput) => void
): void;
public countClosedWorkflowExecutions(
args: CountClosedWorkflowExecutionsInput,
cb?: (err: any, data?: CountClosedWorkflowExecutionsOutput) => void
): Promise<CountClosedWorkflowExecutionsOutput> | void {
// create the appropriate command and pass it to .send
const command = new CountClosedWorkflowExecutionsCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns the number of open workflow executions within the given domain that meet the specified filtering criteria.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>tagFilter.tag</code>: String constraint. The key is <code>swf:tagFilter.tag</code>.</p> </li> <li> <p> <code>typeFilter.name</code>: String constraint. The key is <code>swf:typeFilter.name</code>.</p> </li> <li> <p> <code>typeFilter.version</code>: String constraint. The key is <code>swf:typeFilter.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public countOpenWorkflowExecutions(
args: CountOpenWorkflowExecutionsInput
): Promise<CountOpenWorkflowExecutionsOutput>;
public countOpenWorkflowExecutions(
args: CountOpenWorkflowExecutionsInput,
cb: (err: any, data?: CountOpenWorkflowExecutionsOutput) => void
): void;
public countOpenWorkflowExecutions(
args: CountOpenWorkflowExecutionsInput,
cb?: (err: any, data?: CountOpenWorkflowExecutionsOutput) => void
): Promise<CountOpenWorkflowExecutionsOutput> | void {
// create the appropriate command and pass it to .send
const command = new CountOpenWorkflowExecutionsCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns the estimated number of activity tasks in the specified task list. The count returned is an approximation and isn't guaranteed to be exact. If you specify a task list that no activity task was ever scheduled in then <code>0</code> is returned.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the <code>taskList.name</code> parameter by using a <code>Condition</code> element with the <code>swf:taskList.name</code> key to allow the action to access only certain task lists.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public countPendingActivityTasks(
args: CountPendingActivityTasksInput
): Promise<CountPendingActivityTasksOutput>;
public countPendingActivityTasks(
args: CountPendingActivityTasksInput,
cb: (err: any, data?: CountPendingActivityTasksOutput) => void
): void;
public countPendingActivityTasks(
args: CountPendingActivityTasksInput,
cb?: (err: any, data?: CountPendingActivityTasksOutput) => void
): Promise<CountPendingActivityTasksOutput> | void {
// create the appropriate command and pass it to .send
const command = new CountPendingActivityTasksCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns the estimated number of decision tasks in the specified task list. The count returned is an approximation and isn't guaranteed to be exact. If you specify a task list that no decision task was ever scheduled in then <code>0</code> is returned.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the <code>taskList.name</code> parameter by using a <code>Condition</code> element with the <code>swf:taskList.name</code> key to allow the action to access only certain task lists.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public countPendingDecisionTasks(
args: CountPendingDecisionTasksInput
): Promise<CountPendingDecisionTasksOutput>;
public countPendingDecisionTasks(
args: CountPendingDecisionTasksInput,
cb: (err: any, data?: CountPendingDecisionTasksOutput) => void
): void;
public countPendingDecisionTasks(
args: CountPendingDecisionTasksInput,
cb?: (err: any, data?: CountPendingDecisionTasksOutput) => void
): Promise<CountPendingDecisionTasksOutput> | void {
// create the appropriate command and pass it to .send
const command = new CountPendingDecisionTasksCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Deprecates the specified <i>activity type</i>. After an activity type has been deprecated, you cannot create new tasks of that activity type. Tasks of this type that were scheduled before the type was deprecated continue to run.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>activityType.name</code>: String constraint. The key is <code>swf:activityType.name</code>.</p> </li> <li> <p> <code>activityType.version</code>: String constraint. The key is <code>swf:activityType.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {TypeDeprecatedFault} <p>Returned when the specified activity or workflow type was already deprecated.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public deprecateActivityType(
args: DeprecateActivityTypeInput
): Promise<DeprecateActivityTypeOutput>;
public deprecateActivityType(
args: DeprecateActivityTypeInput,
cb: (err: any, data?: DeprecateActivityTypeOutput) => void
): void;
public deprecateActivityType(
args: DeprecateActivityTypeInput,
cb?: (err: any, data?: DeprecateActivityTypeOutput) => void
): Promise<DeprecateActivityTypeOutput> | void {
// create the appropriate command and pass it to .send
const command = new DeprecateActivityTypeCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Deprecates the specified domain. After a domain has been deprecated it cannot be used to create new workflow executions or register new types. However, you can still use visibility actions on this domain. Deprecating a domain also deprecates all activity and workflow types registered in the domain. Executions that were started before the domain was deprecated continues to run.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {DomainDeprecatedFault} <p>Returned when the specified domain has been deprecated.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public deprecateDomain(
args: DeprecateDomainInput
): Promise<DeprecateDomainOutput>;
public deprecateDomain(
args: DeprecateDomainInput,
cb: (err: any, data?: DeprecateDomainOutput) => void
): void;
public deprecateDomain(
args: DeprecateDomainInput,
cb?: (err: any, data?: DeprecateDomainOutput) => void
): Promise<DeprecateDomainOutput> | void {
// create the appropriate command and pass it to .send
const command = new DeprecateDomainCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Deprecates the specified <i>workflow type</i>. After a workflow type has been deprecated, you cannot create new executions of that type. Executions that were started before the type was deprecated continues to run. A deprecated workflow type may still be used when calling visibility actions.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>workflowType.name</code>: String constraint. The key is <code>swf:workflowType.name</code>.</p> </li> <li> <p> <code>workflowType.version</code>: String constraint. The key is <code>swf:workflowType.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {TypeDeprecatedFault} <p>Returned when the specified activity or workflow type was already deprecated.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public deprecateWorkflowType(
args: DeprecateWorkflowTypeInput
): Promise<DeprecateWorkflowTypeOutput>;
public deprecateWorkflowType(
args: DeprecateWorkflowTypeInput,
cb: (err: any, data?: DeprecateWorkflowTypeOutput) => void
): void;
public deprecateWorkflowType(
args: DeprecateWorkflowTypeInput,
cb?: (err: any, data?: DeprecateWorkflowTypeOutput) => void
): Promise<DeprecateWorkflowTypeOutput> | void {
// create the appropriate command and pass it to .send
const command = new DeprecateWorkflowTypeCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns information about the specified activity type. This includes configuration settings provided when the type was registered and other general information about the type.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>activityType.name</code>: String constraint. The key is <code>swf:activityType.name</code>.</p> </li> <li> <p> <code>activityType.version</code>: String constraint. The key is <code>swf:activityType.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public describeActivityType(
args: DescribeActivityTypeInput
): Promise<DescribeActivityTypeOutput>;
public describeActivityType(
args: DescribeActivityTypeInput,
cb: (err: any, data?: DescribeActivityTypeOutput) => void
): void;
public describeActivityType(
args: DescribeActivityTypeInput,
cb?: (err: any, data?: DescribeActivityTypeOutput) => void
): Promise<DescribeActivityTypeOutput> | void {
// create the appropriate command and pass it to .send
const command = new DescribeActivityTypeCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns information about the specified domain, including description and status.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public describeDomain(
args: DescribeDomainInput
): Promise<DescribeDomainOutput>;
public describeDomain(
args: DescribeDomainInput,
cb: (err: any, data?: DescribeDomainOutput) => void
): void;
public describeDomain(
args: DescribeDomainInput,
cb?: (err: any, data?: DescribeDomainOutput) => void
): Promise<DescribeDomainOutput> | void {
// create the appropriate command and pass it to .send
const command = new DescribeDomainCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns information about the specified workflow execution including its type and some statistics.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p> | */
public describeWorkflowExecution(
args: DescribeWorkflowExecutionInput
): Promise<DescribeWorkflowExecutionOutput>;
public describeWorkflowExecution(
args: DescribeWorkflowExecutionInput,
cb: (err: any, data?: DescribeWorkflowExecutionOutput) => void
): void;
public describeWorkflowExecution(
args: DescribeWorkflowExecutionInput,
cb?: (err: any, data?: DescribeWorkflowExecutionOutput) => void
): Promise<DescribeWorkflowExecutionOutput> | void {
// create the appropriate command and pass it to .send
const command = new DescribeWorkflowExecutionCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns information about the specified <i>workflow type</i>. This includes configuration settings specified when the type was registered and other information such as creation date, current status, etc.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>workflowType.name</code>: String constraint. The key is <code>swf:workflowType.name</code>.</p> </li> <li> <p> <code>workflowType.version</code>: String constraint. The key is <code>swf:workflowType.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public describeWorkflowType(
args: DescribeWorkflowTypeInput
): Promise<DescribeWorkflowTypeOutput>;
public describeWorkflowType(
args: DescribeWorkflowTypeInput,
cb: (err: any, data?: DescribeWorkflowTypeOutput) => void
): void;
public describeWorkflowType(
args: DescribeWorkflowTypeInput,
cb?: (err: any, data?: DescribeWorkflowTypeOutput) => void
): Promise<DescribeWorkflowTypeOutput> | void {
// create the appropriate command and pass it to .send
const command = new DescribeWorkflowTypeCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns the history of the specified workflow execution. The results may be split into multiple pages. To retrieve subsequent pages, make the call again using the <code>nextPageToken</code> returned by the initial call.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public getWorkflowExecutionHistory(
args: GetWorkflowExecutionHistoryInput
): Promise<GetWorkflowExecutionHistoryOutput>;
public getWorkflowExecutionHistory(
args: GetWorkflowExecutionHistoryInput,
cb: (err: any, data?: GetWorkflowExecutionHistoryOutput) => void
): void;
public getWorkflowExecutionHistory(
args: GetWorkflowExecutionHistoryInput,
cb?: (err: any, data?: GetWorkflowExecutionHistoryOutput) => void
): Promise<GetWorkflowExecutionHistoryOutput> | void {
// create the appropriate command and pass it to .send
const command = new GetWorkflowExecutionHistoryCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns information about all activities registered in the specified domain that match the specified name and registration status. The result includes information like creation date, current status of the activity, etc. The results may be split into multiple pages. To retrieve subsequent pages, make the call again using the <code>nextPageToken</code> returned by the initial call.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public listActivityTypes(
args: ListActivityTypesInput
): Promise<ListActivityTypesOutput>;
public listActivityTypes(
args: ListActivityTypesInput,
cb: (err: any, data?: ListActivityTypesOutput) => void
): void;
public listActivityTypes(
args: ListActivityTypesInput,
cb?: (err: any, data?: ListActivityTypesOutput) => void
): Promise<ListActivityTypesOutput> | void {
// create the appropriate command and pass it to .send
const command = new ListActivityTypesCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns a list of closed workflow executions in the specified domain that meet the filtering criteria. The results may be split into multiple pages. To retrieve subsequent pages, make the call again using the nextPageToken returned by the initial call.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>tagFilter.tag</code>: String constraint. The key is <code>swf:tagFilter.tag</code>.</p> </li> <li> <p> <code>typeFilter.name</code>: String constraint. The key is <code>swf:typeFilter.name</code>.</p> </li> <li> <p> <code>typeFilter.version</code>: String constraint. The key is <code>swf:typeFilter.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public listClosedWorkflowExecutions(
args: ListClosedWorkflowExecutionsInput
): Promise<ListClosedWorkflowExecutionsOutput>;
public listClosedWorkflowExecutions(
args: ListClosedWorkflowExecutionsInput,
cb: (err: any, data?: ListClosedWorkflowExecutionsOutput) => void
): void;
public listClosedWorkflowExecutions(
args: ListClosedWorkflowExecutionsInput,
cb?: (err: any, data?: ListClosedWorkflowExecutionsOutput) => void
): Promise<ListClosedWorkflowExecutionsOutput> | void {
// create the appropriate command and pass it to .send
const command = new ListClosedWorkflowExecutionsCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns the list of domains registered in the account. The results may be split into multiple pages. To retrieve subsequent pages, make the call again using the nextPageToken returned by the initial call.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains. The element must be set to <code>arn:aws:swf::AccountID:domain/*</code>, where <i>AccountID</i> is the account ID, with no dashes.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public listDomains(args: ListDomainsInput): Promise<ListDomainsOutput>;
public listDomains(
args: ListDomainsInput,
cb: (err: any, data?: ListDomainsOutput) => void
): void;
public listDomains(
args: ListDomainsInput,
cb?: (err: any, data?: ListDomainsOutput) => void
): Promise<ListDomainsOutput> | void {
// create the appropriate command and pass it to .send
const command = new ListDomainsCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns a list of open workflow executions in the specified domain that meet the filtering criteria. The results may be split into multiple pages. To retrieve subsequent pages, make the call again using the nextPageToken returned by the initial call.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>tagFilter.tag</code>: String constraint. The key is <code>swf:tagFilter.tag</code>.</p> </li> <li> <p> <code>typeFilter.name</code>: String constraint. The key is <code>swf:typeFilter.name</code>.</p> </li> <li> <p> <code>typeFilter.version</code>: String constraint. The key is <code>swf:typeFilter.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public listOpenWorkflowExecutions(
args: ListOpenWorkflowExecutionsInput
): Promise<ListOpenWorkflowExecutionsOutput>;
public listOpenWorkflowExecutions(
args: ListOpenWorkflowExecutionsInput,
cb: (err: any, data?: ListOpenWorkflowExecutionsOutput) => void
): void;
public listOpenWorkflowExecutions(
args: ListOpenWorkflowExecutionsInput,
cb?: (err: any, data?: ListOpenWorkflowExecutionsOutput) => void
): Promise<ListOpenWorkflowExecutionsOutput> | void {
// create the appropriate command and pass it to .send
const command = new ListOpenWorkflowExecutionsCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>List tags for a given domain.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public listTagsForResource(
args: ListTagsForResourceInput
): Promise<ListTagsForResourceOutput>;
public listTagsForResource(
args: ListTagsForResourceInput,
cb: (err: any, data?: ListTagsForResourceOutput) => void
): void;
public listTagsForResource(
args: ListTagsForResourceInput,
cb?: (err: any, data?: ListTagsForResourceOutput) => void
): Promise<ListTagsForResourceOutput> | void {
// create the appropriate command and pass it to .send
const command = new ListTagsForResourceCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Returns information about workflow types in the specified domain. The results may be split into multiple pages that can be retrieved by making the call repeatedly.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public listWorkflowTypes(
args: ListWorkflowTypesInput
): Promise<ListWorkflowTypesOutput>;
public listWorkflowTypes(
args: ListWorkflowTypesInput,
cb: (err: any, data?: ListWorkflowTypesOutput) => void
): void;
public listWorkflowTypes(
args: ListWorkflowTypesInput,
cb?: (err: any, data?: ListWorkflowTypesOutput) => void
): Promise<ListWorkflowTypesOutput> | void {
// create the appropriate command and pass it to .send
const command = new ListWorkflowTypesCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Used by workers to get an <a>ActivityTask</a> from the specified activity <code>taskList</code>. This initiates a long poll, where the service holds the HTTP connection open and responds as soon as a task becomes available. The maximum time the service holds on to the request before responding is 60 seconds. If no task is available within 60 seconds, the poll returns an empty result. An empty result, in this context, means that an ActivityTask is returned, but that the value of taskToken is an empty string. If a task is returned, the worker should use its type to identify and process it correctly.</p> <important> <p>Workers should set their client side socket timeout to at least 70 seconds (10 seconds higher than the maximum time service may hold the poll request).</p> </important> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the <code>taskList.name</code> parameter by using a <code>Condition</code> element with the <code>swf:taskList.name</code> key to allow the action to access only certain task lists.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public pollForActivityTask(
args: PollForActivityTaskInput
): Promise<PollForActivityTaskOutput>;
public pollForActivityTask(
args: PollForActivityTaskInput,
cb: (err: any, data?: PollForActivityTaskOutput) => void
): void;
public pollForActivityTask(
args: PollForActivityTaskInput,
cb?: (err: any, data?: PollForActivityTaskOutput) => void
): Promise<PollForActivityTaskOutput> | void {
// create the appropriate command and pass it to .send
const command = new PollForActivityTaskCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Used by deciders to get a <a>DecisionTask</a> from the specified decision <code>taskList</code>. A decision task may be returned for any open workflow execution that is using the specified task list. The task includes a paginated view of the history of the workflow execution. The decider should use the workflow type and the history to determine how to properly handle the task.</p> <p>This action initiates a long poll, where the service holds the HTTP connection open and responds as soon a task becomes available. If no decision task is available in the specified task list before the timeout of 60 seconds expires, an empty result is returned. An empty result, in this context, means that a DecisionTask is returned, but that the value of taskToken is an empty string.</p> <important> <p>Deciders should set their client side socket timeout to at least 70 seconds (10 seconds higher than the timeout).</p> </important> <important> <p>Because the number of workflow history events for a single workflow execution might be very large, the result returned might be split up across a number of pages. To retrieve subsequent pages, make additional calls to <code>PollForDecisionTask</code> using the <code>nextPageToken</code> returned by the initial call. Note that you do <i>not</i> call <code>GetWorkflowExecutionHistory</code> with this <code>nextPageToken</code>. Instead, call <code>PollForDecisionTask</code> again.</p> </important> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the <code>taskList.name</code> parameter by using a <code>Condition</code> element with the <code>swf:taskList.name</code> key to allow the action to access only certain task lists.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public pollForDecisionTask(
args: PollForDecisionTaskInput
): Promise<PollForDecisionTaskOutput>;
public pollForDecisionTask(
args: PollForDecisionTaskInput,
cb: (err: any, data?: PollForDecisionTaskOutput) => void
): void;
public pollForDecisionTask(
args: PollForDecisionTaskInput,
cb?: (err: any, data?: PollForDecisionTaskOutput) => void
): Promise<PollForDecisionTaskOutput> | void {
// create the appropriate command and pass it to .send
const command = new PollForDecisionTaskCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Used by activity workers to report to the service that the <a>ActivityTask</a> represented by the specified <code>taskToken</code> is still making progress. The worker can also specify details of the progress, for example percent complete, using the <code>details</code> parameter. This action can also be used by the worker as a mechanism to check if cancellation is being requested for the activity task. If a cancellation is being attempted for the specified task, then the boolean <code>cancelRequested</code> flag returned by the service is set to <code>true</code>.</p> <p>This action resets the <code>taskHeartbeatTimeout</code> clock. The <code>taskHeartbeatTimeout</code> is specified in <a>RegisterActivityType</a>.</p> <p>This action doesn't in itself create an event in the workflow execution history. However, if the task times out, the workflow execution history contains a <code>ActivityTaskTimedOut</code> event that contains the information from the last heartbeat generated by the activity worker.</p> <note> <p>The <code>taskStartToCloseTimeout</code> of an activity type is the maximum duration of an activity task, regardless of the number of <a>RecordActivityTaskHeartbeat</a> requests received. The <code>taskStartToCloseTimeout</code> is also specified in <a>RegisterActivityType</a>.</p> </note> <note> <p>This operation is only useful for long-lived activities to report liveliness of the task and to determine if a cancellation is being attempted.</p> </note> <important> <p>If the <code>cancelRequested</code> flag returns <code>true</code>, a cancellation is being attempted. If the worker can cancel the activity, it should respond with <a>RespondActivityTaskCanceled</a>. Otherwise, it should ignore the cancellation request.</p> </important> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public recordActivityTaskHeartbeat(
args: RecordActivityTaskHeartbeatInput
): Promise<RecordActivityTaskHeartbeatOutput>;
public recordActivityTaskHeartbeat(
args: RecordActivityTaskHeartbeatInput,
cb: (err: any, data?: RecordActivityTaskHeartbeatOutput) => void
): void;
public recordActivityTaskHeartbeat(
args: RecordActivityTaskHeartbeatInput,
cb?: (err: any, data?: RecordActivityTaskHeartbeatOutput) => void
): Promise<RecordActivityTaskHeartbeatOutput> | void {
// create the appropriate command and pass it to .send
const command = new RecordActivityTaskHeartbeatCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Registers a new <i>activity type</i> along with its configuration settings in the specified domain.</p> <important> <p>A <code>TypeAlreadyExists</code> fault is returned if the type already exists in the domain. You cannot change any configuration settings of the type after its registration, and it must be registered as a new version.</p> </important> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>defaultTaskList.name</code>: String constraint. The key is <code>swf:defaultTaskList.name</code>.</p> </li> <li> <p> <code>name</code>: String constraint. The key is <code>swf:name</code>.</p> </li> <li> <p> <code>version</code>: String constraint. The key is <code>swf:version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {TypeAlreadyExistsFault} <p>Returned if the type already exists in the specified domain. You may get this fault if you are registering a type that is either already registered or deprecated, or if you undeprecate a type that is currently registered.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public registerActivityType(
args: RegisterActivityTypeInput
): Promise<RegisterActivityTypeOutput>;
public registerActivityType(
args: RegisterActivityTypeInput,
cb: (err: any, data?: RegisterActivityTypeOutput) => void
): void;
public registerActivityType(
args: RegisterActivityTypeInput,
cb?: (err: any, data?: RegisterActivityTypeOutput) => void
): Promise<RegisterActivityTypeOutput> | void {
// create the appropriate command and pass it to .send
const command = new RegisterActivityTypeCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Registers a new domain.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>You cannot use an IAM policy to control domain access for this action. The name of the domain being registered is available as the resource of this action.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {DomainAlreadyExistsFault} <p>Returned if the domain already exists. You may get this fault if you are registering a domain that is either already registered or deprecated, or if you undeprecate a domain that is currently registered.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {TooManyTagsFault} <p>You've exceeded the number of tags allowed for a domain.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public registerDomain(
args: RegisterDomainInput
): Promise<RegisterDomainOutput>;
public registerDomain(
args: RegisterDomainInput,
cb: (err: any, data?: RegisterDomainOutput) => void
): void;
public registerDomain(
args: RegisterDomainInput,
cb?: (err: any, data?: RegisterDomainOutput) => void
): Promise<RegisterDomainOutput> | void {
// create the appropriate command and pass it to .send
const command = new RegisterDomainCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Registers a new <i>workflow type</i> and its configuration settings in the specified domain.</p> <p>The retention period for the workflow history is set by the <a>RegisterDomain</a> action.</p> <important> <p>If the type already exists, then a <code>TypeAlreadyExists</code> fault is returned. You cannot change the configuration settings of a workflow type once it is registered and it must be registered as a new version.</p> </important> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>defaultTaskList.name</code>: String constraint. The key is <code>swf:defaultTaskList.name</code>.</p> </li> <li> <p> <code>name</code>: String constraint. The key is <code>swf:name</code>.</p> </li> <li> <p> <code>version</code>: String constraint. The key is <code>swf:version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {TypeAlreadyExistsFault} <p>Returned if the type already exists in the specified domain. You may get this fault if you are registering a type that is either already registered or deprecated, or if you undeprecate a type that is currently registered.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public registerWorkflowType(
args: RegisterWorkflowTypeInput
): Promise<RegisterWorkflowTypeOutput>;
public registerWorkflowType(
args: RegisterWorkflowTypeInput,
cb: (err: any, data?: RegisterWorkflowTypeOutput) => void
): void;
public registerWorkflowType(
args: RegisterWorkflowTypeInput,
cb?: (err: any, data?: RegisterWorkflowTypeOutput) => void
): Promise<RegisterWorkflowTypeOutput> | void {
// create the appropriate command and pass it to .send
const command = new RegisterWorkflowTypeCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Records a <code>WorkflowExecutionCancelRequested</code> event in the currently running workflow execution identified by the given domain, workflowId, and runId. This logically requests the cancellation of the workflow execution as a whole. It is up to the decider to take appropriate actions when it receives an execution history with this event.</p> <note> <p>If the runId isn't specified, the <code>WorkflowExecutionCancelRequested</code> event is recorded in the history of the current open workflow execution with the specified workflowId in the domain.</p> </note> <note> <p>Because this action allows the workflow to properly clean up and gracefully close, it should be used instead of <a>TerminateWorkflowExecution</a> when possible.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public requestCancelWorkflowExecution(
args: RequestCancelWorkflowExecutionInput
): Promise<RequestCancelWorkflowExecutionOutput>;
public requestCancelWorkflowExecution(
args: RequestCancelWorkflowExecutionInput,
cb: (err: any, data?: RequestCancelWorkflowExecutionOutput) => void
): void;
public requestCancelWorkflowExecution(
args: RequestCancelWorkflowExecutionInput,
cb?: (err: any, data?: RequestCancelWorkflowExecutionOutput) => void
): Promise<RequestCancelWorkflowExecutionOutput> | void {
// create the appropriate command and pass it to .send
const command = new RequestCancelWorkflowExecutionCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Used by workers to tell the service that the <a>ActivityTask</a> identified by the <code>taskToken</code> was successfully canceled. Additional <code>details</code> can be provided using the <code>details</code> argument.</p> <p>These <code>details</code> (if provided) appear in the <code>ActivityTaskCanceled</code> event added to the workflow history.</p> <important> <p>Only use this operation if the <code>canceled</code> flag of a <a>RecordActivityTaskHeartbeat</a> request returns <code>true</code> and if the activity can be safely undone or abandoned.</p> </important> <p>A task is considered open from the time that it is scheduled until it is closed. Therefore a task is reported as open while a worker is processing it. A task is closed after it has been specified in a call to <a>RespondActivityTaskCompleted</a>, RespondActivityTaskCanceled, <a>RespondActivityTaskFailed</a>, or the task has <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dg-basic.html#swf-dev-timeout-types">timed out</a>.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public respondActivityTaskCanceled(
args: RespondActivityTaskCanceledInput
): Promise<RespondActivityTaskCanceledOutput>;
public respondActivityTaskCanceled(
args: RespondActivityTaskCanceledInput,
cb: (err: any, data?: RespondActivityTaskCanceledOutput) => void
): void;
public respondActivityTaskCanceled(
args: RespondActivityTaskCanceledInput,
cb?: (err: any, data?: RespondActivityTaskCanceledOutput) => void
): Promise<RespondActivityTaskCanceledOutput> | void {
// create the appropriate command and pass it to .send
const command = new RespondActivityTaskCanceledCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Used by workers to tell the service that the <a>ActivityTask</a> identified by the <code>taskToken</code> completed successfully with a <code>result</code> (if provided). The <code>result</code> appears in the <code>ActivityTaskCompleted</code> event in the workflow history.</p> <important> <p>If the requested task doesn't complete successfully, use <a>RespondActivityTaskFailed</a> instead. If the worker finds that the task is canceled through the <code>canceled</code> flag returned by <a>RecordActivityTaskHeartbeat</a>, it should cancel the task, clean up and then call <a>RespondActivityTaskCanceled</a>.</p> </important> <p>A task is considered open from the time that it is scheduled until it is closed. Therefore a task is reported as open while a worker is processing it. A task is closed after it has been specified in a call to RespondActivityTaskCompleted, <a>RespondActivityTaskCanceled</a>, <a>RespondActivityTaskFailed</a>, or the task has <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dg-basic.html#swf-dev-timeout-types">timed out</a>.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public respondActivityTaskCompleted(
args: RespondActivityTaskCompletedInput
): Promise<RespondActivityTaskCompletedOutput>;
public respondActivityTaskCompleted(
args: RespondActivityTaskCompletedInput,
cb: (err: any, data?: RespondActivityTaskCompletedOutput) => void
): void;
public respondActivityTaskCompleted(
args: RespondActivityTaskCompletedInput,
cb?: (err: any, data?: RespondActivityTaskCompletedOutput) => void
): Promise<RespondActivityTaskCompletedOutput> | void {
// create the appropriate command and pass it to .send
const command = new RespondActivityTaskCompletedCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Used by workers to tell the service that the <a>ActivityTask</a> identified by the <code>taskToken</code> has failed with <code>reason</code> (if specified). The <code>reason</code> and <code>details</code> appear in the <code>ActivityTaskFailed</code> event added to the workflow history.</p> <p>A task is considered open from the time that it is scheduled until it is closed. Therefore a task is reported as open while a worker is processing it. A task is closed after it has been specified in a call to <a>RespondActivityTaskCompleted</a>, <a>RespondActivityTaskCanceled</a>, RespondActivityTaskFailed, or the task has <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dg-basic.html#swf-dev-timeout-types">timed out</a>.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public respondActivityTaskFailed(
args: RespondActivityTaskFailedInput
): Promise<RespondActivityTaskFailedOutput>;
public respondActivityTaskFailed(
args: RespondActivityTaskFailedInput,
cb: (err: any, data?: RespondActivityTaskFailedOutput) => void
): void;
public respondActivityTaskFailed(
args: RespondActivityTaskFailedInput,
cb?: (err: any, data?: RespondActivityTaskFailedOutput) => void
): Promise<RespondActivityTaskFailedOutput> | void {
// create the appropriate command and pass it to .send
const command = new RespondActivityTaskFailedCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Used by deciders to tell the service that the <a>DecisionTask</a> identified by the <code>taskToken</code> has successfully completed. The <code>decisions</code> argument specifies the list of decisions made while processing the task.</p> <p>A <code>DecisionTaskCompleted</code> event is added to the workflow history. The <code>executionContext</code> specified is attached to the event in the workflow execution history.</p> <p> <b>Access Control</b> </p> <p>If an IAM policy grants permission to use <code>RespondDecisionTaskCompleted</code>, it can express permissions for the list of decisions in the <code>decisions</code> parameter. Each of the decisions has one or more parameters, much like a regular API call. To allow for policies to be as readable as possible, you can express permissions on decisions as if they were actual API calls, including applying conditions to some parameters. For more information, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public respondDecisionTaskCompleted(
args: RespondDecisionTaskCompletedInput
): Promise<RespondDecisionTaskCompletedOutput>;
public respondDecisionTaskCompleted(
args: RespondDecisionTaskCompletedInput,
cb: (err: any, data?: RespondDecisionTaskCompletedOutput) => void
): void;
public respondDecisionTaskCompleted(
args: RespondDecisionTaskCompletedInput,
cb?: (err: any, data?: RespondDecisionTaskCompletedOutput) => void
): Promise<RespondDecisionTaskCompletedOutput> | void {
// create the appropriate command and pass it to .send
const command = new RespondDecisionTaskCompletedCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Records a <code>WorkflowExecutionSignaled</code> event in the workflow execution history and creates a decision task for the workflow execution identified by the given domain, workflowId and runId. The event is recorded with the specified user defined signalName and input (if provided).</p> <note> <p>If a runId isn't specified, then the <code>WorkflowExecutionSignaled</code> event is recorded in the history of the current open workflow with the matching workflowId in the domain.</p> </note> <note> <p>If the specified workflow execution isn't open, this method fails with <code>UnknownResource</code>.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public signalWorkflowExecution(
args: SignalWorkflowExecutionInput
): Promise<SignalWorkflowExecutionOutput>;
public signalWorkflowExecution(
args: SignalWorkflowExecutionInput,
cb: (err: any, data?: SignalWorkflowExecutionOutput) => void
): void;
public signalWorkflowExecution(
args: SignalWorkflowExecutionInput,
cb?: (err: any, data?: SignalWorkflowExecutionOutput) => void
): Promise<SignalWorkflowExecutionOutput> | void {
// create the appropriate command and pass it to .send
const command = new SignalWorkflowExecutionCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Starts an execution of the workflow type in the specified domain using the provided <code>workflowId</code> and input data.</p> <p>This action returns the newly started workflow execution.</p> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>tagList.member.0</code>: The key is <code>swf:tagList.member.0</code>.</p> </li> <li> <p> <code>tagList.member.1</code>: The key is <code>swf:tagList.member.1</code>.</p> </li> <li> <p> <code>tagList.member.2</code>: The key is <code>swf:tagList.member.2</code>.</p> </li> <li> <p> <code>tagList.member.3</code>: The key is <code>swf:tagList.member.3</code>.</p> </li> <li> <p> <code>tagList.member.4</code>: The key is <code>swf:tagList.member.4</code>.</p> </li> <li> <p> <code>taskList</code>: String constraint. The key is <code>swf:taskList.name</code>.</p> </li> <li> <p> <code>workflowType.name</code>: String constraint. The key is <code>swf:workflowType.name</code>.</p> </li> <li> <p> <code>workflowType.version</code>: String constraint. The key is <code>swf:workflowType.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {TypeDeprecatedFault} <p>Returned when the specified activity or workflow type was already deprecated.</p>
* - {WorkflowExecutionAlreadyStartedFault} <p>Returned by <a>StartWorkflowExecution</a> when an open execution with the same workflowId is already running in the specified domain.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {DefaultUndefinedFault} <p>The <code>StartWorkflowExecution</code> API action was called without the required parameters set.</p> <p>Some workflow execution parameters, such as the decision <code>taskList</code>, must be set to start the execution. However, these parameters might have been set as defaults when the workflow type was registered. In this case, you can omit these parameters from the <code>StartWorkflowExecution</code> call and Amazon SWF uses the values defined in the workflow type.</p> <note> <p>If these parameters aren't set and no default parameters were defined in the workflow type, this error is displayed.</p> </note>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public startWorkflowExecution(
args: StartWorkflowExecutionInput
): Promise<StartWorkflowExecutionOutput>;
public startWorkflowExecution(
args: StartWorkflowExecutionInput,
cb: (err: any, data?: StartWorkflowExecutionOutput) => void
): void;
public startWorkflowExecution(
args: StartWorkflowExecutionInput,
cb?: (err: any, data?: StartWorkflowExecutionOutput) => void
): Promise<StartWorkflowExecutionOutput> | void {
// create the appropriate command and pass it to .send
const command = new StartWorkflowExecutionCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Add a tag to a Amazon SWF domain.</p> <note> <p>Amazon SWF supports a maximum of 50 tags per resource.</p> </note>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {TooManyTagsFault} <p>You've exceeded the number of tags allowed for a domain.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public tagResource(args: TagResourceInput): Promise<TagResourceOutput>;
public tagResource(
args: TagResourceInput,
cb: (err: any, data?: TagResourceOutput) => void
): void;
public tagResource(
args: TagResourceInput,
cb?: (err: any, data?: TagResourceOutput) => void
): Promise<TagResourceOutput> | void {
// create the appropriate command and pass it to .send
const command = new TagResourceCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Records a <code>WorkflowExecutionTerminated</code> event and forces closure of the workflow execution identified by the given domain, runId, and workflowId. The child policy, registered with the workflow type or specified when starting this execution, is applied to any open child workflow executions of this workflow execution.</p> <important> <p>If the identified workflow execution was in progress, it is terminated immediately.</p> </important> <note> <p>If a runId isn't specified, then the <code>WorkflowExecutionTerminated</code> event is recorded in the history of the current open workflow with the matching workflowId in the domain.</p> </note> <note> <p>You should consider using <a>RequestCancelWorkflowExecution</a> action instead because it allows the workflow to gracefully close while <a>TerminateWorkflowExecution</a> doesn't.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public terminateWorkflowExecution(
args: TerminateWorkflowExecutionInput
): Promise<TerminateWorkflowExecutionOutput>;
public terminateWorkflowExecution(
args: TerminateWorkflowExecutionInput,
cb: (err: any, data?: TerminateWorkflowExecutionOutput) => void
): void;
public terminateWorkflowExecution(
args: TerminateWorkflowExecutionInput,
cb?: (err: any, data?: TerminateWorkflowExecutionOutput) => void
): Promise<TerminateWorkflowExecutionOutput> | void {
// create the appropriate command and pass it to .send
const command = new TerminateWorkflowExecutionCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Undeprecates a previously deprecated <i>activity type</i>. After an activity type has been undeprecated, you can create new tasks of that activity type.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>activityType.name</code>: String constraint. The key is <code>swf:activityType.name</code>.</p> </li> <li> <p> <code>activityType.version</code>: String constraint. The key is <code>swf:activityType.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {TypeAlreadyExistsFault} <p>Returned if the type already exists in the specified domain. You may get this fault if you are registering a type that is either already registered or deprecated, or if you undeprecate a type that is currently registered.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public undeprecateActivityType(
args: UndeprecateActivityTypeInput
): Promise<UndeprecateActivityTypeOutput>;
public undeprecateActivityType(
args: UndeprecateActivityTypeInput,
cb: (err: any, data?: UndeprecateActivityTypeOutput) => void
): void;
public undeprecateActivityType(
args: UndeprecateActivityTypeInput,
cb?: (err: any, data?: UndeprecateActivityTypeOutput) => void
): Promise<UndeprecateActivityTypeOutput> | void {
// create the appropriate command and pass it to .send
const command = new UndeprecateActivityTypeCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Undeprecates a previously deprecated domain. After a domain has been undeprecated it can be used to create new workflow executions or register new types.</p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>You cannot use an IAM policy to constrain this action's parameters.</p> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {DomainAlreadyExistsFault} <p>Returned if the domain already exists. You may get this fault if you are registering a domain that is either already registered or deprecated, or if you undeprecate a domain that is currently registered.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public undeprecateDomain(
args: UndeprecateDomainInput
): Promise<UndeprecateDomainOutput>;
public undeprecateDomain(
args: UndeprecateDomainInput,
cb: (err: any, data?: UndeprecateDomainOutput) => void
): void;
public undeprecateDomain(
args: UndeprecateDomainInput,
cb?: (err: any, data?: UndeprecateDomainOutput) => void
): Promise<UndeprecateDomainOutput> | void {
// create the appropriate command and pass it to .send
const command = new UndeprecateDomainCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Undeprecates a previously deprecated <i>workflow type</i>. After a workflow type has been undeprecated, you can create new executions of that type. </p> <note> <p>This operation is eventually consistent. The results are best effort and may not exactly reflect recent updates and changes.</p> </note> <p> <b>Access Control</b> </p> <p>You can use IAM policies to control this action's access to Amazon SWF resources as follows:</p> <ul> <li> <p>Use a <code>Resource</code> element with the domain name to limit the action to only specified domains.</p> </li> <li> <p>Use an <code>Action</code> element to allow or deny permission to call this action.</p> </li> <li> <p>Constrain the following parameters by using a <code>Condition</code> element with the appropriate keys.</p> <ul> <li> <p> <code>workflowType.name</code>: String constraint. The key is <code>swf:workflowType.name</code>.</p> </li> <li> <p> <code>workflowType.version</code>: String constraint. The key is <code>swf:workflowType.version</code>.</p> </li> </ul> </li> </ul> <p>If the caller doesn't have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's <code>cause</code> parameter is set to <code>OPERATION_NOT_PERMITTED</code>. For details and example IAM policies, see <a href="https://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html">Using IAM to Manage Access to Amazon SWF Workflows</a> in the <i>Amazon SWF Developer Guide</i>.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {TypeAlreadyExistsFault} <p>Returned if the type already exists in the specified domain. You may get this fault if you are registering a type that is either already registered or deprecated, or if you undeprecate a type that is currently registered.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public undeprecateWorkflowType(
args: UndeprecateWorkflowTypeInput
): Promise<UndeprecateWorkflowTypeOutput>;
public undeprecateWorkflowType(
args: UndeprecateWorkflowTypeInput,
cb: (err: any, data?: UndeprecateWorkflowTypeOutput) => void
): void;
public undeprecateWorkflowType(
args: UndeprecateWorkflowTypeInput,
cb?: (err: any, data?: UndeprecateWorkflowTypeOutput) => void
): Promise<UndeprecateWorkflowTypeOutput> | void {
// create the appropriate command and pass it to .send
const command = new UndeprecateWorkflowTypeCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
/**
* <p>Remove a tag from a Amazon SWF domain.</p>
*
* This operation may fail with one of the following errors:
* - {UnknownResourceFault} <p>Returned when the named resource cannot be found with in the scope of this operation (region or domain). This could happen if the named resource was never created or is no longer available for this operation.</p>
* - {LimitExceededFault} <p>Returned by any operation if a system imposed limitation has been reached. To address this fault you should either clean up unused resources or increase the limit by contacting AWS.</p>
* - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service
*/
public untagResource(args: UntagResourceInput): Promise<UntagResourceOutput>;
public untagResource(
args: UntagResourceInput,
cb: (err: any, data?: UntagResourceOutput) => void
): void;
public untagResource(
args: UntagResourceInput,
cb?: (err: any, data?: UntagResourceOutput) => void
): Promise<UntagResourceOutput> | void {
// create the appropriate command and pass it to .send
const command = new UntagResourceCommand(args);
if (typeof cb === "function") {
this.send(command, cb);
} else {
return this.send(command);
}
}
} | * - {OperationNotPermittedFault} <p>Returned when the caller doesn't have sufficient permissions to invoke the action.</p>
* - {Error} An error originating from the SDK or customizations rather than the service |
anchor_detector.py | from .aaa_util import eval_results, get_summary, convert_df
class AnchorDetector:
def __init__(self, offline):
|
def initialize(self, seq_info):
self.seq_info = seq_info
self.previous_offline = None
def fixed_detect(self, frame_idx, duration):
feedback_length = duration
if (frame_idx + 1) % duration == 0:
is_anchor, feedback = (
True,
self._get_feedback(frame_idx - duration + 1, frame_idx),
)
else:
is_anchor, feedback = False, None
return is_anchor, feedback, feedback_length
def stable_detect(self, seq_info, frame_idx, duration, threshold):
if frame_idx + 1 > duration:
current_offline = self._get_feedback(frame_idx - duration + 1, frame_idx)
if self.previous_offline is not None and current_offline is not None:
overlap_previous = self.previous_offline[
self.previous_offline[:, 0] > 1
]
overlap_previous[:, 0] -= 1
overlap_previous = convert_df(overlap_previous, is_offline=True)
overlap_current = current_offline[current_offline[:, 0] < duration]
overlap_current = convert_df(overlap_current, is_offline=True)
feedback_length = duration
else:
current_offline = self._get_feedback(0, frame_idx)
if self.previous_offline is not None and current_offline is not None:
overlap_previous = convert_df(self.previous_offline, is_offline=True)
overlap_current = current_offline[current_offline[:, 0] <= frame_idx]
overlap_current = convert_df(overlap_current, is_offline=True)
feedback_length = frame_idx + 1
if self.previous_offline is not None and current_offline is not None:
prev_acc, prev_ana, _ = eval_results(
seq_info, overlap_previous, overlap_current
)
prev_sum = get_summary(prev_acc, prev_ana)
curr_acc, curr_ana, _ = eval_results(
seq_info, overlap_current, overlap_previous
)
curr_sum = get_summary(curr_acc, curr_ana)
mean_mota = (prev_sum[3] + curr_sum[3]) / 2
if mean_mota >= threshold:
is_anchor = True
feedback = current_offline
else:
is_anchor = False
feedback = None
# print(f"Frame {frame_idx}, MOTA {mean_mota}")
else:
is_anchor = False
feedback = None
self.previous_offline = current_offline
return is_anchor, feedback, feedback_length
def _get_feedback(self, start_frame, end_frame):
try:
feedback = self.offline.track(start_frame, end_frame)
except (RuntimeError, ValueError):
feedback = None
return feedback
| self.offline = offline |
registry.rs | use std::env;
use std::fs::{self, File};
use std::iter::repeat;
use std::time::Duration;
use curl::easy::{Easy, SslOpt};
use git2;
use registry::{Registry, NewCrate, NewCrateDependency};
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
use version;
use core::source::Source;
use core::{Package, SourceId, Workspace};
use core::dependency::Kind;
use core::manifest::ManifestMetadata;
use ops;
use sources::{RegistrySource};
use util::config::{self, Config};
use util::paths;
use util::ToUrl;
use util::errors::{CargoError, CargoResult, CargoResultExt};
use util::important_paths::find_root_manifest_for_wd;
pub struct RegistryConfig {
pub index: Option<String>,
pub token: Option<String>,
}
pub struct PublishOpts<'cfg> {
pub config: &'cfg Config,
pub token: Option<String>,
pub index: Option<String>,
pub verify: bool,
pub allow_dirty: bool,
pub jobs: Option<u32>,
pub dry_run: bool,
}
pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
let pkg = ws.current()?;
if !pkg.publish() {
bail!("some crates cannot be published.\n\
`{}` is marked as unpublishable", pkg.name());
}
if pkg.manifest().patch().len() > 0 {
bail!("published crates cannot contain [patch] sections");
}
let (mut registry, reg_id) = registry(opts.config,
opts.token.clone(),
opts.index.clone())?;
verify_dependencies(pkg, ®_id)?;
// Prepare a tarball, with a non-surpressable warning if metadata
// is missing since this is being put online.
let tarball = ops::package(ws, &ops::PackageOpts {
config: opts.config,
verify: opts.verify,
list: false,
check_metadata: true,
allow_dirty: opts.allow_dirty,
jobs: opts.jobs,
})?.unwrap();
// Upload said tarball to the specified destination
opts.config.shell().status("Uploading", pkg.package_id().to_string())?;
transmit(opts.config, pkg, tarball.file(), &mut registry, opts.dry_run)?;
Ok(())
}
fn verify_dependencies(pkg: &Package, registry_src: &SourceId)
-> CargoResult<()> {
for dep in pkg.dependencies().iter() {
if dep.source_id().is_path() {
if !dep.specified_req() {
bail!("all path dependencies must have a version specified \
when publishing.\ndependency `{}` does not specify \
a version", dep.name())
}
} else if dep.source_id() != registry_src {
bail!("crates cannot be published to crates.io with dependencies sourced from \
a repository\neither publish `{}` as its own crate on crates.io and \
specify a crates.io version as a dependency or pull it into this \
repository and specify it with a path and version\n(crate `{}` has \
repository path `{}`)", dep.name(), dep.name(), dep.source_id());
}
}
Ok(())
}
fn transmit(config: &Config,
pkg: &Package,
tarball: &File, | NewCrateDependency {
optional: dep.is_optional(),
default_features: dep.uses_default_features(),
name: dep.name().to_string(),
features: dep.features().to_vec(),
version_req: dep.version_req().to_string(),
target: dep.platform().map(|s| s.to_string()),
kind: match dep.kind() {
Kind::Normal => "normal",
Kind::Build => "build",
Kind::Development => "dev",
}.to_string(),
}
}).collect::<Vec<NewCrateDependency>>();
let manifest = pkg.manifest();
let ManifestMetadata {
ref authors, ref description, ref homepage, ref documentation,
ref keywords, ref readme, ref repository, ref license, ref license_file,
ref categories, ref badges,
} = *manifest.metadata();
let readme = match *readme {
Some(ref readme) => Some(paths::read(&pkg.root().join(readme))?),
None => None,
};
if let Some(ref file) = *license_file {
if fs::metadata(&pkg.root().join(file)).is_err() {
bail!("the license file `{}` does not exist", file)
}
}
// Do not upload if performing a dry run
if dry_run {
config.shell().warn("aborting upload due to dry run")?;
return Ok(());
}
let publish = registry.publish(&NewCrate {
name: pkg.name().to_string(),
vers: pkg.version().to_string(),
deps: deps,
features: pkg.summary().features().clone(),
authors: authors.clone(),
description: description.clone(),
homepage: homepage.clone(),
documentation: documentation.clone(),
keywords: keywords.clone(),
categories: categories.clone(),
readme: readme,
repository: repository.clone(),
license: license.clone(),
license_file: license_file.clone(),
badges: badges.clone(),
}, tarball);
match publish {
Ok(warnings) => {
if !warnings.invalid_categories.is_empty() {
let msg = format!("\
the following are not valid category slugs and were \
ignored: {}. Please see https://crates.io/category_slugs \
for the list of all category slugs. \
", warnings.invalid_categories.join(", "));
config.shell().warn(&msg)?;
}
if !warnings.invalid_badges.is_empty() {
let msg = format!("\
the following are not valid badges and were ignored: {}. \
Either the badge type specified is unknown or a required \
attribute is missing. Please see \
http://doc.crates.io/manifest.html#package-metadata \
for valid badge types and their required attributes.",
warnings.invalid_badges.join(", "));
config.shell().warn(&msg)?;
}
Ok(())
},
Err(e) => Err(e.into()),
}
}
pub fn registry_configuration(config: &Config) -> CargoResult<RegistryConfig> {
let index = config.get_string("registry.index")?.map(|p| p.val);
let token = config.get_string("registry.token")?.map(|p| p.val);
Ok(RegistryConfig { index: index, token: token })
}
pub fn registry(config: &Config,
token: Option<String>,
index: Option<String>) -> CargoResult<(Registry, SourceId)> {
// Parse all configuration options
let RegistryConfig {
token: token_config,
index: _index_config,
} = registry_configuration(config)?;
let token = token.or(token_config);
let sid = match index {
Some(index) => SourceId::for_registry(&index.to_url()?)?,
None => SourceId::crates_io(config)?,
};
let api_host = {
let mut src = RegistrySource::remote(&sid, config);
src.update().chain_err(|| {
format!("failed to update {}", sid)
})?;
(src.config()?).unwrap().api
};
let handle = http_handle(config)?;
Ok((Registry::new_handle(api_host, token, handle), sid))
}
/// Create a new HTTP handle with appropriate global configuration for cargo.
pub fn http_handle(config: &Config) -> CargoResult<Easy> {
if !config.network_allowed() {
bail!("attempting to make an HTTP request, but --frozen was \
specified")
}
// The timeout option for libcurl by default times out the entire transfer,
// but we probably don't want this. Instead we only set timeouts for the
// connect phase as well as a "low speed" timeout so if we don't receive
// many bytes in a large-ish period of time then we time out.
let mut handle = Easy::new();
handle.connect_timeout(Duration::new(30, 0))?;
handle.low_speed_limit(10 /* bytes per second */)?;
handle.low_speed_time(Duration::new(30, 0))?;
handle.useragent(&version().to_string())?;
if let Some(proxy) = http_proxy(config)? {
handle.proxy(&proxy)?;
}
if let Some(cainfo) = config.get_path("http.cainfo")? {
handle.cainfo(&cainfo.val)?;
}
if let Some(check) = config.get_bool("http.check-revoke")? {
handle.ssl_options(SslOpt::new().no_revoke(!check.val))?;
}
if let Some(timeout) = http_timeout(config)? {
handle.connect_timeout(Duration::new(timeout as u64, 0))?;
handle.low_speed_time(Duration::new(timeout as u64, 0))?;
}
Ok(handle)
}
/// Find an explicit HTTP proxy if one is available.
///
/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified
/// via environment variables are picked up by libcurl.
fn http_proxy(config: &Config) -> CargoResult<Option<String>> {
if let Some(s) = config.get_string("http.proxy")? {
return Ok(Some(s.val))
}
if let Ok(cfg) = git2::Config::open_default() {
if let Ok(s) = cfg.get_str("http.proxy") {
return Ok(Some(s.to_string()))
}
}
Ok(None)
}
/// Determine if an http proxy exists.
///
/// Checks the following for existence, in order:
///
/// * cargo's `http.proxy`
/// * git's `http.proxy`
/// * http_proxy env var
/// * HTTP_PROXY env var
/// * https_proxy env var
/// * HTTPS_PROXY env var
pub fn http_proxy_exists(config: &Config) -> CargoResult<bool> {
if http_proxy(config)?.is_some() {
Ok(true)
} else {
Ok(["http_proxy", "HTTP_PROXY",
"https_proxy", "HTTPS_PROXY"].iter().any(|v| env::var(v).is_ok()))
}
}
pub fn http_timeout(config: &Config) -> CargoResult<Option<i64>> {
if let Some(s) = config.get_i64("http.timeout")? {
return Ok(Some(s.val))
}
Ok(env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok()))
}
pub fn registry_login(config: &Config, token: String) -> CargoResult<()> {
let RegistryConfig { index: _, token: old_token } = registry_configuration(config)?;
if let Some(old_token) = old_token {
if old_token == token {
return Ok(());
}
}
config::save_credentials(config, token)
}
pub struct OwnersOptions {
pub krate: Option<String>,
pub token: Option<String>,
pub index: Option<String>,
pub to_add: Option<Vec<String>>,
pub to_remove: Option<Vec<String>>,
pub list: bool,
}
pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> {
let name = match opts.krate {
Some(ref name) => name.clone(),
None => {
let manifest_path = find_root_manifest_for_wd(None, config.cwd())?;
let pkg = Package::for_path(&manifest_path, config)?;
pkg.name().to_string()
}
};
let (mut registry, _) = registry(config, opts.token.clone(),
opts.index.clone())?;
if let Some(ref v) = opts.to_add {
let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
config.shell().status("Owner", format!("adding {:?} to crate {}",
v, name))?;
registry.add_owners(&name, &v).map_err(|e| {
CargoError::from(format!("failed to add owners to crate {}: {}", name, e))
})?;
}
if let Some(ref v) = opts.to_remove {
let v = v.iter().map(|s| &s[..]).collect::<Vec<_>>();
config.shell().status("Owner", format!("removing {:?} from crate {}",
v, name))?;
registry.remove_owners(&name, &v).map_err(|e| {
CargoError::from(format!("failed to remove owners from crate {}: {}", name, e))
})?;
}
if opts.list {
let owners = registry.list_owners(&name).map_err(|e| {
CargoError::from(format!("failed to list owners of crate {}: {}", name, e))
})?;
for owner in owners.iter() {
print!("{}", owner.login);
match (owner.name.as_ref(), owner.email.as_ref()) {
(Some(name), Some(email)) => println!(" ({} <{}>)", name, email),
(Some(s), None) |
(None, Some(s)) => println!(" ({})", s),
(None, None) => println!(""),
}
}
}
Ok(())
}
pub fn yank(config: &Config,
krate: Option<String>,
version: Option<String>,
token: Option<String>,
index: Option<String>,
undo: bool) -> CargoResult<()> {
let name = match krate {
Some(name) => name,
None => {
let manifest_path = find_root_manifest_for_wd(None, config.cwd())?;
let pkg = Package::for_path(&manifest_path, config)?;
pkg.name().to_string()
}
};
let version = match version {
Some(v) => v,
None => bail!("a version must be specified to yank")
};
let (mut registry, _) = registry(config, token, index)?;
if undo {
config.shell().status("Unyank", format!("{}:{}", name, version))?;
registry.unyank(&name, &version).map_err(|e| {
CargoError::from(format!("failed to undo a yank: {}", e))
})?;
} else {
config.shell().status("Yank", format!("{}:{}", name, version))?;
registry.yank(&name, &version).map_err(|e| {
CargoError::from(format!("failed to yank: {}", e))
})?;
}
Ok(())
}
pub fn search(query: &str,
config: &Config,
index: Option<String>,
limit: u8) -> CargoResult<()> {
fn truncate_with_ellipsis(s: &str, max_length: usize) -> String {
if s.len() < max_length {
s.to_string()
} else {
format!("{}…", &s[..max_length - 1])
}
}
let (mut registry, _) = registry(config, None, index)?;
let (crates, total_crates) = registry.search(query, limit).map_err(|e| {
CargoError::from(format!("failed to retrieve search results from the registry: {}", e))
})?;
let list_items = crates.iter()
.map(|krate| (
format!("{} = \"{}\"", krate.name, krate.max_version),
krate.description.as_ref().map(|desc|
truncate_with_ellipsis(&desc.replace("\n", " "), 128))
))
.collect::<Vec<_>>();
let description_margin = list_items.iter()
.map(|&(ref left, _)| left.len() + 4)
.max()
.unwrap_or(0);
for (name, description) in list_items.into_iter() {
let line = match description {
Some(desc) => {
let space = repeat(' ').take(description_margin - name.len())
.collect::<String>();
name + &space + "# " + &desc
}
None => name
};
println!("{}", line);
}
let search_max_limit = 100;
if total_crates > limit as u32 && limit < search_max_limit {
println!("... and {} crates more (use --limit N to see more)",
total_crates - limit as u32);
} else if total_crates > limit as u32 && limit >= search_max_limit {
println!("... and {} crates more (go to http://crates.io/search?q={} to see more)",
total_crates - limit as u32,
percent_encode(query.as_bytes(), QUERY_ENCODE_SET));
}
Ok(())
} | registry: &mut Registry,
dry_run: bool) -> CargoResult<()> {
let deps = pkg.dependencies().iter().map(|dep| { |
model_session.go | /*
* Ory Kratos API
*
* Documentation for all public and administrative Ory Kratos APIs. Public and administrative APIs are exposed on different ports. Public APIs can face the public internet without any protection while administrative APIs should never be exposed without prior authorization. To protect the administative API port you should use something like Nginx, Ory Oathkeeper, or any other technology capable of authorizing incoming requests.
*
* API version: v0.7.1-alpha.1
* Contact: [email protected]
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package client
import (
"encoding/json"
"time"
)
// Session A Session
type Session struct {
// Whether or not the session is active.
Active *bool `json:"active,omitempty"`
// The Session Authentication Timestamp When this session was authenticated at.
AuthenticatedAt *time.Time `json:"authenticated_at,omitempty"`
// The Session Expiry When this session expires at.
ExpiresAt *time.Time `json:"expires_at,omitempty"`
Id string `json:"id"`
Identity Identity `json:"identity"`
// The Session Issuance Timestamp When this session was authenticated at.
IssuedAt *time.Time `json:"issued_at,omitempty"`
}
// NewSession instantiates a new Session object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewSession(id string, identity Identity) *Session {
this := Session{}
this.Id = id
this.Identity = identity
return &this
}
// NewSessionWithDefaults instantiates a new Session object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewSessionWithDefaults() *Session {
this := Session{}
return &this
}
// GetActive returns the Active field value if set, zero value otherwise.
func (o *Session) GetActive() bool {
if o == nil || o.Active == nil {
var ret bool
return ret
}
return *o.Active
}
// GetActiveOk returns a tuple with the Active field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Session) GetActiveOk() (*bool, bool) {
if o == nil || o.Active == nil {
return nil, false
}
return o.Active, true
}
// HasActive returns a boolean if a field has been set.
func (o *Session) HasActive() bool {
if o != nil && o.Active != nil {
return true
}
return false
}
// SetActive gets a reference to the given bool and assigns it to the Active field.
func (o *Session) SetActive(v bool) {
o.Active = &v
}
// GetAuthenticatedAt returns the AuthenticatedAt field value if set, zero value otherwise.
func (o *Session) GetAuthenticatedAt() time.Time {
if o == nil || o.AuthenticatedAt == nil {
var ret time.Time
return ret
}
return *o.AuthenticatedAt
}
// GetAuthenticatedAtOk returns a tuple with the AuthenticatedAt field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Session) GetAuthenticatedAtOk() (*time.Time, bool) {
if o == nil || o.AuthenticatedAt == nil {
return nil, false
}
return o.AuthenticatedAt, true
}
// HasAuthenticatedAt returns a boolean if a field has been set.
func (o *Session) HasAuthenticatedAt() bool {
if o != nil && o.AuthenticatedAt != nil {
return true
}
return false
}
// SetAuthenticatedAt gets a reference to the given time.Time and assigns it to the AuthenticatedAt field.
func (o *Session) SetAuthenticatedAt(v time.Time) {
o.AuthenticatedAt = &v
}
// GetExpiresAt returns the ExpiresAt field value if set, zero value otherwise.
func (o *Session) GetExpiresAt() time.Time {
if o == nil || o.ExpiresAt == nil {
var ret time.Time
return ret
}
return *o.ExpiresAt |
// GetExpiresAtOk returns a tuple with the ExpiresAt field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Session) GetExpiresAtOk() (*time.Time, bool) {
if o == nil || o.ExpiresAt == nil {
return nil, false
}
return o.ExpiresAt, true
}
// HasExpiresAt returns a boolean if a field has been set.
func (o *Session) HasExpiresAt() bool {
if o != nil && o.ExpiresAt != nil {
return true
}
return false
}
// SetExpiresAt gets a reference to the given time.Time and assigns it to the ExpiresAt field.
func (o *Session) SetExpiresAt(v time.Time) {
o.ExpiresAt = &v
}
// GetId returns the Id field value
func (o *Session) GetId() string {
if o == nil {
var ret string
return ret
}
return o.Id
}
// GetIdOk returns a tuple with the Id field value
// and a boolean to check if the value has been set.
func (o *Session) GetIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Id, true
}
// SetId sets field value
func (o *Session) SetId(v string) {
o.Id = v
}
// GetIdentity returns the Identity field value
func (o *Session) GetIdentity() Identity {
if o == nil {
var ret Identity
return ret
}
return o.Identity
}
// GetIdentityOk returns a tuple with the Identity field value
// and a boolean to check if the value has been set.
func (o *Session) GetIdentityOk() (*Identity, bool) {
if o == nil {
return nil, false
}
return &o.Identity, true
}
// SetIdentity sets field value
func (o *Session) SetIdentity(v Identity) {
o.Identity = v
}
// GetIssuedAt returns the IssuedAt field value if set, zero value otherwise.
func (o *Session) GetIssuedAt() time.Time {
if o == nil || o.IssuedAt == nil {
var ret time.Time
return ret
}
return *o.IssuedAt
}
// GetIssuedAtOk returns a tuple with the IssuedAt field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *Session) GetIssuedAtOk() (*time.Time, bool) {
if o == nil || o.IssuedAt == nil {
return nil, false
}
return o.IssuedAt, true
}
// HasIssuedAt returns a boolean if a field has been set.
func (o *Session) HasIssuedAt() bool {
if o != nil && o.IssuedAt != nil {
return true
}
return false
}
// SetIssuedAt gets a reference to the given time.Time and assigns it to the IssuedAt field.
func (o *Session) SetIssuedAt(v time.Time) {
o.IssuedAt = &v
}
func (o Session) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if o.Active != nil {
toSerialize["active"] = o.Active
}
if o.AuthenticatedAt != nil {
toSerialize["authenticated_at"] = o.AuthenticatedAt
}
if o.ExpiresAt != nil {
toSerialize["expires_at"] = o.ExpiresAt
}
if true {
toSerialize["id"] = o.Id
}
if true {
toSerialize["identity"] = o.Identity
}
if o.IssuedAt != nil {
toSerialize["issued_at"] = o.IssuedAt
}
return json.Marshal(toSerialize)
}
type NullableSession struct {
value *Session
isSet bool
}
func (v NullableSession) Get() *Session {
return v.value
}
func (v *NullableSession) Set(val *Session) {
v.value = val
v.isSet = true
}
func (v NullableSession) IsSet() bool {
return v.isSet
}
func (v *NullableSession) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableSession(val *Session) *NullableSession {
return &NullableSession{value: val, isSet: true}
}
func (v NullableSession) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableSession) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | } |
build.go | package cmd
import (
"fmt"
"github.com/ory/viper"
"github.com/spf13/cobra"
"github.com/boson-project/faas"
"github.com/boson-project/faas/buildpacks"
"github.com/boson-project/faas/prompt"
)
func init() {
root.AddCommand(buildCmd)
buildCmd.Flags().StringP("builder", "b", "default", "Buildpack builder, either an as a an image name or a mapping name as defined in func.yaml")
buildCmd.Flags().BoolP("confirm", "c", false, "Prompt to confirm all configuration options (Env: $FUNC_CONFIRM)")
buildCmd.Flags().StringP("image", "i", "", "Full image name in the orm [registry]/[namespace]/[name]:[tag] (optional). This option takes precedence over --registry (Env: $FUNC_IMAGE")
buildCmd.Flags().StringP("path", "p", cwd(), "Path to the project directory (Env: $FUNC_PATH)")
buildCmd.Flags().StringP("registry", "r", "", "Registry + namespace part of the image to build, ex 'quay.io/myuser'. The full image name is automatically determined based on the local directory name. If not provided the registry will be taken from func.yaml (Env: $FUNC_REGISTRY)")
err := buildCmd.RegisterFlagCompletionFunc("builder", CompleteBuilderList)
if err != nil {
fmt.Println("internal: error while calling RegisterFlagCompletionFunc: ", err)
}
}
var buildCmd = &cobra.Command{
Use: "build",
Short: "Build a function project as a container image",
Long: `Build a function project as a container image
This command builds the function project in the current directory or in the directory
specified by --path. The result will be a container image that is pushed to a registry.
The func.yaml file is read to determine the image name and registry.
If the project has not already been built, either --registry or --image must be provided
and the image name is stored in the configuration file.
`,
Example: `
# Build from the local directory, using the given registry as target.
# The full image name will be determined automatically based on the
# project directory name
kn func build --registry quay.io/myuser
# Build from the local directory, specifying the full image name
kn func build --image quay.io/myuser/myfunc
# Re-build, picking up a previously supplied image name from a local func.yml
kn func build
# Build with a custom buildpack builder
kn func build --builder cnbs/sample-builder:bionic
`,
SuggestFor: []string{"biuld", "buidl", "built"},
PreRunE: bindEnv("image", "path", "builder", "registry", "confirm"),
RunE: runBuild,
}
func runBuild(cmd *cobra.Command, _ []string) (err error) {
config := newBuildConfig().Prompt()
function, err := functionWithOverrides(config.Path, functionOverrides{Builder: config.Builder, Image: config.Image})
if err != nil {
return
}
// Check if the Function has been initialized
if !function.Initialized() {
return fmt.Errorf("the given path '%v' does not contain an initialized function. Please create one at this path before deploying", config.Path)
}
// If the Function does not yet have an image name and one was not provided on the command line
if function.Image == "" {
// AND a --registry was not provided, then we need to
// prompt for a registry from which we can derive an image name.
if config.Registry == "" |
// We have the registry, so let's use it to derive the Function image name
config.Image = deriveImage(config.Image, config.Registry, config.Path)
function.Image = config.Image
}
// All set, let's write changes in the config to the disk
err = function.WriteConfig()
if err != nil {
return
}
builder := buildpacks.NewBuilder()
builder.Verbose = config.Verbose
client := faas.New(
faas.WithVerbose(config.Verbose),
faas.WithRegistry(config.Registry), // for deriving image name when --image not provided explicitly.
faas.WithBuilder(builder))
return client.Build(config.Path)
}
type buildConfig struct {
// Image name in full, including registry, repo and tag (overrides
// image name derivation based on Registry and Function Name)
Image string
// Path of the Function implementation on local disk. Defaults to current
// working directory of the process.
Path string
// Push the resulting image to the registry after building.
Push bool
// Registry at which interstitial build artifacts should be kept.
// This setting is ignored if Image is specified, which includes the full
Registry string
// Verbose logging.
Verbose bool
// Confirm: confirm values arrived upon from environment plus flags plus defaults,
// with interactive prompting (only applicable when attached to a TTY).
Confirm bool
Builder string
}
func newBuildConfig() buildConfig {
return buildConfig{
Image: viper.GetString("image"),
Path: viper.GetString("path"),
Registry: viper.GetString("registry"),
Verbose: viper.GetBool("verbose"), // defined on root
Confirm: viper.GetBool("confirm"),
Builder: viper.GetString("builder"),
}
}
// Prompt the user with value of config members, allowing for interaractive changes.
// Skipped if not in an interactive terminal (non-TTY), or if --confirm false (agree to
// all prompts) was set (default).
func (c buildConfig) Prompt() buildConfig {
imageName := deriveImage(c.Image, c.Registry, c.Path)
if !interactiveTerminal() || !c.Confirm {
return c
}
return buildConfig{
Path: prompt.ForString("Path to project directory", c.Path),
Image: prompt.ForString("Full image name (e.g. quay.io/boson/node-sample)", imageName, prompt.WithRequired(true)),
Verbose: c.Verbose,
// Registry not prompted for as it would be confusing when combined with explicit image. Instead it is
// inferred by the derived default for Image, which uses Registry for derivation.
}
}
| {
fmt.Print("A registry for function images is required (e.g. 'quay.io/boson').\n\n")
config.Registry = prompt.ForString("Registry for function images", "")
if config.Registry == "" {
return fmt.Errorf("unable to determine function image name")
}
} |
main.py | import ipaddress
import uuid
import weakref
import enum
from datetime import date, datetime, time, timedelta
from decimal import Decimal
from pathlib import Path
from typing import (
TYPE_CHECKING,
AbstractSet,
Any,
Callable,
ClassVar,
Dict,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from pydantic import BaseModel
from pydantic.errors import ConfigError, DictError
from pydantic.fields import FieldInfo as PydanticFieldInfo
from pydantic.fields import ModelField, Undefined, UndefinedType
from pydantic.main import BaseConfig, ModelMetaclass, validate_model
from pydantic.typing import ForwardRef, NoArgAnyCallable, resolve_annotations
from pydantic.utils import ROOT_KEY, Representation
from sqlalchemy import (
Boolean,
Column,
Date,
DateTime,
Float,
ForeignKey,
Integer,
Interval,
Numeric,
inspect,
Enum
)
from sqlalchemy.orm import RelationshipProperty, declared_attr, registry, relationship
from sqlalchemy.orm.attributes import set_attribute
from sqlalchemy.orm.decl_api import DeclarativeMeta
from sqlalchemy.orm.instrumentation import is_instrumented
from sqlalchemy.sql.schema import MetaData
from sqlalchemy.sql.sqltypes import LargeBinary, Time
from .sql.sqltypes import GUID, AutoString
_T = TypeVar("_T")
def __dataclass_transform__(
*,
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]:
return lambda a: a
class FieldInfo(PydanticFieldInfo):
def __init__(self, default: Any = Undefined, **kwargs: Any) -> None:
primary_key = kwargs.pop("primary_key", False)
nullable = kwargs.pop("nullable", Undefined)
foreign_key = kwargs.pop("foreign_key", Undefined)
index = kwargs.pop("index", Undefined)
sa_column = kwargs.pop("sa_column", Undefined)
sa_column_args = kwargs.pop("sa_column_args", Undefined)
sa_column_kwargs = kwargs.pop("sa_column_kwargs", Undefined)
if sa_column is not Undefined:
if sa_column_args is not Undefined:
raise RuntimeError(
"Passing sa_column_args is not supported when "
"also passing a sa_column"
)
if sa_column_kwargs is not Undefined:
raise RuntimeError(
"Passing sa_column_kwargs is not supported when "
"also passing a sa_column"
)
super().__init__(default=default, **kwargs)
self.primary_key = primary_key
self.nullable = nullable
self.foreign_key = foreign_key
self.index = index
self.sa_column = sa_column
self.sa_column_args = sa_column_args
self.sa_column_kwargs = sa_column_kwargs
class RelationshipInfo(Representation):
def __init__(
self,
*,
back_populates: Optional[str] = None,
link_model: Optional[Any] = None,
sa_relationship: Optional[RelationshipProperty] = None, # type: ignore
sa_relationship_args: Optional[Sequence[Any]] = None,
sa_relationship_kwargs: Optional[Mapping[str, Any]] = None,
) -> None:
if sa_relationship is not None:
if sa_relationship_args is not None:
raise RuntimeError(
"Passing sa_relationship_args is not supported when "
"also passing a sa_relationship"
) | "also passing a sa_relationship"
)
self.back_populates = back_populates
self.link_model = link_model
self.sa_relationship = sa_relationship
self.sa_relationship_args = sa_relationship_args
self.sa_relationship_kwargs = sa_relationship_kwargs
def Field(
default: Any = Undefined,
*,
default_factory: Optional[NoArgAnyCallable] = None,
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
exclude: Union[
AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any
] = None,
include: Union[
AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any
] = None,
const: Optional[bool] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
multiple_of: Optional[float] = None,
min_items: Optional[int] = None,
max_items: Optional[int] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
allow_mutation: bool = True,
regex: Optional[str] = None,
primary_key: bool = False,
foreign_key: Optional[Any] = None,
nullable: Union[bool, UndefinedType] = Undefined,
index: Union[bool, UndefinedType] = Undefined,
sa_column: Union[Column, UndefinedType] = Undefined, # type: ignore
sa_column_args: Union[Sequence[Any], UndefinedType] = Undefined,
sa_column_kwargs: Union[Mapping[str, Any], UndefinedType] = Undefined,
schema_extra: Optional[Dict[str, Any]] = None,
) -> Any:
current_schema_extra = schema_extra or {}
field_info = FieldInfo(
default,
default_factory=default_factory,
alias=alias,
title=title,
description=description,
exclude=exclude,
include=include,
const=const,
gt=gt,
ge=ge,
lt=lt,
le=le,
multiple_of=multiple_of,
min_items=min_items,
max_items=max_items,
min_length=min_length,
max_length=max_length,
allow_mutation=allow_mutation,
regex=regex,
primary_key=primary_key,
foreign_key=foreign_key,
nullable=nullable,
index=index,
sa_column=sa_column,
sa_column_args=sa_column_args,
sa_column_kwargs=sa_column_kwargs,
**current_schema_extra,
)
field_info._validate()
return field_info
def Relationship(
*,
back_populates: Optional[str] = None,
link_model: Optional[Any] = None,
sa_relationship: Optional[RelationshipProperty] = None, # type: ignore
sa_relationship_args: Optional[Sequence[Any]] = None,
sa_relationship_kwargs: Optional[Mapping[str, Any]] = None,
) -> Any:
relationship_info = RelationshipInfo(
back_populates=back_populates,
link_model=link_model,
sa_relationship=sa_relationship,
sa_relationship_args=sa_relationship_args,
sa_relationship_kwargs=sa_relationship_kwargs,
)
return relationship_info
@__dataclass_transform__(kw_only_default=True, field_descriptors=(Field, FieldInfo))
class SQLModelMetaclass(ModelMetaclass, DeclarativeMeta):
__sqlmodel_relationships__: Dict[str, RelationshipInfo]
__config__: Type[BaseConfig]
__fields__: Dict[str, ModelField]
# Replicate SQLAlchemy
def __setattr__(cls, name: str, value: Any) -> None:
if getattr(cls.__config__, "table", False):
DeclarativeMeta.__setattr__(cls, name, value)
else:
super().__setattr__(name, value)
def __delattr__(cls, name: str) -> None:
if getattr(cls.__config__, "table", False):
DeclarativeMeta.__delattr__(cls, name)
else:
super().__delattr__(name)
# From Pydantic
def __new__(
cls,
name: str,
bases: Tuple[Type[Any], ...],
class_dict: Dict[str, Any],
**kwargs: Any,
) -> Any:
relationships: Dict[str, RelationshipInfo] = {}
dict_for_pydantic = {}
original_annotations = resolve_annotations(
class_dict.get("__annotations__", {}), class_dict.get("__module__", None)
)
pydantic_annotations = {}
relationship_annotations = {}
for k, v in class_dict.items():
if isinstance(v, RelationshipInfo):
relationships[k] = v
else:
dict_for_pydantic[k] = v
for k, v in original_annotations.items():
if k in relationships:
relationship_annotations[k] = v
else:
pydantic_annotations[k] = v
dict_used = {
**dict_for_pydantic,
"__weakref__": None,
"__sqlmodel_relationships__": relationships,
"__annotations__": pydantic_annotations,
}
# Duplicate logic from Pydantic to filter config kwargs because if they are
# passed directly including the registry Pydantic will pass them over to the
# superclass causing an error
allowed_config_kwargs: Set[str] = {
key
for key in dir(BaseConfig)
if not (
key.startswith("__") and key.endswith("__")
) # skip dunder methods and attributes
}
pydantic_kwargs = kwargs.copy()
config_kwargs = {
key: pydantic_kwargs.pop(key)
for key in pydantic_kwargs.keys() & allowed_config_kwargs
}
new_cls = super().__new__(cls, name, bases, dict_used, **config_kwargs)
new_cls.__annotations__ = {
**relationship_annotations,
**pydantic_annotations,
**new_cls.__annotations__,
}
def get_config(name: str) -> Any:
config_class_value = getattr(new_cls.__config__, name, Undefined)
if config_class_value is not Undefined:
return config_class_value
kwarg_value = kwargs.get(name, Undefined)
if kwarg_value is not Undefined:
return kwarg_value
return Undefined
config_table = get_config("table")
if config_table is True:
# If it was passed by kwargs, ensure it's also set in config
new_cls.__config__.table = config_table
for k, v in new_cls.__fields__.items():
col = get_column_from_field(v)
setattr(new_cls, k, col)
# Set a config flag to tell FastAPI that this should be read with a field
# in orm_mode instead of preemptively converting it to a dict.
# This could be done by reading new_cls.__config__.table in FastAPI, but
# that's very specific about SQLModel, so let's have another config that
# other future tools based on Pydantic can use.
new_cls.__config__.read_with_orm_mode = True
config_registry = get_config("registry")
if config_registry is not Undefined:
config_registry = cast(registry, config_registry)
# If it was passed by kwargs, ensure it's also set in config
new_cls.__config__.registry = config_table
setattr(new_cls, "_sa_registry", config_registry)
setattr(new_cls, "metadata", config_registry.metadata)
setattr(new_cls, "__abstract__", True)
return new_cls
# Override SQLAlchemy, allow both SQLAlchemy and plain Pydantic models
def __init__(
cls, classname: str, bases: Tuple[type, ...], dict_: Dict[str, Any], **kw: Any
) -> None:
# Only one of the base classes (or the current one) should be a table model
# this allows FastAPI cloning a SQLModel for the response_model without
# trying to create a new SQLAlchemy, for a new table, with the same name, that
# triggers an error
base_is_table = False
for base in bases:
config = getattr(base, "__config__")
if config and getattr(config, "table", False):
base_is_table = True
break
if getattr(cls.__config__, "table", False) and not base_is_table:
dict_used = dict_.copy()
for field_name, field_value in cls.__fields__.items():
dict_used[field_name] = get_column_from_field(field_value)
for rel_name, rel_info in cls.__sqlmodel_relationships__.items():
if rel_info.sa_relationship:
# There's a SQLAlchemy relationship declared, that takes precedence
# over anything else, use that and continue with the next attribute
dict_used[rel_name] = rel_info.sa_relationship
continue
ann = cls.__annotations__[rel_name]
temp_field = ModelField.infer(
name=rel_name,
value=rel_info,
annotation=ann,
class_validators=None,
config=BaseConfig,
)
relationship_to = temp_field.type_
if isinstance(temp_field.type_, ForwardRef):
relationship_to = temp_field.type_.__forward_arg__
rel_kwargs: Dict[str, Any] = {}
if rel_info.back_populates:
rel_kwargs["back_populates"] = rel_info.back_populates
if rel_info.link_model:
ins = inspect(rel_info.link_model)
local_table = getattr(ins, "local_table")
if local_table is None:
raise RuntimeError(
"Couldn't find the secondary table for "
f"model {rel_info.link_model}"
)
rel_kwargs["secondary"] = local_table
rel_args: List[Any] = []
if rel_info.sa_relationship_args:
rel_args.extend(rel_info.sa_relationship_args)
if rel_info.sa_relationship_kwargs:
rel_kwargs.update(rel_info.sa_relationship_kwargs)
rel_value: RelationshipProperty = relationship( # type: ignore
relationship_to, *rel_args, **rel_kwargs
)
dict_used[rel_name] = rel_value
DeclarativeMeta.__init__(cls, classname, bases, dict_used, **kw)
else:
ModelMetaclass.__init__(cls, classname, bases, dict_, **kw)
def get_sqlachemy_type(field: ModelField) -> Any:
if issubclass(field.type_, str):
if field.field_info.max_length:
return AutoString(length=field.field_info.max_length)
return AutoString
if issubclass(field.type_, float):
return Float
if issubclass(field.type_, bool):
return Boolean
if issubclass(field.type_, int):
return Integer
if issubclass(field.type_, datetime):
return DateTime
if issubclass(field.type_, date):
return Date
if issubclass(field.type_, timedelta):
return Interval
if issubclass(field.type_, time):
return Time
if issubclass(field.type_, enum.Enum):
return Enum(field.type_)
if issubclass(field.type_, bytes):
return LargeBinary
if issubclass(field.type_, Decimal):
return Numeric(
precision=getattr(field.type_, "max_digits", None),
scale=getattr(field.type_, "decimal_places", None),
)
if issubclass(field.type_, ipaddress.IPv4Address):
return AutoString
if issubclass(field.type_, ipaddress.IPv4Network):
return AutoString
if issubclass(field.type_, ipaddress.IPv6Address):
return AutoString
if issubclass(field.type_, ipaddress.IPv6Network):
return AutoString
if issubclass(field.type_, Path):
return AutoString
if issubclass(field.type_, uuid.UUID):
return GUID
def get_column_from_field(field: ModelField) -> Column: # type: ignore
sa_column = getattr(field.field_info, "sa_column", Undefined)
if isinstance(sa_column, Column):
return sa_column
sa_type = get_sqlachemy_type(field)
primary_key = getattr(field.field_info, "primary_key", False)
nullable = not field.required
index = getattr(field.field_info, "index", Undefined)
if index is Undefined:
index = True
if hasattr(field.field_info, "nullable"):
field_nullable = getattr(field.field_info, "nullable")
if field_nullable != Undefined:
nullable = field_nullable
args = []
foreign_key = getattr(field.field_info, "foreign_key", None)
if foreign_key:
args.append(ForeignKey(foreign_key))
kwargs = {
"primary_key": primary_key,
"nullable": nullable,
"index": index,
}
sa_default = Undefined
if field.field_info.default_factory:
sa_default = field.field_info.default_factory
elif field.field_info.default is not Undefined:
sa_default = field.field_info.default
if sa_default is not Undefined:
kwargs["default"] = sa_default
sa_column_args = getattr(field.field_info, "sa_column_args", Undefined)
if sa_column_args is not Undefined:
args.extend(list(cast(Sequence[Any], sa_column_args)))
sa_column_kwargs = getattr(field.field_info, "sa_column_kwargs", Undefined)
if sa_column_kwargs is not Undefined:
kwargs.update(cast(Dict[Any, Any], sa_column_kwargs))
return Column(sa_type, *args, **kwargs)
class_registry = weakref.WeakValueDictionary() # type: ignore
default_registry = registry()
def _value_items_is_true(v: Any) -> bool:
# Re-implement Pydantic's ValueItems.is_true() as it hasn't been released as of
# the current latest, Pydantic 1.8.2
return v is True or v is ...
_TSQLModel = TypeVar("_TSQLModel", bound="SQLModel")
class SQLModel(BaseModel, metaclass=SQLModelMetaclass, registry=default_registry):
# SQLAlchemy needs to set weakref(s), Pydantic will set the other slots values
__slots__ = ("__weakref__",)
__tablename__: ClassVar[Union[str, Callable[..., str]]]
__sqlmodel_relationships__: ClassVar[Dict[str, RelationshipProperty]] # type: ignore
__name__: ClassVar[str]
metadata: ClassVar[MetaData]
class Config:
orm_mode = True
def __new__(cls, *args: Any, **kwargs: Any) -> Any:
new_object = super().__new__(cls)
# SQLAlchemy doesn't call __init__ on the base class
# Ref: https://docs.sqlalchemy.org/en/14/orm/constructors.html
# Set __fields_set__ here, that would have been set when calling __init__
# in the Pydantic model so that when SQLAlchemy sets attributes that are
# added (e.g. when querying from DB) to the __fields_set__, this already exists
object.__setattr__(new_object, "__fields_set__", set())
return new_object
def __init__(__pydantic_self__, **data: Any) -> None:
# Uses something other than `self` the first arg to allow "self" as a
# settable attribute
if TYPE_CHECKING:
__pydantic_self__.__dict__: Dict[str, Any] = {}
__pydantic_self__.__fields_set__: Set[str] = set()
values, fields_set, validation_error = validate_model(
__pydantic_self__.__class__, data
)
# Only raise errors if not a SQLModel model
if (
not getattr(__pydantic_self__.__config__, "table", False)
and validation_error
):
raise validation_error
# Do not set values as in Pydantic, pass them through setattr, so SQLAlchemy
# can handle them
# object.__setattr__(__pydantic_self__, '__dict__', values)
object.__setattr__(__pydantic_self__, "__fields_set__", fields_set)
for key, value in values.items():
setattr(__pydantic_self__, key, value)
non_pydantic_keys = data.keys() - values.keys()
for key in non_pydantic_keys:
if key in __pydantic_self__.__sqlmodel_relationships__:
setattr(__pydantic_self__, key, data[key])
def __setattr__(self, name: str, value: Any) -> None:
if name in {"_sa_instance_state"}:
self.__dict__[name] = value
return
else:
# Set in SQLAlchemy, before Pydantic to trigger events and updates
if getattr(self.__config__, "table", False):
if is_instrumented(self, name):
set_attribute(self, name, value)
# Set in Pydantic model to trigger possible validation changes, only for
# non relationship values
if name not in self.__sqlmodel_relationships__:
super().__setattr__(name, value)
@classmethod
def from_orm(
cls: Type[_TSQLModel], obj: Any, update: Optional[Dict[str, Any]] = None
) -> _TSQLModel:
# Duplicated from Pydantic
if not cls.__config__.orm_mode:
raise ConfigError(
"You must have the config attribute orm_mode=True to use from_orm"
)
obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj)
# SQLModel, support update dict
if update is not None:
obj = {**obj, **update}
# End SQLModel support dict
if not getattr(cls.__config__, "table", False):
# If not table, normal Pydantic code
m: _TSQLModel = cls.__new__(cls)
else:
# If table, create the new instance normally to make SQLAlchemy create
# the _sa_instance_state attribute
m = cls()
values, fields_set, validation_error = validate_model(cls, obj)
if validation_error:
raise validation_error
# Updated to trigger SQLAlchemy internal handling
if not getattr(cls.__config__, "table", False):
object.__setattr__(m, "__dict__", values)
else:
for key, value in values.items():
setattr(m, key, value)
# Continue with standard Pydantic logic
object.__setattr__(m, "__fields_set__", fields_set)
m._init_private_attributes()
return m
@classmethod
def parse_obj(
cls: Type["SQLModel"], obj: Any, update: Optional[Dict[str, Any]] = None
) -> "SQLModel":
obj = cls._enforce_dict_if_root(obj)
# SQLModel, support update dict
if update is not None:
obj = {**obj, **update}
# End SQLModel support dict
return super().parse_obj(obj)
def __repr_args__(self) -> Sequence[Tuple[Optional[str], Any]]:
# Don't show SQLAlchemy private attributes
return [(k, v) for k, v in self.__dict__.items() if not k.startswith("_sa_")]
# From Pydantic, override to enforce validation with dict
@classmethod
def validate(cls: Type["SQLModel"], value: Any) -> "SQLModel":
if isinstance(value, cls):
return value.copy() if cls.__config__.copy_on_model_validation else value
value = cls._enforce_dict_if_root(value)
if isinstance(value, dict):
values, fields_set, validation_error = validate_model(cls, value)
if validation_error:
raise validation_error
model = cls(**values)
# Reset fields set, this would have been done in Pydantic in __init__
object.__setattr__(model, "__fields_set__", fields_set)
return model
elif cls.__config__.orm_mode:
return cls.from_orm(value)
elif cls.__custom_root_type__:
return cls.parse_obj(value)
else:
try:
value_as_dict = dict(value)
except (TypeError, ValueError) as e:
raise DictError() from e
return cls(**value_as_dict)
# From Pydantic, override to only show keys from fields, omit SQLAlchemy attributes
def _calculate_keys( # type: ignore
self,
include: Optional[Mapping[Union[int, str], Any]],
exclude: Optional[Mapping[Union[int, str], Any]],
exclude_unset: bool,
update: Optional[Dict[str, Any]] = None,
) -> Optional[AbstractSet[str]]:
if include is None and exclude is None and exclude_unset is False:
# Original in Pydantic:
# return None
# Updated to not return SQLAlchemy attributes
# Do not include relationships as that would easily lead to infinite
# recursion, or traversing the whole database
return self.__fields__.keys() # | self.__sqlmodel_relationships__.keys()
keys: AbstractSet[str]
if exclude_unset:
keys = self.__fields_set__.copy()
else:
# Original in Pydantic:
# keys = self.__dict__.keys()
# Updated to not return SQLAlchemy attributes
# Do not include relationships as that would easily lead to infinite
# recursion, or traversing the whole database
keys = self.__fields__.keys() # | self.__sqlmodel_relationships__.keys()
if include is not None:
keys &= include.keys()
if update:
keys -= update.keys()
if exclude:
keys -= {k for k, v in exclude.items() if _value_items_is_true(v)}
return keys
@declared_attr # type: ignore
def __tablename__(cls) -> str:
return cls.__name__.lower() | if sa_relationship_kwargs is not None:
raise RuntimeError(
"Passing sa_relationship_kwargs is not supported when " |
index.ts | export * from './Disclosure'; |
||
day21.rs | use std::collections::{HashMap, HashSet};
#[derive(PartialEq, Debug, Clone)]
struct Food {
ingredients: HashSet<String>,
allergens: HashSet<String>,
}
impl<T: AsRef<str>> From<T> for Food {
fn from(input: T) -> Self {
let (ingredients, allergens) = input.as_ref().split_once(" (contains ").unwrap();
let ingredients = ingredients.split(' ').map(str::to_string).collect();
let allergens = allergens
.split(", ")
.map(|s| s.trim_end_matches(')'))
.map(str::to_string)
.collect();
Food {
ingredients,
allergens,
}
}
}
pub(crate) fn allergen_free_ingredient_appearance_count(input: &[String]) -> usize {
let mut foods: Vec<Food> = input.iter().map(Food::from).collect();
let allergen_ingredient_pairs = find_allergen_ingredient_pairs(&mut foods);
let allergen_free_ingredients = allergen_free_ingredients(allergen_ingredient_pairs, &foods);
ingredient_appearance_count(&foods, allergen_free_ingredients)
}
pub(crate) fn canonical_dangerous_ingredient_list(input: &[String]) -> String {
let mut foods: Vec<Food> = input.iter().map(Food::from).collect();
let allergen_ingredient_pairs = find_allergen_ingredient_pairs(&mut foods);
let mut sorted_allergen_ingredient_pairs: Vec<&(String, String)> =
allergen_ingredient_pairs.iter().collect();
sorted_allergen_ingredient_pairs.sort_by_key(|(allergen, _)| allergen);
sorted_allergen_ingredient_pairs
.iter()
.map(|(_, ingredient)| ingredient.to_string())
.collect::<Vec<_>>()
.join(",")
}
fn allergen_free_ingredients(
allergen_ingredient_pairs: HashSet<(String, String)>,
foods: &[Food],
) -> HashSet<String> {
let mut allergen_free_ingredients: HashSet<String> = foods
.iter()
.flat_map(|f| f.ingredients.iter())
.cloned()
.collect();
allergen_ingredient_pairs
.iter()
.for_each(|(_, ingredient)| {
allergen_free_ingredients.remove(ingredient);
});
allergen_free_ingredients
}
/// - Each allergen is found in exactly one ingredient.
/// - Each ingredient contains zero or one allergen.
/// - Allergens aren't always marked; when they're listed, the ingredient that contains
/// each listed allergen will be somewhere in the corresponding ingredients list.
/// However, even if an allergen isn't listed, the ingredient that contains that
/// allergen could still be present: maybe they forgot to label it,
/// or maybe it was labeled in a language you don't know.
fn find_allergen_ingredient_pairs(foods: &mut Vec<Food>) -> HashSet<(String, String)> {
let allergens: HashSet<&String> = foods.iter().flat_map(|f| f.allergens.iter()).collect();
let allergen_count = allergens.len();
// let ingredients: HashSet<&String> = foods.iter().flat_map(|f| f.ingredients.iter()).collect();
// println!(
// "{} allergens contained in {} ingredients of {} foods:\n{:?}",
// allergens.len(),
// ingredients.len(),
// foods.len(),
// allergens,
// );
let mut allergen_ingredient_pairs: HashSet<(String, String)> = HashSet::new();
let mut ingredients_by_allergen: HashMap<String, HashSet<String>> = HashMap::new();
while allergen_ingredient_pairs.len() != allergen_count {
for _ in 0..foods.len() {
// Remove food so the borrow checker won't complain about borrowing other food mutably
let mut food = foods.remove(0);
for mut other in foods.iter_mut() {
// Remove resolved ingredients from foods
allergen_ingredient_pairs
.iter()
.for_each(|(_, ingredient)| {
food.ingredients.remove(ingredient);
other.ingredients.remove(ingredient);
});
resolve_shared_ingredients_and_allergens(
&mut food,
&mut other,
&mut allergen_ingredient_pairs,
&mut ingredients_by_allergen,
);
resolve_single_ingredients(&mut allergen_ingredient_pairs, &mut food);
resolve_single_ingredients(&mut allergen_ingredient_pairs, &mut other);
}
foods.push(food);
}
}
allergen_ingredient_pairs
}
fn intersect(set1: &HashSet<String>, set2: &HashSet<String>) -> HashSet<String> {
set1.intersection(set2).cloned().collect()
}
fn resolve_shared_ingredients_and_allergens(
food1: &mut Food,
food2: &mut Food,
allergen_ingredient_pairs: &mut HashSet<(String, String)>,
ingredients_by_allergen: &mut HashMap<String, HashSet<String>>,
) {
let shared_allergens = intersect(&food1.allergens, &food2.allergens);
if shared_allergens.is_empty() {
return;
}
let shared_ingredients = intersect(&food1.ingredients, &food2.ingredients);
if shared_ingredients.is_empty() {
return;
}
if shared_allergens.len() == 1 {
if let Some((the_allergen, the_ingredient)) = narrow_down_possible_ingredients(
ingredients_by_allergen,
shared_allergens,
&shared_ingredients,
) {
allergen_ingredient_pairs.insert((the_allergen, the_ingredient));
}
}
if shared_ingredients.len() == 1 && (food1.allergens.len() == 1 || food2.allergens.len() == 1) {
resolve_single_shared_ingredient(
food1,
food2,
allergen_ingredient_pairs,
shared_ingredients,
);
}
}
fn narrow_down_possible_ingredients(
ingredients_by_allergen: &mut HashMap<String, HashSet<String>>,
shared_allergens: HashSet<String>,
shared_ingredients: &HashSet<String>,
) -> Option<(String, String)> {
let the_allergen = shared_allergens.iter().next().unwrap().clone();
if let Some(ingredients) = ingredients_by_allergen.remove(&the_allergen) {
let common_ingredients = intersect(&ingredients, shared_ingredients);
if common_ingredients.len() == 1 {
let the_ingredient = common_ingredients.iter().next().unwrap().to_string();
ingredients_by_allergen.insert(the_allergen.clone(), common_ingredients);
return Some((the_allergen, the_ingredient));
}
ingredients_by_allergen.insert(the_allergen, common_ingredients);
} else {
ingredients_by_allergen.insert(the_allergen, shared_ingredients.clone());
}
None
}
fn resolve_single_shared_ingredient(
food1: &mut Food,
food2: &mut Food,
allergen_ingredient_pairs: &mut HashSet<(String, String)>,
shared_ingredients: HashSet<String>,
) {
// Make `food` the one with the single matching allergen
let (matching_food, other_food) = match (food1.allergens.len(), food2.allergens.len()) {
(1, _) => (food1, food2),
(_, 1) => (food2, food1),
(_, _) => unreachable!(),
};
let the_allergen = matching_food.allergens.iter().next().unwrap();
if other_food.allergens.contains(the_allergen) {
let the_ingredient = shared_ingredients.iter().next().unwrap();
// println!(
// "Resolved by shared ingredient: {} contains {}",
// the_ingredient, the_allergen
// );
allergen_ingredient_pairs.insert((the_allergen.to_string(), the_ingredient.to_string()));
// Remove the only allergen, and its ingredient
matching_food.allergens = HashSet::new();
matching_food.ingredients.remove(the_ingredient);
}
}
fn resolve_single_ingredients(
allergen_ingredient_pairs: &mut HashSet<(String, String)>,
food: &mut Food,
) {
if food.ingredients.len() == 1 && food.allergens.len() == 1 {
let ingredient = food.ingredients.iter().next().unwrap();
let allergen = food.allergens.iter().next().unwrap();
// println!(
// "Resolved by single ingredient: {} contains {}",
// ingredient, allergen
// );
allergen_ingredient_pairs.insert((allergen.to_string(), ingredient.to_string()));
food.allergens = HashSet::new();
food.ingredients = HashSet::new();
}
}
fn ingredient_appearance_count(foods: &[Food], ingredients: HashSet<String>) -> usize {
foods
.iter()
.map(|f| {
f.ingredients
.iter()
.filter(|i| ingredients.contains(*i))
.count()
})
.sum()
}
#[cfg(test)]
mod tests {
use super::*;
use line_reader::*;
const EXAMPLE_1: &str = "mxmxvkd kfcds sqjhc nhms (contains dairy, fish)
trh fvjkl sbzzf mxmxvkd (contains dairy)
sqjhc fvjkl (contains soy)
sqjhc mxmxvkd sbzzf (contains fish)";
#[test]
fn food_from_str() {
let to_strings = |str_vec: Vec<&str>| str_vec.into_iter().map(str::to_string).collect();
let actual = Food::from("mxmxvkd kfcds sqjhc nhms (contains dairy, fish)");
let expected = Food {
ingredients: to_strings(vec!["mxmxvkd", "kfcds", "sqjhc", "nhms"]),
allergens: to_strings(vec!["dairy", "fish"]),
};
assert_eq!(actual, expected);
}
#[test]
fn part_1_example_1() {
assert_eq!(
allergen_free_ingredient_appearance_count(&read_str_to_lines(EXAMPLE_1)),
5
);
}
#[test]
fn example_1_ingredient_appearance_count() {
let foods: Vec<Food> = read_str_to_lines(EXAMPLE_1)
.iter()
.map(Food::from)
.collect();
let allergen_free_ingredients = vec![
"kfcds".to_string(),
"nhms".to_string(),
"sbzzf".to_string(),
"trh".to_string(),
];
assert_eq!(
ingredient_appearance_count(
&foods,
allergen_free_ingredients.iter().cloned().collect()
),
5
);
}
#[test]
fn part_1() {
assert_eq!(
allergen_free_ingredient_appearance_count(&read_file_to_lines("input/day21.txt")),
2517
);
}
#[test]
fn part_2_example_1() {
assert_eq!(
canonical_dangerous_ingredient_list(&read_str_to_lines(EXAMPLE_1)),
"mxmxvkd,sqjhc,fvjkl".to_string()
);
}
#[test]
fn part_2() |
}
| {
assert_eq!(
canonical_dangerous_ingredient_list(&read_file_to_lines("input/day21.txt")),
"rhvbn,mmcpg,kjf,fvk,lbmt,jgtb,hcbdb,zrb".to_string()
);
} |
union_dtor.rs | #![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
#[repr(C)]
pub union UnionWithDtor {
pub mFoo: ::std::os::raw::c_int,
pub mBar: *mut ::std::os::raw::c_void,
}
#[test]
fn bindgen_test_layout_UnionWithDtor() {
assert_eq!(
::std::mem::size_of::<UnionWithDtor>(),
8usize,
concat!("Size of: ", stringify!(UnionWithDtor))
);
assert_eq!(
::std::mem::align_of::<UnionWithDtor>(),
8usize,
concat!("Alignment of ", stringify!(UnionWithDtor))
);
assert_eq!(
unsafe {
let uninit = ::std::mem::MaybeUninit::<UnionWithDtor>::uninit();
let ptr = uninit.as_ptr();
::std::ptr::addr_of!((*ptr).mFoo) as usize - ptr as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(UnionWithDtor),
"::",
stringify!(mFoo)
)
);
assert_eq!(
unsafe {
let uninit = ::std::mem::MaybeUninit::<UnionWithDtor>::uninit();
let ptr = uninit.as_ptr();
::std::ptr::addr_of!((*ptr).mBar) as usize - ptr as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(UnionWithDtor),
"::",
stringify!(mBar)
)
);
}
extern "C" {
#[link_name = "\u{1}_ZN13UnionWithDtorD1Ev"]
pub fn UnionWithDtor_UnionWithDtor_destructor(this: *mut UnionWithDtor);
}
impl Default for UnionWithDtor {
fn default() -> Self |
}
impl UnionWithDtor {
#[inline]
pub unsafe fn destruct(&mut self) {
UnionWithDtor_UnionWithDtor_destructor(self)
}
}
| {
let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
unsafe {
::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
s.assume_init()
}
} |
logging.py | """
This is a configuration file for logs from FullwavePy modules.
The engine is the 'logging' module which is a part of the Python's
standard library.
Additionally, the 'autologging' module is used as a very convienient wrapper.
It allows to define a function's logger (it contains both the module's and
function's name, as defined in formatter), just by decorating this function
with @logged. It applies also to class methods ('instance methods' of classes,
to be precise) and it's done even more simply, by decorating the class as a whole.
What's more, it provides @traced decorator (works just like @logged)
that allows tracing the execution of the code without a need for
writing your own decorator (and logging from a function inside the
decorator definition is challenging anyway).
Notes
-----
Here we set a default level of a root logger, its two handlers (one for each
type of the standard streams and formatting of all messages.
List of the levels and their
corresponding numerical values:
50 CRITICAL
40 ERROR
30 WARN(ING)
20 INFO
10 DEBUG
1 TRACE
0 NOTSET
"""
from sys import stdout, stderr
from logging import StreamHandler, Formatter, getLogger,\
NOTSET, DEBUG, INFO, WARNING, ERROR, CRITICAL
from autologging import TRACE
# -------------------------------------------------------------------------------
# CONVENIENCE FUNCTIONS
# -------------------------------------------------------------------------------
def log_lvl(lvl):
logger = getLogger()
logger.setLevel(lvl)
def lll(lvl): # alias of above
|
# -------------------------------------------------------------------------------
# FILTERS
# -------------------------------------------------------------------------------
class LevelFilter(object):
"""
Specify a custom logging filter to filter out
records with a level you don't need
Notes
-----
This is the format expected by
handler.addFilter()
Filters are used primarily to filter records
based on more sophisticated criteria than levels
"""
def __init__(self, level):
self.level = level
def filter(self, record):
return record.levelno != self.level
# -------------------------------------------------------------------------------
# FORMATTING OF MESSAGES
# -------------------------------------------------------------------------------
formatter = Formatter("%(levelname)s:%(name)s.%(funcName)s: %(message)s")
# -------------------------------------------------------------------------------
# LOGGERS (ONLY ROOT LOGGER HERE)
# -------------------------------------------------------------------------------
logger = getLogger()
logger.setLevel(INFO)
# -------------------------------------------------------------------------------
# HANDLERS
# -------------------------------------------------------------------------------
# REDIRECT TO STDERR FOR LVL >= WARN
h1 = StreamHandler(stream=stderr)
h1.setLevel(WARNING)
h1.setFormatter(formatter)
# REDIRECT TO STDOUT FOR LVL >= TRACE
h2 = StreamHandler(stream=stdout)
h2.setLevel(TRACE)
h2.setFormatter(formatter)
# EXCLUDE LEVELS HANDLED BY h1 TO PREVENT REDUNDANCY (DOUBLE MESSAGES)
h2.addFilter(LevelFilter(WARNING))
h2.addFilter(LevelFilter(ERROR))
# PUT TOGETHER
logger.handlers = [h1, h2]
# -------------------------------------------------------------------------------
# MUTE CHATTY MATPLOTLIB'S DEBUG. MESSAGES
# -------------------------------------------------------------------------------
mpl_logger = getLogger('matplotlib.pyplot')
mpl_logger.setLevel(WARNING) | log_lvl(lvl) |
conversion.go | /*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package testing
import (
"github.com/ducesoft/apimachinery/pkg/conversion"
"github.com/ducesoft/apimachinery/pkg/runtime"
)
func convertEmbeddedTestToEmbeddedTestExternal(in *EmbeddedTest, out *EmbeddedTestExternal, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
out.ID = in.ID
if err := runtime.Convert_runtime_Object_To_runtime_RawExtension(&in.Object, &out.Object, s); err != nil {
return err
}
if err := runtime.Convert_runtime_Object_To_runtime_RawExtension(&in.EmptyObject, &out.EmptyObject, s); err != nil {
return err
}
return nil
}
func convertEmbeddedTestExternalToEmbeddedTest(in *EmbeddedTestExternal, out *EmbeddedTest, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
out.ID = in.ID
if err := runtime.Convert_runtime_RawExtension_To_runtime_Object(&in.Object, &out.Object, s); err != nil {
return err
}
if err := runtime.Convert_runtime_RawExtension_To_runtime_Object(&in.EmptyObject, &out.EmptyObject, s); err != nil {
return err
}
return nil
}
func | (in *ObjectTest, out *ObjectTestExternal, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
out.ID = in.ID
if in.Items != nil {
out.Items = make([]runtime.RawExtension, len(in.Items))
for i := range in.Items {
if err := runtime.Convert_runtime_Object_To_runtime_RawExtension(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func convertObjectTestExternalToObjectTest(in *ObjectTestExternal, out *ObjectTest, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
out.ID = in.ID
if in.Items != nil {
out.Items = make([]runtime.Object, len(in.Items))
for i := range in.Items {
if err := runtime.Convert_runtime_RawExtension_To_runtime_Object(&in.Items[i], &out.Items[i], s); err != nil {
return err
}
}
} else {
out.Items = nil
}
return nil
}
func convertInternalSimpleToExternalSimple(in *InternalSimple, out *ExternalSimple, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
out.TestString = in.TestString
return nil
}
func convertExternalSimpleToInternalSimple(in *ExternalSimple, out *InternalSimple, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
out.TestString = in.TestString
return nil
}
func convertInternalExtensionTypeToExternalExtensionType(in *InternalExtensionType, out *ExternalExtensionType, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
if err := runtime.Convert_runtime_Object_To_runtime_RawExtension(&in.Extension, &out.Extension, s); err != nil {
return err
}
return nil
}
func convertExternalExtensionTypeToInternalExtensionType(in *ExternalExtensionType, out *InternalExtensionType, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
if err := runtime.Convert_runtime_RawExtension_To_runtime_Object(&in.Extension, &out.Extension, s); err != nil {
return err
}
return nil
}
func convertInternalOptionalExtensionTypeToExternalOptionalExtensionType(in *InternalOptionalExtensionType, out *ExternalOptionalExtensionType, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
if err := runtime.Convert_runtime_Object_To_runtime_RawExtension(&in.Extension, &out.Extension, s); err != nil {
return err
}
return nil
}
func convertExternalOptionalExtensionTypeToInternalOptionalExtensionType(in *ExternalOptionalExtensionType, out *InternalOptionalExtensionType, s conversion.Scope) error {
out.TypeMeta = in.TypeMeta
if err := runtime.Convert_runtime_RawExtension_To_runtime_Object(&in.Extension, &out.Extension, s); err != nil {
return err
}
return nil
}
func convertTestType1ToExternalTestType1(in *TestType1, out *ExternalTestType1, s conversion.Scope) error {
out.MyWeirdCustomEmbeddedVersionKindField = in.MyWeirdCustomEmbeddedVersionKindField
out.A = in.A
out.B = in.B
out.C = in.C
out.D = in.D
out.E = in.E
out.F = in.F
out.G = in.G
out.H = in.H
out.I = in.I
out.J = in.J
out.K = in.K
out.L = in.L
out.M = in.M
if in.N != nil {
out.N = make(map[string]ExternalTestType2)
for key := range in.N {
in, tmp := in.N[key], ExternalTestType2{}
if err := convertTestType2ToExternalTestType2(&in, &tmp, s); err != nil {
return err
}
out.N[key] = tmp
}
} else {
out.N = nil
}
if in.O != nil {
out.O = new(ExternalTestType2)
if err := convertTestType2ToExternalTestType2(in.O, out.O, s); err != nil {
return err
}
} else {
out.O = nil
}
if in.P != nil {
out.P = make([]ExternalTestType2, len(in.P))
for i := range in.P {
if err := convertTestType2ToExternalTestType2(&in.P[i], &out.P[i], s); err != nil {
return err
}
}
}
return nil
}
func convertExternalTestType1ToTestType1(in *ExternalTestType1, out *TestType1, s conversion.Scope) error {
out.MyWeirdCustomEmbeddedVersionKindField = in.MyWeirdCustomEmbeddedVersionKindField
out.A = in.A
out.B = in.B
out.C = in.C
out.D = in.D
out.E = in.E
out.F = in.F
out.G = in.G
out.H = in.H
out.I = in.I
out.J = in.J
out.K = in.K
out.L = in.L
out.M = in.M
if in.N != nil {
out.N = make(map[string]TestType2)
for key := range in.N {
in, tmp := in.N[key], TestType2{}
if err := convertExternalTestType2ToTestType2(&in, &tmp, s); err != nil {
return err
}
out.N[key] = tmp
}
} else {
out.N = nil
}
if in.O != nil {
out.O = new(TestType2)
if err := convertExternalTestType2ToTestType2(in.O, out.O, s); err != nil {
return err
}
} else {
out.O = nil
}
if in.P != nil {
out.P = make([]TestType2, len(in.P))
for i := range in.P {
if err := convertExternalTestType2ToTestType2(&in.P[i], &out.P[i], s); err != nil {
return err
}
}
}
return nil
}
func convertTestType2ToExternalTestType2(in *TestType2, out *ExternalTestType2, s conversion.Scope) error {
out.A = in.A
out.B = in.B
return nil
}
func convertExternalTestType2ToTestType2(in *ExternalTestType2, out *TestType2, s conversion.Scope) error {
out.A = in.A
out.B = in.B
return nil
}
func RegisterConversions(s *runtime.Scheme) error {
if err := s.AddConversionFunc((*EmbeddedTest)(nil), (*EmbeddedTestExternal)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertEmbeddedTestToEmbeddedTestExternal(a.(*EmbeddedTest), b.(*EmbeddedTestExternal), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*EmbeddedTestExternal)(nil), (*EmbeddedTest)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertEmbeddedTestExternalToEmbeddedTest(a.(*EmbeddedTestExternal), b.(*EmbeddedTest), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*ObjectTest)(nil), (*ObjectTestExternal)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertObjectTestToObjectTestExternal(a.(*ObjectTest), b.(*ObjectTestExternal), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*ObjectTestExternal)(nil), (*ObjectTest)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertObjectTestExternalToObjectTest(a.(*ObjectTestExternal), b.(*ObjectTest), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*InternalSimple)(nil), (*ExternalSimple)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertInternalSimpleToExternalSimple(a.(*InternalSimple), b.(*ExternalSimple), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*ExternalSimple)(nil), (*InternalSimple)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertExternalSimpleToInternalSimple(a.(*ExternalSimple), b.(*InternalSimple), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*InternalExtensionType)(nil), (*ExternalExtensionType)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertInternalExtensionTypeToExternalExtensionType(a.(*InternalExtensionType), b.(*ExternalExtensionType), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*ExternalExtensionType)(nil), (*InternalExtensionType)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertExternalExtensionTypeToInternalExtensionType(a.(*ExternalExtensionType), b.(*InternalExtensionType), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*InternalOptionalExtensionType)(nil), (*ExternalOptionalExtensionType)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertInternalOptionalExtensionTypeToExternalOptionalExtensionType(a.(*InternalOptionalExtensionType), b.(*ExternalOptionalExtensionType), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*ExternalOptionalExtensionType)(nil), (*InternalOptionalExtensionType)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertExternalOptionalExtensionTypeToInternalOptionalExtensionType(a.(*ExternalOptionalExtensionType), b.(*InternalOptionalExtensionType), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*TestType1)(nil), (*ExternalTestType1)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertTestType1ToExternalTestType1(a.(*TestType1), b.(*ExternalTestType1), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*ExternalTestType1)(nil), (*TestType1)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertExternalTestType1ToTestType1(a.(*ExternalTestType1), b.(*TestType1), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*TestType2)(nil), (*ExternalTestType2)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertTestType2ToExternalTestType2(a.(*TestType2), b.(*ExternalTestType2), scope)
}); err != nil {
return err
}
if err := s.AddConversionFunc((*ExternalTestType2)(nil), (*TestType2)(nil), func(a, b interface{}, scope conversion.Scope) error {
return convertExternalTestType2ToTestType2(a.(*ExternalTestType2), b.(*TestType2), scope)
}); err != nil {
return err
}
return nil
}
| convertObjectTestToObjectTestExternal |
link.py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
from typing import List, Any, Union
class Link(Base):
| """List of impairment links. Each link consists of a pair of ports.
The Link class encapsulates a list of link resources that are managed by the system.
A list of resources can be retrieved from the server using the Link.find() method.
"""
__slots__ = ()
_SDM_NAME = 'link'
_SDM_ATT_MAP = {
'ForwardingInterruption': 'forwardingInterruption',
'Name': 'name',
'RxPortName': 'rxPortName',
'TxPortName': 'txPortName',
}
_SDM_ENUM_MAP = {
}
def __init__(self, parent, list_op=False):
super(Link, self).__init__(parent, list_op)
@property
def LosLof(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.impairment.link.loslof.loslof.LosLof): An instance of the LosLof class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.impairment.link.loslof.loslof import LosLof
if self._properties.get('LosLof', None) is not None:
return self._properties.get('LosLof')
else:
return LosLof(self)._select()
@property
def ForwardingInterruption(self):
# type: () -> bool
"""
Returns
-------
- bool: Emulate a link fault. Drop all packets received.
"""
return self._get_attribute(self._SDM_ATT_MAP['ForwardingInterruption'])
@ForwardingInterruption.setter
def ForwardingInterruption(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['ForwardingInterruption'], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: The name of the link: receiving port -> transmitting port.
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@property
def RxPortName(self):
# type: () -> str
"""
Returns
-------
- str: The name of the receiving port.
"""
return self._get_attribute(self._SDM_ATT_MAP['RxPortName'])
@property
def TxPortName(self):
# type: () -> str
"""
Returns
-------
- str: The name of the transmitting port.
"""
return self._get_attribute(self._SDM_ATT_MAP['TxPortName'])
def update(self, ForwardingInterruption=None):
# type: (bool) -> Link
"""Updates link resource on the server.
Args
----
- ForwardingInterruption (bool): Emulate a link fault. Drop all packets received.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, ForwardingInterruption=None):
# type: (bool) -> Link
"""Adds a new link resource on the json, only valid with config assistant
Args
----
- ForwardingInterruption (bool): Emulate a link fault. Drop all packets received.
Returns
-------
- self: This instance with all currently retrieved link resources using find and the newly added link resources available through an iterator or index
Raises
------
- Exception: if this function is not being used with config assistance
"""
return self._add_xpath(self._map_locals(self._SDM_ATT_MAP, locals()))
def find(self, ForwardingInterruption=None, Name=None, RxPortName=None, TxPortName=None):
# type: (bool, str, str, str) -> Link
"""Finds and retrieves link resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve link resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all link resources from the server.
Args
----
- ForwardingInterruption (bool): Emulate a link fault. Drop all packets received.
- Name (str): The name of the link: receiving port -> transmitting port.
- RxPortName (str): The name of the receiving port.
- TxPortName (str): The name of the transmitting port.
Returns
-------
- self: This instance with matching link resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of link data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the link resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href) |
|
middleware.go | /*
Copyright 2017 Gravitational, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package auth
import (
"context"
"crypto/tls"
"crypto/x509"
"net"
"net/http"
"github.com/gravitational/teleport"
"github.com/gravitational/teleport/lib/limiter"
"github.com/gravitational/teleport/lib/services"
"github.com/gravitational/teleport/lib/tlsca"
"github.com/gravitational/teleport/lib/utils"
"golang.org/x/net/http2"
"github.com/gravitational/trace"
)
// TLSServerConfig is a configuration for TLS server
type TLSServerConfig struct {
// TLS is a base TLS configuration
TLS *tls.Config
// API is API server configuration
APIConfig
// LimiterConfig is limiter config
LimiterConfig limiter.LimiterConfig
// AccessPoint is caching access point
AccessPoint AccessPoint
// Component is used for debugging purposes
Component string
// AcceptedUsage restricts authentication
// to a subset of certificates based on the metadata
AcceptedUsage []string
}
// CheckAndSetDefaults checks and sets default values
func (c *TLSServerConfig) CheckAndSetDefaults() error {
if c.TLS == nil {
return trace.BadParameter("missing parameter TLS")
}
c.TLS.ClientAuth = tls.VerifyClientCertIfGiven
if c.TLS.ClientCAs == nil {
return trace.BadParameter("missing parameter TLS.ClientCAs")
}
if c.TLS.RootCAs == nil {
return trace.BadParameter("missing parameter TLS.RootCAs")
}
if len(c.TLS.Certificates) == 0 {
return trace.BadParameter("missing parameter TLS.Certificates")
}
if c.AccessPoint == nil {
return trace.BadParameter("missing parameter AccessPoint")
}
return nil
}
// TLSServer is TLS auth server
type TLSServer struct {
*http.Server
// TLSServerConfig is TLS server configuration used for auth server
TLSServerConfig
}
// NewTLSServer returns new unstarted TLS server
func | (cfg TLSServerConfig) (*TLSServer, error) {
if err := cfg.CheckAndSetDefaults(); err != nil {
return nil, trace.Wrap(err)
}
// limiter limits requests by frequency and amount of simultaneous
// connections per client
limiter, err := limiter.NewLimiter(cfg.LimiterConfig)
if err != nil {
return nil, trace.Wrap(err)
}
// authMiddleware authenticates request assuming TLS client authentication
// adds authentication information to the context
// and passes it to the API server
authMiddleware := &AuthMiddleware{
AccessPoint: cfg.AccessPoint,
AcceptedUsage: cfg.AcceptedUsage,
}
authMiddleware.Wrap(NewGRPCServer(cfg.APIConfig))
// Wrap sets the next middleware in chain to the authMiddleware
limiter.WrapHandle(authMiddleware)
// force client auth if given
cfg.TLS.ClientAuth = tls.VerifyClientCertIfGiven
cfg.TLS.NextProtos = []string{http2.NextProtoTLS}
server := &TLSServer{
TLSServerConfig: cfg,
Server: &http.Server{
Handler: limiter,
},
}
server.TLS.GetConfigForClient = server.GetConfigForClient
return server, nil
}
// Serve takes TCP listener, upgrades to TLS using config and starts serving
func (t *TLSServer) Serve(listener net.Listener) error {
return t.Server.Serve(tls.NewListener(listener, t.TLS))
}
// GetConfigForClient is getting called on every connection
// and server's GetConfigForClient reloads the list of trusted
// local and remote certificate authorities
func (t *TLSServer) GetConfigForClient(info *tls.ClientHelloInfo) (*tls.Config, error) {
var clusterName string
var err error
if info.ServerName != "" {
clusterName, err = DecodeClusterName(info.ServerName)
if err != nil {
if !trace.IsNotFound(err) {
log.Warningf("Client sent unsupported cluster name %q, what resulted in error %v.", info.ServerName, err)
return nil, trace.AccessDenied("access is denied")
}
}
}
// update client certificate pool based on currently trusted TLS
// certificate authorities.
// TODO(klizhentas) drop connections of the TLS cert authorities
// that are not trusted
// TODO(klizhentas) what are performance implications of returning new config
// per connections? E.g. what happens to session tickets. Benchmark this.
pool, err := t.AuthServer.ClientCertPool(clusterName)
if err != nil {
log.Errorf("failed to retrieve client pool: %v", trace.DebugReport(err))
// this falls back to the default config
return nil, nil
}
tlsCopy := t.TLS.Clone()
tlsCopy.ClientCAs = pool
return tlsCopy, nil
}
// AuthMiddleware is authentication middleware checking every request
type AuthMiddleware struct {
// AccessPoint is a caching access point for auth server
AccessPoint AccessPoint
// Handler is HTTP handler called after the middleware checks requests
Handler http.Handler
// AcceptedUsage restricts authentication
// to a subset of certificates based on certificate metadata,
// for example middleware can reject certificates with mismatching usage.
// If empty, will only accept certificates with non-limited usage,
// if set, will accept certificates with non-limited usage,
// and usage exactly matching the specified values.
AcceptedUsage []string
}
// Wrap sets next handler in chain
func (a *AuthMiddleware) Wrap(h http.Handler) {
a.Handler = h
}
// GetUser returns authenticated user based on request metadata set by HTTP server
func (a *AuthMiddleware) GetUser(r *http.Request) (interface{}, error) {
peers := r.TLS.PeerCertificates
if len(peers) > 1 {
// when turning intermediaries on, don't forget to verify
// https://github.com/kubernetes/kubernetes/pull/34524/files#diff-2b283dde198c92424df5355f39544aa4R59
return nil, trace.AccessDenied("access denied: intermediaries are not supported")
}
localClusterName, err := a.AccessPoint.GetDomainName()
if err != nil {
return nil, trace.Wrap(err)
}
// with no client authentication in place, middleware
// assumes not-privileged Nop role.
// it theoretically possible to use bearer token auth even
// for connections without auth, but this is not active use-case
// therefore it is not allowed to reduce scope
if len(peers) == 0 {
return BuiltinRole{
GetClusterConfig: a.AccessPoint.GetClusterConfig,
Role: teleport.RoleNop,
Username: string(teleport.RoleNop),
ClusterName: localClusterName,
}, nil
}
clientCert := peers[0]
certClusterName, err := tlsca.ClusterName(clientCert.Issuer)
if err != nil {
log.Warnf("Failed to parse client certificate %v.", err)
return nil, trace.AccessDenied("access denied: invalid client certificate")
}
identity, err := tlsca.FromSubject(clientCert.Subject)
if err != nil {
return nil, trace.Wrap(err)
}
// If there is any restriction on the certificate usage
// reject the API server request. This is done so some classes
// of certificates issued for kubernetes usage by proxy, can not be used
// against auth server. Later on we can extend more
// advanced cert usage, but for now this is the safest option.
if len(identity.Usage) != 0 && !utils.StringSlicesEqual(a.AcceptedUsage, identity.Usage) {
log.Warningf("Restricted certificate of user %q with usage %v rejected while accessing the auth endpoint with acceptable usage %v.",
identity.Username, identity.Usage, a.AcceptedUsage)
return nil, trace.AccessDenied("access denied: invalid client certificate")
}
// this block assumes interactive user from remote cluster
// based on the remote certificate authority cluster name encoded in
// x509 organization name. This is a safe check because:
// 1. Trust and verification is established during TLS handshake
// by creating a cert pool constructed of trusted certificate authorities
// 2. Remote CAs are not allowed to have the same cluster name
// as the local certificate authority
if certClusterName != localClusterName {
// make sure that this user does not have system role
// the local auth server can not truste remote servers
// to issue certificates with system roles (e.g. Admin),
// to get unrestricted access to the local cluster
systemRole := findSystemRole(identity.Groups)
if systemRole != nil {
return RemoteBuiltinRole{
Role: *systemRole,
Username: identity.Username,
ClusterName: certClusterName,
}, nil
}
return RemoteUser{
ClusterName: certClusterName,
Username: identity.Username,
Principals: identity.Principals,
RemoteRoles: identity.Groups,
}, nil
}
// code below expects user or service from local cluster, to distinguish between
// interactive users and services (e.g. proxies), the code below
// checks for presence of system roles issued in certificate identity
systemRole := findSystemRole(identity.Groups)
// in case if the system role is present, assume this is a service
// agent, e.g. Proxy, connecting to the cluster
if systemRole != nil {
return BuiltinRole{
GetClusterConfig: a.AccessPoint.GetClusterConfig,
Role: *systemRole,
Username: identity.Username,
ClusterName: localClusterName,
}, nil
}
// otherwise assume that is a local role, no need to pass the roles
// as it will be fetched from the local database
return LocalUser{
Username: identity.Username,
}, nil
}
func findSystemRole(roles []string) *teleport.Role {
for _, role := range roles {
systemRole := teleport.Role(role)
err := systemRole.Check()
if err == nil {
return &systemRole
}
}
return nil
}
// ServeHTTP serves HTTP requests
func (a *AuthMiddleware) ServeHTTP(w http.ResponseWriter, r *http.Request) {
baseContext := r.Context()
if baseContext == nil {
baseContext = context.TODO()
}
user, err := a.GetUser(r)
if err != nil {
trace.WriteError(w, err)
return
}
// determine authenticated user based on the request parameters
requestWithContext := r.WithContext(context.WithValue(baseContext, ContextUser, user))
a.Handler.ServeHTTP(w, requestWithContext)
}
// ClientCertPool returns trusted x509 cerificate authority pool
func ClientCertPool(client AccessPoint) (*x509.CertPool, error) {
pool := x509.NewCertPool()
var authorities []services.CertAuthority
hostCAs, err := client.GetCertAuthorities(services.HostCA, false)
if err != nil {
return nil, trace.Wrap(err)
}
userCAs, err := client.GetCertAuthorities(services.UserCA, false)
if err != nil {
return nil, trace.Wrap(err)
}
authorities = append(authorities, hostCAs...)
authorities = append(authorities, userCAs...)
for _, auth := range authorities {
for _, keyPair := range auth.GetTLSKeyPairs() {
cert, err := tlsca.ParseCertificatePEM(keyPair.Cert)
if err != nil {
return nil, trace.Wrap(err)
}
pool.AddCert(cert)
}
}
return pool, nil
}
| NewTLSServer |
webgl_util.js | // Unkown source, but useful for Sylvester
// https://developer.mozilla.org/en/WebGL/Adding_2D_content_to_a_WebGL_context
// augment Sylvester some
Matrix.Translation = function (v)
{
if (v.elements.length == 2) {
var r = Matrix.I(3);
r.elements[2][0] = v.elements[0];
r.elements[2][1] = v.elements[1];
return r;
}
if (v.elements.length == 3) {
var r = Matrix.I(4);
r.elements[0][3] = v.elements[0];
r.elements[1][3] = v.elements[1];
r.elements[2][3] = v.elements[2];
return r;
}
throw "Invalid length for Translation";
}
Matrix.prototype.flatten = function ()
{
var result = [];
if (this.elements.length == 0)
return [];
for (var j = 0; j < this.elements[0].length; j++)
for (var i = 0; i < this.elements.length; i++)
result.push(this.elements[i][j]);
return result;
}
Matrix.prototype.ensure4x4 = function()
{
if (this.elements.length == 4 &&
this.elements[0].length == 4)
return this;
if (this.elements.length > 4 ||
this.elements[0].length > 4)
return null;
for (var i = 0; i < this.elements.length; i++) {
for (var j = this.elements[i].length; j < 4; j++) {
if (i == j)
this.elements[i].push(1);
else
this.elements[i].push(0);
}
}
for (var i = this.elements.length; i < 4; i++) {
if (i == 0)
this.elements.push([1, 0, 0, 0]);
else if (i == 1)
this.elements.push([0, 1, 0, 0]);
else if (i == 2)
this.elements.push([0, 0, 1, 0]);
else if (i == 3)
this.elements.push([0, 0, 0, 1]);
}
return this;
};
Matrix.prototype.make3x3 = function()
{
if (this.elements.length != 4 ||
this.elements[0].length != 4)
return null;
return Matrix.create([[this.elements[0][0], this.elements[0][1], this.elements[0][2]],
[this.elements[1][0], this.elements[1][1], this.elements[1][2]],
[this.elements[2][0], this.elements[2][1], this.elements[2][2]]]);
};
Vector.prototype.flatten = function ()
{
return this.elements;
};
function | (m) {
var s = "";
if (m.length == 16) {
for (var i = 0; i < 4; i++) {
s += "<span style='font-family: monospace'>[" + m[i*4+0].toFixed(4) + "," + m[i*4+1].toFixed(4) + "," + m[i*4+2].toFixed(4) + "," + m[i*4+3].toFixed(4) + "]</span><br>";
}
} else if (m.length == 9) {
for (var i = 0; i < 3; i++) {
s += "<span style='font-family: monospace'>[" + m[i*3+0].toFixed(4) + "," + m[i*3+1].toFixed(4) + "," + m[i*3+2].toFixed(4) + "]</font><br>";
}
} else {
return m.toString();
}
return s;
}
//
// gluLookAt
//
function makeLookAt(ex, ey, ez,
cx, cy, cz,
ux, uy, uz)
{
var eye = $V([ex, ey, ez]);
var center = $V([cx, cy, cz]);
var up = $V([ux, uy, uz]);
var mag;
var z = eye.subtract(center).toUnitVector();
var x = up.cross(z).toUnitVector();
var y = z.cross(x).toUnitVector();
var m = $M([[x.e(1), x.e(2), x.e(3), 0],
[y.e(1), y.e(2), y.e(3), 0],
[z.e(1), z.e(2), z.e(3), 0],
[0, 0, 0, 1]]);
var t = $M([[1, 0, 0, -ex],
[0, 1, 0, -ey],
[0, 0, 1, -ez],
[0, 0, 0, 1]]);
return m.x(t);
}
//
// glOrtho
//
function makeOrtho(left, right,
bottom, top,
znear, zfar)
{
var tx = -(right+left)/(right-left);
var ty = -(top+bottom)/(top-bottom);
var tz = -(zfar+znear)/(zfar-znear);
return $M([[2/(right-left), 0, 0, tx],
[0, 2/(top-bottom), 0, ty],
[0, 0, -2/(zfar-znear), tz],
[0, 0, 0, 1]]);
}
//
// gluPerspective
//
function makePerspective(fovy, aspect, znear, zfar)
{
var ymax = znear * Math.tan(fovy * Math.PI / 360.0);
var ymin = -ymax;
var xmin = ymin * aspect;
var xmax = ymax * aspect;
return makeFrustum(xmin, xmax, ymin, ymax, znear, zfar);
}
//
// glFrustum
//
function makeFrustum(left, right,
bottom, top,
znear, zfar)
{
var X = 2*znear/(right-left);
var Y = 2*znear/(top-bottom);
var A = (right+left)/(right-left);
var B = (top+bottom)/(top-bottom);
var C = -(zfar+znear)/(zfar-znear);
var D = -2*zfar*znear/(zfar-znear);
return $M([[X, 0, A, 0],
[0, Y, B, 0],
[0, 0, C, D],
[0, 0, -1, 0]]);
}
//
// glOrtho
//
function makeOrtho(left, right, bottom, top, znear, zfar)
{
var tx = - (right + left) / (right - left);
var ty = - (top + bottom) / (top - bottom);
var tz = - (zfar + znear) / (zfar - znear);
return $M([[2 / (right - left), 0, 0, tx],
[0, 2 / (top - bottom), 0, ty],
[0, 0, -2 / (zfar - znear), tz],
[0, 0, 0, 1]]);
} | mht |
gui.py | import logging
import os
import re
import sys
from typing import Any, Dict
import PySimpleGUI as sg # type: ignore
from PySimpleGUI.PySimpleGUI import Column # type: ignore
from .utils.encryption import encrypt_password, generate_key
logger = logging.getLogger(__name__)
def login_gui() -> Dict[str, Any]:
sg.theme('DarkTeal12')
def | (layout: list, key: str, visible: bool) -> Column:
"""
Helper function to hide and un-hide layouts
"""
return sg.pin(sg.Column(layout, key=key, visible=visible))
def main() -> Dict[str, Any]:
"""
Main GUI function
"""
new_user_section = [
[sg.Text('Username'), sg.Input(key='_USERNAME_', tooltip='What is your myASNB account username?')],
[sg.Text('Password'), sg.Input(key='_PASSWORD_', password_char="*", tooltip='What is your myASNB account password?')],
[sg.Text('Investment Amount (RM)'), sg.Input(key='_INVESTMENT_AMOUNT_', tooltip='How much do you want to invest?', change_submits=True, do_not_clear=True)],
]
layout = [
[sg.Text('myASNB Unit Holder Login', font='Helvetica 20', justification='center')],
[sg.Checkbox('Login as new user', enable_events=True, key='_CHECKBOX_KEY_', tooltip='Tick to login.')],
[collapse(new_user_section, '_SECTION_KEY_', False)],
[sg.OK('Start', tooltip='Start the bot (Press: ENTER)', size=(10, 1), bind_return_key=True, focus=True), sg.Cancel('Quit', tooltip='Goodbye.', size=(5, 1))],
]
window = sg.Window(
'Six Percent',
layout,
auto_size_text=False,
default_element_size=(25, 1),
text_justification='l',
return_keyboard_events=True,
grab_anywhere=False,
)
user_credentials_template = dict(username='', password='', investment_amount='')
user_credentials = user_credentials_template.copy()
section_toggle = False
while True:
event, values = window.read()
if event == '_CHECKBOX_KEY_':
section_toggle = not section_toggle
window['_SECTION_KEY_'].update(visible=section_toggle)
elif event == '_INVESTMENT_AMOUNT_':
window.FindElement(event).Update(re.sub("[^0-9]", "", values[event]))
user_credentials = {
**user_credentials,
'username': values['_USERNAME_'],
'password': values['_PASSWORD_'],
'investment_amount': values['_INVESTMENT_AMOUNT_'],
}
if event in (sg.WIN_CLOSED, 'Quit'):
logger.info('Exiting program gracefully')
window.close()
sys.exit()
elif event == 'Start':
break
window.close()
if not os.path.isfile('secret.key'):
generate_key()
# Encrypts user password before storing it
if user_credentials['password']:
user_credentials['password'] = encrypt_password(user_credentials['password'])
return dict() if user_credentials == user_credentials_template else user_credentials
user_info = main()
return user_info
if __name__ == '__main__':
logger.info(login_gui())
| collapse |
baseline_mr.py | """baseline_mr dataset."""
import tensorflow_datasets as tfds
import tensorflow as tf
# TODO(baseline_mr): Markdown description that will appear on the catalog page.
_DESCRIPTION = """
Description is **formatted** as markdown.
It should also contain any processing which has been applied (if any),
(e.g. corrupted example skipped, images cropped,...):
"""
# TODO(baseline_mr): BibTeX citation
_CITATION = """
"""
class BaselineMr(tfds.core.GeneratorBasedBuilder):
"""DatasetBuilder for baseline_mr dataset."""
VERSION = tfds.core.Version('1.0.0')
RELEASE_NOTES = {
'1.0.0': 'Initial release.',
}
def _info(self) -> tfds.core.DatasetInfo:
"""Returns the dataset metadata."""
# TODO(baseline_mr): Specifies the tfds.core.DatasetInfo object
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=tfds.features.FeaturesDict({
'source': tfds.features.Text(),
'target': tfds.features.Text(),
}),
homepage='https://dataset-homepage/',
citation=_CITATION,
)
def _split_generators(self, dl_manager: tfds.download.DownloadManager):
|
def _generate_examples(self, source, target):
"""Yields examples."""
# TODO(baseline_mr): Yields (key, example) tuples from the dataset
src = tf.io.gfile.GFile(source, 'r').readlines()
tgt = tf.io.gfile.GFile(target, 'r').readlines()
for idx, row in enumerate(zip(src, tgt)):
yield idx, {
'source': row[0],
'target': row[1],
}
| """Returns SplitGenerators."""
# TODO(baseline_mr): Downloads the data and defines the splits
path = dl_manager.download_and_extract('https://storage.googleapis.com/ai4b-anuvaad-nmt/baselines/mT5/baseline_mr/strict-en-mr.zip')
# TODO(baseline_mr): Returns the Dict[split names, Iterator[Key, Example]]
return {
'train': self._generate_examples(source=path/'en-mr/train/train.mr', target=path/'en-mr/train/train.en'),
'validation': self._generate_examples(source=path/'en-mr/dev/dev.mr', target=path/'en-mr/dev/dev.en')
} |
ratelimit.rs | /// Provides a simple ratelimit lock (that only works in tokio)
// use tokio::time::
use std::time::Duration;
use crate::Result;
use flume::{bounded as channel, Receiver, Sender};
use std::ops::Deref;
/// Holds the underlying `T` in a rate-limited way.
pub struct Ratelimit<T> {
inner: T,
recv: Receiver<()>,
send: Sender<()>,
wait_time: Duration,
}
struct | <'a, T> {
inner: &'a T,
send: &'a Sender<()>,
wait_time: &'a Duration,
}
impl<T> Ratelimit<T> {
/// Create a new ratelimit with at most `count` uses in `wait_time`.
pub fn new(inner: T, count: usize, wait_time: Duration) -> Self {
let (send, recv) = channel(count);
(0..count).for_each(|_| {
send.send(()).ok();
});
Self {
inner,
send,
recv,
wait_time,
}
}
/// Borrow the inner `T`. You can only hol this reference `count` times in `wait_time`.
/// The clock counts from the moment the ref is dropped.
pub async fn borrow<'a>(&'a self) -> Result<impl Deref<Target = T> + 'a> {
self.recv.recv_async().await?;
Ok(RatelimitGuard {
inner: &self.inner,
send: &self.send,
wait_time: &self.wait_time,
})
}
}
impl<'a, T> Deref for RatelimitGuard<'a, T> {
type Target = T;
fn deref(&self) -> &Self::Target {
self.inner
}
}
impl<'a, T> Drop for RatelimitGuard<'a, T> {
fn drop(&mut self) {
let send = self.send.clone();
let wait_time = self.wait_time.clone();
tokio::spawn(async move {
tokio::time::sleep(wait_time).await;
send.send_async(()).await.ok();
});
}
}
| RatelimitGuard |
usage_api.py | # coding: utf-8
"""
PKS
PKS API # noqa: E501
OpenAPI spec version: 1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# python 2 and python 3 compatibility library
import six
from container_service_extension.lib.pksclient.api_client import ApiClient
class UsageApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_usage(self, owner, **kwargs): # noqa: E501
"""get_usage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_usage(owner, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner: The owner name (required)
:return: Usage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_usage_with_http_info(owner, **kwargs) # noqa: E501
else:
(data) = self.get_usage_with_http_info(owner, **kwargs) # noqa: E501
return data
def get_usage_with_http_info(self, owner, **kwargs): # noqa: E501
"""get_usage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_usage_with_http_info(owner, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner: The owner name (required)
:return: Usage
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_usage" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner' is set
if ('owner' not in params or
params['owner'] is None):
raise ValueError("Missing the required parameter `owner` when calling `get_usage`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner' in params:
path_params['owner'] = params['owner'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth', 'uaa'] # noqa: E501
return self.api_client.call_api(
'/usages/{owner}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Usage', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def | (self, **kwargs): # noqa: E501
"""List all usage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_usages(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[Usage]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_usages_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_usages_with_http_info(**kwargs) # noqa: E501
return data
def list_usages_with_http_info(self, **kwargs): # noqa: E501
"""List all usage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_usages_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[Usage]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_usages" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth', 'uaa'] # noqa: E501
return self.api_client.call_api(
'/usages', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Usage]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| list_usages |
image.py | from lime import lime_image
from skimage.segmentation import mark_boundaries
class | (object):
@staticmethod
def explainer(images, model, image_size):
output = list()
for image in images:
explainer = lime_image.LimeImageExplainer(random_state=42)
explanation = explainer.explain_instance(
image,
model.predict
)
image, mask = explanation.get_image_and_mask(
model.predict(
image.reshape((1, image_size[0], image_size[1], 3))
).argmax(axis=1)[0],
positive_only=True,
hide_rest=False)
output.append(mark_boundaries(image, mask))
return output | Image |
clientIP.go | package clientIP
import (
"net"
"net/http"
"strings"
)
// ClientIP 尽最大努力实现获取客户端 IP 的算法。
// 解析 X-Real-IP 和 X-Forwarded-For 以便于反向代理(nginx 或 haproxy)可以正常工作。
func GetIp(r *http.Request) string {
xForwardedFor := r.Header.Get("X-Forwarded-For")
ip := strings.TrimSpace(strings.Split(xForwardedFor, ",")[0])
if ip != "" {
return ip
}
ip = strings.TrimSpace(r.Header.Get("X-Real-Ip"))
if | urn ip
}
if ip, _, err := net.SplitHostPort(strings.TrimSpace(r.RemoteAddr)); err == nil {
return ip
}
return ""
}
| ip != "" {
ret |
test_property_map.py | import numpy as np
from qmt.geometry import PropertyMap, MaterialPropertyMap
from qmt.materials import Materials
class DummyPartMap:
def __init__(self, part_ids):
assert len(part_ids) == 2
self.partIds = part_ids
def __call__(self, x):
|
def test_property_map():
int_map = DummyPartMap([0, 1])
str_map = DummyPartMap(['part1', 'part2'])
prop_map1 = PropertyMap(int_map, np.vectorize(lambda p: 'yes' if p > 0 else 'no'))
assert prop_map1.get_part((1., 2.)) == 1
assert np.all(prop_map1.get_part(-np.ones((2, 3))) == 0)
assert prop_map1((1., 2.)) == 'yes'
assert np.all(prop_map1(-np.ones((2, 3))) == 'no')
props = {'part1': 'yes', 'part2': 'no'}
prop_map2 = PropertyMap(str_map, np.vectorize(lambda p: props[p]))
assert prop_map2.get_part((1., 2.)) == 'part2'
assert np.all(prop_map2.get_part(-np.ones((2, 3))) == 'part1')
assert prop_map1((1., 2.)) == 'yes'
assert np.all(prop_map1(-np.ones((2, 3))) == 'no')
def test_materials_property_map():
int_map = DummyPartMap([0, 1])
str_map = DummyPartMap(['part1', 'part2'])
part_materials1 = {0: 'InAs', 1: 'GaSb'}
part_materials2 = {'part1': 'InAs', 'part2': 'Al'}
mat_lib = Materials(matDict={})
mat_lib.add_material('InAs', 'semi', electronMass=0.026, directBandGap=417.,
valenceBandOffset=-590.)
mat_lib.add_material('GaSb', 'semi', electronMass=.039, directBandGap=812.,
valenceBandOffset=-30.)
mat_lib.add_material('Al', 'metal', workFunction=4280.)
prop_map1 = MaterialPropertyMap(int_map, part_materials1, mat_lib, 'electronMass')
assert prop_map1.get_part((1., 2.)) == 1
assert np.all(prop_map1.get_part(-np.ones((2, 3))) == 0)
assert prop_map1((1., 2.)) == mat_lib['GaSb']['electronMass']
assert np.all(prop_map1(-np.ones((2, 3))) == mat_lib['InAs']['electronMass'])
prop_map2 = MaterialPropertyMap(str_map, part_materials2, mat_lib, 'directBandGap', eunit='eV',
fill_value=0.)
assert prop_map2.get_part((1., 2.)) == 'part2'
assert np.all(prop_map2.get_part(-np.ones((2, 3))) == 'part1')
assert prop_map2((1., 2.)) == 0.
assert np.all(prop_map2(-np.ones((2, 3))) == mat_lib.find('InAs', 'eV')['directBandGap'])
| assert np.ndim(x) >= 1
x = np.asanyarray(x)
if np.ndim(x) == 1:
return self.partIds[x[0] > 0]
else:
return np.where(x[..., 0] > 0, self.partIds[1], self.partIds[0]) |
signals.py | """Attach signals to this app's models."""
# -*- coding: utf-8 -*-
import json
import logging
import channels.layers
from asgiref.sync import async_to_sync
from django.db.models.signals import post_save
from django.dispatch import receiver
from .models import Job, Log
logger = logging.getLogger(__name__) # pylint: disable=C0103
def send_message(event):
'''
Call back function to send message to the browser
'''
message = event['text']
channel_layer = channels.layers.get_channel_layer()
# Send message to WebSocket
async_to_sync(channel_layer.send)(text_data=json.dumps(
message
))
@receiver(post_save, sender=Job, dispatch_uid='update_job_status_listeners')
def | (sender, instance, **kwargs):
'''
Sends job status to the browser when a Job is modified
'''
logger.debug("Job modified: {} :: status = {}.".format(
instance, instance.status))
user = instance.owner
group_name = 'job-user-{}'.format(user.username)
message = {
'job_id': instance.id,
'title': instance.title,
'status': instance.status,
'modified': instance.modified.isoformat(),
}
channel_layer = channels.layers.get_channel_layer()
async_to_sync(channel_layer.group_send)(
group_name,
{
'type': 'send_message',
'text': message
}
)
@receiver(post_save, sender=Log, dispatch_uid='update_job_log_listeners')
def update_job_log_listeners(sender, instance, **kwargs):
'''
Sends job status to the browser when a Log is modified
'''
logger.debug("Log modified: {} :: content = {}.".format(
instance, instance.content))
job_pk = instance.job.id
group_name = 'job-log-{}'.format(job_pk)
message = {
'log_id': instance.id,
'time': instance.time.isoformat(),
'content': instance.content,
'stream': instance.stream,
}
channel_layer = channels.layers.get_channel_layer()
async_to_sync(channel_layer.group_send)(
group_name,
{
'type': 'send_message',
'text': message
}
)
| update_job_status_listeners |
deployment.go | /*
Copyright 2020 The Crossplane Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package revision
import (
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"github.com/crossplane/crossplane-runtime/pkg/meta"
metav1alpha1 "github.com/crossplane/crossplane/apis/pkg/meta/v1alpha1"
v1 "github.com/crossplane/crossplane/apis/pkg/v1"
"github.com/crossplane/crossplane/apis/pkg/v1alpha1"
)
var (
replicas = int32(1)
runAsUser = int64(2000)
runAsGroup = int64(2000)
allowPrivilegeEscalation = false
privileged = false
runAsNonRoot = true
)
func | (provider *metav1alpha1.Provider, revision v1.PackageRevision, cc *v1alpha1.ControllerConfig, namespace string) (*corev1.ServiceAccount, *appsv1.Deployment) { // nolint:interfacer,gocyclo
s := &corev1.ServiceAccount{
ObjectMeta: metav1.ObjectMeta{
Name: revision.GetName(),
Namespace: namespace,
OwnerReferences: []metav1.OwnerReference{meta.AsController(meta.TypedReferenceTo(revision, v1.ProviderRevisionGroupVersionKind))},
},
}
pullPolicy := corev1.PullIfNotPresent
if revision.GetPackagePullPolicy() != nil {
pullPolicy = *revision.GetPackagePullPolicy()
}
d := &appsv1.Deployment{
ObjectMeta: metav1.ObjectMeta{
Name: revision.GetName(),
Namespace: namespace,
OwnerReferences: []metav1.OwnerReference{meta.AsController(meta.TypedReferenceTo(revision, v1.ProviderRevisionGroupVersionKind))},
},
Spec: appsv1.DeploymentSpec{
Replicas: &replicas,
Selector: &metav1.LabelSelector{
MatchLabels: map[string]string{"pkg.crossplane.io/revision": revision.GetName()},
},
Template: corev1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Name: provider.GetName(),
Namespace: namespace,
Labels: map[string]string{"pkg.crossplane.io/revision": revision.GetName()},
},
Spec: corev1.PodSpec{
SecurityContext: &corev1.PodSecurityContext{
RunAsNonRoot: &runAsNonRoot,
RunAsUser: &runAsUser,
RunAsGroup: &runAsGroup,
},
ServiceAccountName: s.GetName(),
ImagePullSecrets: revision.GetPackagePullSecrets(),
Containers: []corev1.Container{
{
Name: provider.GetName(),
Image: provider.Spec.Controller.Image,
ImagePullPolicy: pullPolicy,
SecurityContext: &corev1.SecurityContext{
RunAsUser: &runAsUser,
RunAsGroup: &runAsGroup,
AllowPrivilegeEscalation: &allowPrivilegeEscalation,
Privileged: &privileged,
RunAsNonRoot: &runAsNonRoot,
},
},
},
},
},
},
}
if cc != nil {
s.Labels = cc.Labels
s.Annotations = cc.Annotations
d.Labels = cc.Labels
d.Annotations = cc.Annotations
if cc.Spec.Replicas != nil {
d.Spec.Replicas = cc.Spec.Replicas
}
if cc.Spec.Image != nil {
d.Spec.Template.Spec.Containers[0].Image = *cc.Spec.Image
}
if cc.Spec.NodeSelector != nil {
d.Spec.Template.Spec.NodeSelector = cc.Spec.NodeSelector
}
if cc.Spec.ServiceAccountName != nil {
d.Spec.Template.Spec.ServiceAccountName = *cc.Spec.ServiceAccountName
}
if cc.Spec.NodeName != nil {
d.Spec.Template.Spec.NodeName = *cc.Spec.NodeName
}
if cc.Spec.PodSecurityContext != nil {
d.Spec.Template.Spec.SecurityContext = cc.Spec.PodSecurityContext
}
if cc.Spec.SecurityContext != nil {
d.Spec.Template.Spec.Containers[0].SecurityContext = cc.Spec.SecurityContext
}
if len(cc.Spec.ImagePullSecrets) > 0 {
d.Spec.Template.Spec.ImagePullSecrets = cc.Spec.ImagePullSecrets
}
if cc.Spec.Affinity != nil {
d.Spec.Template.Spec.Affinity = cc.Spec.Affinity
}
if len(cc.Spec.Tolerations) > 0 {
d.Spec.Template.Spec.Tolerations = cc.Spec.Tolerations
}
if cc.Spec.PriorityClassName != nil {
d.Spec.Template.Spec.PriorityClassName = *cc.Spec.PriorityClassName
}
if cc.Spec.RuntimeClassName != nil {
d.Spec.Template.Spec.RuntimeClassName = cc.Spec.RuntimeClassName
}
if cc.Spec.ResourceRequirements != nil {
d.Spec.Template.Spec.Containers[0].Resources = *cc.Spec.ResourceRequirements
}
if len(cc.Spec.Args) > 0 {
d.Spec.Template.Spec.Containers[0].Args = cc.Spec.Args
}
if len(cc.Spec.EnvFrom) > 0 {
d.Spec.Template.Spec.Containers[0].EnvFrom = cc.Spec.EnvFrom
}
if len(cc.Spec.Env) > 0 {
d.Spec.Template.Spec.Containers[0].Env = cc.Spec.Env
}
}
return s, d
}
| buildProviderDeployment |
app.module.ts | import {BrowserModule} from '@angular/platform-browser';
import {NgModule} from '@angular/core';
import {RouterModule} from '@angular/router';
import {HttpClientModule} from '@angular/common/http';
import {registerLocaleData} from '@angular/common';
import localeDe from '@angular/common/locales/de';
import {AppComponent} from './app.component';
import {WelcomeComponent} from './welcome/welcome.component';
import {HeaderComponent} from './header/header.component';
import {CategoryComponent} from './category/category.component';
import {FocusDirective} from './directives/focus.directive';
import {MenuComponent} from './menu/menu.component';
import {MenuEntryComponent} from './menu-entry/menu-entry.component';
import {ProductGroupComponent} from './product-group/product-group.component';
import {CardComponent} from './card/card.component';
import {DynamicContentComponent} from './dynamic-content/dynamic-content.component';
import {DynamicComponent} from './dynamic/dynamic.component';
import {ContactComponent} from './contact/contact.component';
import {ProductDetailComponent} from './product-detail/product-detail.component';
import {CardSwitcherComponent} from './card-switcher/card-switcher.component';
import {ReactiveFormsModule} from '@angular/forms';
registerLocaleData(localeDe);
@NgModule({
declarations: [
AppComponent,
WelcomeComponent,
HeaderComponent,
CategoryComponent,
FocusDirective,
MenuComponent,
MenuEntryComponent,
ProductGroupComponent,
CardComponent,
DynamicContentComponent,
DynamicComponent,
ContactComponent,
ProductDetailComponent,
CardSwitcherComponent,
],
imports: [
BrowserModule,
HttpClientModule,
RouterModule.forRoot([
{path: '', component: WelcomeComponent},
{path: 'category/:id', component: CategoryComponent},
{path: 'products/:id', component: ProductGroupComponent},
{path: 'product/:id', component: ProductDetailComponent},
{path: 'dynamic/:id', component: DynamicComponent},
{path: 'contact', component: ContactComponent},
]),
ReactiveFormsModule
],
// providers: [{provide: LOCALE_ID, useValue: 'de'}],
bootstrap: [AppComponent]
})
export class | {
}
| AppModule |
injector-list.go | // Copyright Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"context"
"fmt"
"io"
"sort"
"strconv"
"strings"
"text/tabwriter"
"github.com/spf13/cobra"
admit_v1 "k8s.io/api/admissionregistration/v1"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
api_pkg_labels "k8s.io/apimachinery/pkg/labels"
"istio.io/api/annotation"
"istio.io/api/label"
"istio.io/istio/istioctl/pkg/clioptions"
"istio.io/istio/pkg/config/analysis/analyzers/injection"
analyzer_util "istio.io/istio/pkg/config/analysis/analyzers/util"
"istio.io/istio/pkg/config/resource"
"istio.io/istio/pkg/kube"
)
type revisionCount struct {
// pods in a revision
pods int
// pods that are disabled from injection
disabled int
// pods that are enabled for injection, but whose revision doesn't match their namespace's revision
needsRestart int
}
func injectorCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "injector",
Short: "List sidecar injector and sidecar versions",
Long: `List sidecar injector and sidecar versions`,
Example: ` istioctl experimental injector list`,
Args: func(cmd *cobra.Command, args []string) error {
if len(args) != 0 {
return fmt.Errorf("unknown subcommand %q", args[0])
}
return nil
},
RunE: func(cmd *cobra.Command, args []string) error {
cmd.HelpFunc()(cmd, args)
return nil
},
}
cmd.AddCommand(injectorListCommand())
return cmd
}
func injectorListCommand() *cobra.Command {
var opts clioptions.ControlPlaneOptions
cmd := &cobra.Command{
Use: "list",
Short: "List sidecar injector and sidecar versions",
Long: `List sidecar injector and sidecar versions`,
Example: ` istioctl experimental injector list`,
RunE: func(cmd *cobra.Command, args []string) error {
client, err := kubeClientWithRevision(kubeconfig, configContext, opts.Revision)
if err != nil {
return fmt.Errorf("failed to create k8s client: %v", err)
}
ctx := context.Background()
nslist, err := getNamespaces(ctx, client)
if err != nil {
return err
}
sort.Slice(nslist, func(i, j int) bool {
return nslist[i].Name < nslist[j].Name
})
hooks, err := getWebhooks(ctx, client)
if err != nil {
return err
}
pods, err := getPods(ctx, client)
if err != nil |
err = printNS(cmd.OutOrStdout(), nslist, hooks, pods)
if err != nil {
return err
}
cmd.Println()
injectedImages, err := getInjectedImages(ctx, client)
if err != nil {
return err
}
sort.Slice(hooks, func(i, j int) bool {
return hooks[i].Name < hooks[j].Name
})
return printHooks(cmd.OutOrStdout(), nslist, hooks, injectedImages)
},
}
return cmd
}
func getNamespaces(ctx context.Context, client kube.ExtendedClient) ([]v1.Namespace, error) {
nslist, err := client.Kube().CoreV1().Namespaces().List(ctx, metav1.ListOptions{})
if err != nil {
return []v1.Namespace{}, err
}
return nslist.Items, nil
}
func printNS(writer io.Writer, namespaces []v1.Namespace, hooks []admit_v1.MutatingWebhookConfiguration,
allPods map[resource.Namespace][]v1.Pod) error {
outputCount := 0
w := new(tabwriter.Writer).Init(writer, 0, 8, 1, ' ', 0)
for _, namespace := range namespaces {
if hideFromOutput(resource.Namespace(namespace.Name)) {
continue
}
revision := getInjectedRevision(&namespace, hooks)
podCount := podCountByRevision(allPods[resource.Namespace(namespace.Name)], revision)
if len(podCount) == 0 {
// This namespace has no pods, but we wish to display it if new pods will be auto-injected
if revision != "" {
podCount[revision] = revisionCount{}
}
}
for injectedRevision, count := range podCount {
if outputCount == 0 {
fmt.Fprintln(w, "NAMESPACE\tISTIO-REVISION\tPOD-REVISIONS")
}
outputCount++
fmt.Fprintf(w, "%s\t%s\t%s\n", namespace.Name, revision, renderCounts(injectedRevision, count))
}
}
if outputCount == 0 {
fmt.Fprintf(writer, "No Istio injected namespaces present.\n")
}
return w.Flush()
}
func getWebhooks(ctx context.Context, client kube.ExtendedClient) ([]admit_v1.MutatingWebhookConfiguration, error) {
hooks, err := client.Kube().AdmissionregistrationV1().MutatingWebhookConfigurations().List(ctx, metav1.ListOptions{})
if err != nil {
return []admit_v1.MutatingWebhookConfiguration{}, err
}
return hooks.Items, nil
}
func printHooks(writer io.Writer, namespaces []v1.Namespace, hooks []admit_v1.MutatingWebhookConfiguration, injectedImages map[string]string) error {
if len(hooks) == 0 {
fmt.Fprintf(writer, "No Istio injection hooks present.\n")
return nil
}
w := new(tabwriter.Writer).Init(writer, 0, 8, 1, ' ', 0)
fmt.Fprintln(w, "NAMESPACES\tINJECTOR-HOOK\tISTIO-REVISION\tSIDECAR-IMAGE")
for _, hook := range hooks {
revision := hook.ObjectMeta.GetLabels()[label.IoIstioRev.Name]
namespaces := getMatchingNamespaces(&hook, namespaces)
if len(namespaces) == 0 {
fmt.Fprintf(w, "%s\t%s\t%s\t%s\n", "DOES NOT AUTOINJECT", hook.Name, revision, injectedImages[revision])
continue
}
for _, namespace := range namespaces {
fmt.Fprintf(w, "%s\t%s\t%s\t%s\n", namespace.Name, hook.Name, revision, injectedImages[revision])
}
}
return w.Flush()
}
func getInjector(namespace *v1.Namespace, hooks []admit_v1.MutatingWebhookConfiguration) *admit_v1.MutatingWebhookConfiguration {
// find matching hook
for _, hook := range hooks {
for _, webhook := range hook.Webhooks {
nsSelector, err := metav1.LabelSelectorAsSelector(webhook.NamespaceSelector)
if err != nil {
continue
}
if nsSelector.Matches(api_pkg_labels.Set(namespace.ObjectMeta.Labels)) {
return &hook
}
}
}
return nil
}
func getInjectedRevision(namespace *v1.Namespace, hooks []admit_v1.MutatingWebhookConfiguration) string {
injector := getInjector(namespace, hooks)
if injector != nil {
return injector.ObjectMeta.GetLabels()[label.IoIstioRev.Name]
}
newRev := namespace.ObjectMeta.GetLabels()[label.IoIstioRev.Name]
oldLabel, ok := namespace.ObjectMeta.GetLabels()[analyzer_util.InjectionLabelName]
// If there is no istio-injection=disabled and no istio.io/rev, the namespace isn't injected
if newRev == "" && (ok && oldLabel == "disabled" || !ok) {
return ""
}
if newRev != "" {
return fmt.Sprintf("MISSING/%s", newRev)
}
return fmt.Sprintf("MISSING/%s", analyzer_util.InjectionLabelName)
}
func getMatchingNamespaces(hook *admit_v1.MutatingWebhookConfiguration, namespaces []v1.Namespace) []v1.Namespace {
retval := make([]v1.Namespace, 0)
for _, webhook := range hook.Webhooks {
nsSelector, err := metav1.LabelSelectorAsSelector(webhook.NamespaceSelector)
if err != nil {
return retval
}
for _, namespace := range namespaces {
if nsSelector.Matches(api_pkg_labels.Set(namespace.Labels)) {
retval = append(retval, namespace)
}
}
}
return retval
}
func getPods(ctx context.Context, client kube.ExtendedClient) (map[resource.Namespace][]v1.Pod, error) {
retval := map[resource.Namespace][]v1.Pod{}
// All pods in all namespaces
pods, err := client.Kube().CoreV1().Pods("").List(ctx, metav1.ListOptions{})
if err != nil {
return retval, err
}
for _, pod := range pods.Items {
podList, ok := retval[resource.Namespace(pod.GetNamespace())]
if !ok {
retval[resource.Namespace(pod.GetNamespace())] = []v1.Pod{pod}
} else {
retval[resource.Namespace(pod.GetNamespace())] = append(podList, pod)
}
}
return retval, nil
}
// getInjectedImages() returns a map of revision->dockerimage
func getInjectedImages(ctx context.Context, client kube.ExtendedClient) (map[string]string, error) {
retval := map[string]string{}
// All configs in all namespaces that are Istio revisioned
configMaps, err := client.Kube().CoreV1().ConfigMaps("").List(ctx, metav1.ListOptions{LabelSelector: label.IoIstioRev.Name})
if err != nil {
return retval, err
}
for _, configMap := range configMaps.Items {
image := injection.GetIstioProxyImage(&configMap)
if image != "" {
retval[configMap.ObjectMeta.GetLabels()[label.IoIstioRev.Name]] = image
}
}
return retval, nil
}
// podCountByRevision() returns a map of revision->pods, with "<non-Istio>" as the dummy "revision" for uninjected pods
func podCountByRevision(pods []v1.Pod, expectedRevision string) map[string]revisionCount {
retval := map[string]revisionCount{}
for _, pod := range pods {
revision := pod.ObjectMeta.GetLabels()[label.IoIstioRev.Name]
revisionLabel := revision
if revision == "" {
revisionLabel = "<non-Istio>"
}
counts := retval[revisionLabel]
counts.pods++
if injectionDisabled(&pod) {
counts.disabled++
} else if revision != expectedRevision {
counts.needsRestart++
}
retval[revisionLabel] = counts
}
return retval
}
func hideFromOutput(ns resource.Namespace) bool {
return (analyzer_util.IsSystemNamespace(ns) || ns == resource.Namespace(istioNamespace))
}
func injectionDisabled(pod *v1.Pod) bool {
inject := pod.ObjectMeta.GetAnnotations()[annotation.SidecarInject.Name]
return strings.EqualFold(inject, "false")
}
func renderCounts(injectedRevision string, counts revisionCount) string {
if counts.pods == 0 {
return "<no pods>"
}
podText := strconv.Itoa(counts.pods)
if counts.disabled > 0 {
podText += fmt.Sprintf(" (injection disabled: %d)", counts.disabled)
}
if counts.needsRestart > 0 {
podText += fmt.Sprintf(" NEEDS RESTART: %d", counts.needsRestart)
}
return fmt.Sprintf("%s: %s", injectedRevision, podText)
}
| {
return err
} |
txqptr.rs | #[doc = "Reader of register TXQPTR"]
pub type R = crate::R<u32, super::TXQPTR>;
#[doc = "Writer for register TXQPTR"]
pub type W = crate::W<u32, super::TXQPTR>;
#[doc = "Register TXQPTR `reset()`'s with value 0"]
impl crate::ResetValue for super::TXQPTR {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `DMATXQPTR`"]
pub type DMATXQPTR_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `DMATXQPTR`"]
pub struct DMATXQPTR_W<'a> {
w: &'a mut W,
}
impl<'a> DMATXQPTR_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x3fff_ffff << 2)) | (((value as u32) & 0x3fff_ffff) << 2);
self.w
}
}
impl R {
#[doc = "Bits 2:31 - Transmit buffer queue base address"]
#[inline(always)]
pub fn dmatxqptr(&self) -> DMATXQPTR_R {
DMATXQPTR_R::new(((self.bits >> 2) & 0x3fff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 2:31 - Transmit buffer queue base address"]
#[inline(always)]
pub fn dmatxqptr(&mut self) -> DMATXQPTR_W |
}
| {
DMATXQPTR_W { w: self }
} |
generateDict.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function generateDict(str) {
let dict = new Map();
const lines = str.split("\r\n");
while (dict.size !== lines.length) {
lines: for (let line of lines) {
let [index, rule] = line.split(":");
if (dict.get(Number(index)))
continue;
if (rule.trim().startsWith('"')) {
dict.set(Number(index), new RegExp(rule.trim()[1]));
}
else {
let o = rule.split("|"), arr = [];
for (let p of o)
arr.push(p.trim().split(" ").map(Number));
if (arr.length == 2) {
if (arr.every(a => a.every(c => dict.get(c)))) {
let reg1 = "", reg2 = "";
arr[0].forEach(e => reg1 += dict.get(e).toString().slice(1, -1));
arr[1].forEach(e => reg2 += dict.get(e).toString().slice(1, -1));
dict.set(Number(index), new RegExp(`((${reg1})|(${reg2}))`));
}
}
else if (arr.length == 1) {
if (arr[0].every(c => dict.get(c))) {
let reg = "";
arr[0].forEach(e => reg += dict.get(e).toString().slice(1, -1));
dict.set(Number(index), new RegExp(reg));
}
}
else {
console.log(arr);
}
}
}
}
return dict;
}
exports.default = generateDict; | ||
dynamic_field.py | import math
from schematics import Model
from schematics.types import ModelType, StringType, PolyModelType, DictType, ListType, BooleanType
from .dynamic_search import BaseDynamicSearch
BACKGROUND_COLORS = [
'black', 'white',
'gray', 'gray.100', 'gray.200', 'gray.300', 'gray.400', 'gray.500', 'gray.600', 'gray.700', 'gray.800', 'gray.900',
'red', 'red.100', 'red.200', 'red.300', 'red.400', 'red.500', 'red.600', 'red.700', 'red.800', 'red.900',
'coral', 'coral.100', 'coral.200', 'coral.300', 'coral.400', 'coral.500', 'coral.600', 'coral.700', 'coral.800', 'coral.900',
'yellow', 'yellow.100', 'yellow.200', 'yellow.300', 'yellow.400', 'yellow.500', 'yellow.600', 'yellow.700', 'yellow.800', 'yellow.900',
'green', 'green.100', 'green.200', 'green.300', 'green.400', 'green.500', 'green.600', 'green.700', 'green.800', 'green.900',
'blue', 'blue.100', 'blue.200', 'blue.300', 'blue.400', 'blue.500', 'blue.600', 'blue.700', 'blue.800', 'blue.900',
'violet', 'violet.100', 'violet.200', 'violet.300', 'violet.400', 'violet.500', 'violet.600', 'violet.700', 'violet.800', 'violet.900',
'peacock', 'peacock.100', 'peacock.200', 'peacock.300', 'peacock.400', 'peacock.500', 'peacock.600', 'peacock.700', 'peacock.800', 'peacock.900',
'indigo', 'indigo.100', 'indigo.200', 'indigo.300', 'indigo.400', 'indigo.500', 'indigo.600', 'indigo.700', 'indigo.800', 'indigo.900',
]
TYPE_BADGE = ['primary', 'indigo.500', 'coral.600', 'peacock.500', 'green.500']
class FieldReference(Model):
resource_type = StringType()
reference_key = StringType(serialize_when_none=False)
class Icon(Model):
image = StringType(serialize_when_none=False)
color = StringType(default='green', choices=BACKGROUND_COLORS)
class BaseField(Model):
type = StringType(choices=["text", "state", "badge", "list", "dict",
"datetime", "image", "enum", "progress", "size"],
serialize_when_none=False)
options = PolyModelType([Model, DictType(PolyModelType(Model))], serialize_when_none=False)
class FieldViewOption(Model):
link = StringType(serialize_when_none=False)
variables = StringType(serialize_when_none=False)
sortable = BooleanType(serialize_when_none=False)
sort_key = StringType(serialize_when_none=False)
translation_id = StringType(serialize_when_none=False)
default = StringType(serialize_when_none=False)
is_optional = BooleanType(serialize_when_none=False)
postfix = StringType(serialize_when_none=False)
prefix = StringType(serialize_when_none=False)
field_description = StringType(serialize_when_none=False)
class BaseDynamicField(BaseField):
name = StringType()
key = StringType()
reference = ModelType(FieldReference, serialize_when_none=False)
@classmethod
def data_source(cls, name, key, **kwargs):
return cls({'key': key, 'name': name, **kwargs})
class TextDyFieldOptions(FieldViewOption):
pass
class BadgeDyFieldOptions(FieldViewOption):
text_color = StringType(serialize_when_none=False)
shape = StringType(serialize_when_none=False, choices=['SQUARE', 'ROUND'])
outline_color = StringType(serialize_when_none=False, choices=BACKGROUND_COLORS)
background_color = StringType(serialize_when_none=False, choices=BACKGROUND_COLORS)
class StateDyFieldOptions(FieldViewOption):
text_color = StringType(serialize_when_none=False)
icon = ModelType(Icon, serialize_when_none=False)
class ImageDyFieldOptions(FieldViewOption):
image_url = StringType(default='')
width = StringType(serialize_when_none=False)
height = StringType(serialize_when_none=False)
class DateTimeDyFieldOptions(FieldViewOption):
source_type = StringType(default='timestamp', choices=['iso8601', 'timestamp'])
source_format = StringType(serialize_when_none=False)
display_format = StringType(serialize_when_none=False)
class ProgressFieldOptions(FieldViewOption):
unit = StringType(serialize_when_none=False)
class SizeFieldOptions(FieldViewOption):
display_unit = StringType(serialize_when_none=False, choices=('BYTES', 'KB', 'MB', 'GB', 'TB', 'PB'))
source_unit = StringType(serialize_when_none=False, choices=('BYTES', 'KB', 'MB', 'GB', 'TB', 'PB'))
class TextDyField(BaseDynamicField):
type = StringType(default="text")
options = PolyModelType(TextDyFieldOptions, serialize_when_none=False)
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
if 'options' in kwargs:
_data_source.update({'options': TextDyFieldOptions(kwargs.get('options'))})
if 'reference' in kwargs:
_data_source.update({'reference': kwargs.get('reference')})
return cls(_data_source)
class StateDyField(BaseDynamicField):
type = StringType(default="state")
options = PolyModelType(StateDyFieldOptions, serialize_when_none=False)
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
if 'options' in kwargs:
_data_source.update({'options': StateDyFieldOptions(kwargs.get('options'))})
if 'reference' in kwargs:
_data_source.update({'reference': kwargs.get('reference')})
return cls(_data_source)
class BadgeDyField(BaseDynamicField):
type = StringType(default="badge")
options = PolyModelType(BadgeDyFieldOptions, serialize_when_none=False)
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
if 'options' in kwargs:
_data_source.update({'options': BadgeDyFieldOptions(kwargs.get('options'))})
else:
_data_source.update({'options': BadgeDyFieldOptions({'background_color': 'gray.200',
'text_color': 'gray.900'})})
if 'reference' in kwargs:
_data_source.update({'reference': kwargs.get('reference')})
return cls(_data_source)
class ImageDyField(BaseDynamicField):
type = StringType(default="image")
options = PolyModelType(ImageDyFieldOptions, serialize_when_none=False)
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
if 'options' in kwargs:
_data_source.update({'options': ImageDyFieldOptions(kwargs.get('options'))})
if 'reference' in kwargs:
_data_source.update({'reference': kwargs.get('reference')})
return cls(_data_source)
class DateTimeDyField(BaseDynamicField):
type = StringType(default="datetime")
options = PolyModelType(DateTimeDyFieldOptions, serialize_when_none=False)
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
if 'options' in kwargs:
_data_source.update({'options': DateTimeDyFieldOptions(kwargs.get('options'))})
if 'reference' in kwargs:
_data_source.update({'reference': kwargs.get('reference')})
return cls(_data_source)
class DictDyField(BaseDynamicField):
type = StringType(default="dict")
options = PolyModelType(FieldViewOption, serialize_when_none=False)
class StateItemDyField(BaseField):
type = StringType(default="state")
options = PolyModelType(StateDyFieldOptions, serialize_when_none=False)
@classmethod
def set(cls, options):
return cls({'options': StateDyFieldOptions(options)})
class BadgeItemDyField(BaseField):
type = StringType(default="badge")
options = PolyModelType(BadgeDyFieldOptions, serialize_when_none=False)
@classmethod
def set(cls, options):
return cls({'options': BadgeDyFieldOptions(options)})
class ImageItemDyField(BaseField):
type = StringType(default="image")
options = PolyModelType(ImageDyFieldOptions, serialize_when_none=False)
@classmethod
def set(cls, options):
return cls({'options': ImageDyFieldOptions(options)})
class DatetimeItemDyField(BaseField):
type = StringType(default="datetime")
options = PolyModelType(DateTimeDyFieldOptions, serialize_when_none=False)
@classmethod
def | (cls, options):
return cls({'options': DateTimeDyFieldOptions(options)})
class ListDyFieldOptions(FieldViewOption):
item = PolyModelType([BadgeItemDyField, StateDyField, DateTimeDyField, DictDyField], serialize_when_none=False)
sub_key = StringType(serialize_when_none=False)
delimiter = StringType(serialize_when_none=False)
class ListDyField(BaseDynamicField):
type = StringType(default="list")
options = PolyModelType(ListDyFieldOptions, serialize_when_none=False)
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
if 'default_badge' in kwargs:
_default_badge = kwargs.get('default_badge')
_list_options = {'delimiter': ' '}
if 'type' in _default_badge and _default_badge.get('type') == 'outline':
_list_options.update({'item': BadgeItemDyField.set({'outline_color': 'violet.500'})})
elif 'type' in _default_badge and _default_badge.get('type') == 'inline':
_list_options.update({'item': BadgeItemDyField.set({'background_color': 'violet.500'})})
if 'sub_key' in _default_badge:
_list_options.update({'sub_key': _default_badge.get('sub_key')})
if 'delimiter' in _default_badge:
_list_options.update({'delimiter': _default_badge.get('delimiter')})
_data_source.update({'options': ListDyFieldOptions(_list_options)})
if 'options' in kwargs:
_data_source.update({'options': ListDyFieldOptions(kwargs.get('options'))})
if 'reference' in kwargs:
_data_source.update({'reference': kwargs.get('reference')})
return cls(_data_source)
class EnumDyField(BaseDynamicField):
type = StringType(default="enum")
options = DictType(PolyModelType([StateItemDyField, BadgeItemDyField, ImageItemDyField, DatetimeItemDyField]),
serialize_when_none=False,
default={})
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
_default_badge = kwargs.get('default_badge', {})
_default_state = kwargs.get('default_state', {})
_default_outline_badge = kwargs.get('default_outline_badge', [])
_options_dic = {}
for _key in _default_outline_badge:
_round_index = len(TYPE_BADGE)
_index = _default_outline_badge.index(_key)
_num = math.floor(_index/len(TYPE_BADGE))
if _num > 0:
_round_index = len(TYPE_BADGE)*_num
if _round_index - 1 < _index:
_index = _index - _round_index
_options_dic[_key] = BadgeItemDyField.set({'outline_color': TYPE_BADGE[_index]})
for _key in _default_badge:
for _badge in _default_badge[_key]:
_options_dic[_badge] = BadgeItemDyField.set({'background_color': _key})
for _key in _default_state:
for _state in _default_state[_key]:
_state_options = {'icon': {'color': 'gray.400'}}
if _key == 'safe':
_state_options = {'icon': {'color': 'green.500'}}
elif _key == 'disable':
_state_options.update({'text_color': 'gray.400'})
elif _key == 'warning':
_state_options = {'icon': {'color': 'yellow.500'}}
elif _key == 'available':
_state_options = {'icon': {'color': 'blue.400'}}
elif _key == 'alert':
_state_options = {'text_color': 'red.500', 'icon': {'color': 'red.500'}}
_options_dic[_state] = StateItemDyField.set(_state_options)
_data_source.update({'options': _options_dic})
if 'options' in kwargs:
_data_source.update({'options': kwargs.get('options')})
if 'reference' in kwargs:
_data_source.update({'reference': kwargs.get('reference')})
return cls(_data_source)
class ProgressField(BaseDynamicField):
type = StringType(default="progress")
options = PolyModelType(ProgressFieldOptions, serialize_when_none=False, )
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
if 'options' in kwargs:
_data_source.update({'options': kwargs.get('options')})
return cls(_data_source)
class SizeField(BaseDynamicField):
type = StringType(default="size")
options = PolyModelType(SizeFieldOptions, serialize_when_none=False)
@classmethod
def data_source(cls, name, key, **kwargs):
_data_source = {'key': key, 'name': name}
if 'options' in kwargs:
_data_source.update({'options': kwargs.get('options')})
return cls(_data_source)
class SearchEnumField(Model):
label = StringType(serialize_when_none=False)
icon = ModelType(Icon, serialize_when_none=False)
@classmethod
def set_field(cls, label=None, icon=None):
return_dic = {}
if label is not None:
return_dic.update({'label': label})
if icon is not None:
return_dic.update({'icon': Icon(icon)})
return cls(return_dic)
class SearchField(BaseDynamicSearch):
enums = DictType(ModelType(SearchEnumField), serialize_when_none=False)
reference = StringType(serialize_when_none=False)
@classmethod
def set(cls, name='', key='', data_type=None, enums=None, reference=None):
return_dic = {
'name': name,
'key': key
}
if data_type is not None:
return_dic.update({'data_type': data_type})
if reference is not None:
return_dic.update({'reference': reference})
if enums is not None:
convert_enums = {}
for enum_key in enums:
enum_v = enums[enum_key]
convert_enums[enum_key] = SearchEnumField.set_field(**enum_v)
return_dic.update({
'enums': convert_enums
})
return cls(return_dic)
| set |
cifar_few_shot.py | # This code is modified from https://github.com/facebookresearch/low-shot-shrink-hallucinate
import torch
from PIL import Image
import numpy as np
import torchvision.transforms as transforms
import additional_transforms as add_transforms
from abc import abstractmethod
from torchvision.datasets import CIFAR100, CIFAR10
identity = lambda x:x
class SimpleDataset:
def __init__(self, mode, dataset, transform, target_transform=identity):
self.transform = transform
self.dataset = dataset
self.target_transform = target_transform
self.meta = {}
self.meta['image_names'] = []
self.meta['image_labels'] = []
if self.dataset == "CIFAR100":
d = CIFAR100("./", train=True, download=True)
for i, (data, label) in enumerate(d):
if mode == "base":
if label % 3 == 0:
self.meta['image_names'].append(data)
self.meta['image_labels'].append(label)
elif mode == "val":
if label % 3 == 1:
self.meta['image_names'].append(data)
self.meta['image_labels'].append(label)
else:
if label % 3 == 2:
self.meta['image_names'].append(data)
self.meta['image_labels'].append(label)
elif self.dataset == "CIFAR10":
d = CIFAR10("./", train=True, download=True)
for i, (data, label) in enumerate(d):
if mode == "novel":
self.meta['image_names'].append(data)
self.meta['image_labels'].append(label)
def __getitem__(self, i):
img = self.transform(self.meta['image_names'][i])
target = self.target_transform(self.meta['image_labels'][i])
return img, target
def __len__(self):
return len(self.meta['image_names'])
class SetDataset:
def __init__(self, mode, dataset, batch_size, transform):
self.sub_meta = {}
self.cl_list = range(100)
self.dataset = dataset
if mode == "base":
type_ = 0
elif mode == "val":
type_ = 1
else:
type_ = 2
for cl in self.cl_list:
if cl % 3 == type_:
self.sub_meta[cl] = []
if self.dataset == "CIFAR100":
d = CIFAR100("./", train=True, download=True)
elif self.dataset == "CIFAR10":
d = CIFAR10("./", train=True, download=True)
for i, (data, label) in enumerate(d):
if label % 3 == type_:
self.sub_meta[label].append(data)
self.sub_dataloader = []
sub_data_loader_params = dict(batch_size = batch_size,
shuffle = True,
num_workers = 0, #use main thread only or may receive multiple batches
pin_memory = False)
for cl in self.cl_list:
if cl % 3 == type_:
sub_dataset = SubDataset(self.sub_meta[cl], cl, transform = transform )
self.sub_dataloader.append( torch.utils.data.DataLoader(sub_dataset, **sub_data_loader_params) )
def __getitem__(self,i):
return next(iter(self.sub_dataloader[i]))
def __len__(self):
return len(self.sub_dataloader)
class | :
def __init__(self, sub_meta, cl, transform=transforms.ToTensor(), target_transform=identity):
self.sub_meta = sub_meta
self.cl = cl
self.transform = transform
self.target_transform = target_transform
def __getitem__(self,i):
img = self.transform(self.sub_meta[i])
target = self.target_transform(self.cl)
return img, target
def __len__(self):
return len(self.sub_meta)
class EpisodicBatchSampler(object):
def __init__(self, n_classes, n_way, n_episodes):
self.n_classes = n_classes
self.n_way = n_way
self.n_episodes = n_episodes
def __len__(self):
return self.n_episodes
def __iter__(self):
for i in range(self.n_episodes):
yield torch.randperm(self.n_classes)[:self.n_way]
class TransformLoader:
def __init__(self, image_size,
normalize_param = dict(mean= [0.485, 0.456, 0.406] , std=[0.229, 0.224, 0.225]),
jitter_param = dict(Brightness=0.4, Contrast=0.4, Color=0.4)):
self.image_size = image_size
self.normalize_param = normalize_param
self.jitter_param = jitter_param
def parse_transform(self, transform_type):
if transform_type=='ImageJitter':
method = add_transforms.ImageJitter( self.jitter_param )
return method
method = getattr(transforms, transform_type)
if transform_type=='RandomSizedCrop':
return method(self.image_size)
elif transform_type=='CenterCrop':
return method(self.image_size)
elif transform_type=='Scale':
return method([int(self.image_size*1.15), int(self.image_size*1.15)])
elif transform_type=='Normalize':
return method(**self.normalize_param )
else:
return method()
def get_composed_transform(self, aug = False):
if aug:
transform_list = ['RandomSizedCrop', 'ImageJitter', 'RandomHorizontalFlip', 'ToTensor', 'Normalize']
else:
transform_list = ['Scale','CenterCrop', 'ToTensor', 'Normalize']
transform_funcs = [ self.parse_transform(x) for x in transform_list]
transform = transforms.Compose(transform_funcs)
return transform
class DataManager(object):
@abstractmethod
def get_data_loader(self, data_file, aug):
pass
class SimpleDataManager(DataManager):
def __init__(self, dataset, image_size, batch_size):
super(SimpleDataManager, self).__init__()
self.batch_size = batch_size
self.trans_loader = TransformLoader(image_size)
self.dataset = dataset
def get_data_loader(self, mode, aug): #parameters that would change on train/val set
transform = self.trans_loader.get_composed_transform(aug)
dataset = SimpleDataset(mode, self.dataset, transform)
data_loader_params = dict(batch_size = self.batch_size, shuffle = True, num_workers = 12, pin_memory = True)
data_loader = torch.utils.data.DataLoader(dataset, **data_loader_params)
return data_loader
class SetDataManager(DataManager):
def __init__(self, mode, dataset, image_size, n_way=5, n_support=5, n_query=16, n_eposide = 100):
super(SetDataManager, self).__init__()
self.image_size = image_size
self.n_way = n_way
self.batch_size = n_support + n_query
self.n_eposide = n_eposide
self.mode = mode
self.dataset = dataset
self.trans_loader = TransformLoader(image_size)
def get_data_loader(self, aug): #parameters that would change on train/val set
transform = self.trans_loader.get_composed_transform(aug)
dataset = SetDataset(self.mode, self.dataset, self.batch_size, transform)
sampler = EpisodicBatchSampler(len(dataset), self.n_way, self.n_eposide )
data_loader_params = dict(batch_sampler = sampler, num_workers = 12, pin_memory = True)
data_loader = torch.utils.data.DataLoader(dataset, **data_loader_params)
return data_loader
if __name__ == '__main__':
pass | SubDataset |
if1.rs | // if1.rs
pub fn | (a: i32, b: i32) -> i32 {
if a > b { a } else { b }
}
// Don't mind this for now :)
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn ten_is_bigger_than_eight() {
assert_eq!(10, bigger(10, 8));
}
#[test]
fn fortytwo_is_bigger_than_thirtytwo() {
assert_eq!(42, bigger(32, 42));
}
}
| bigger |
dataprotection.go | //
// Copyright (c) 2021 Alex Ullrich
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package core
import (
"crypto/cipher"
"crypto/rand"
"encoding/hex"
"fmt"
"github.com/AlexCuse/etm"
"strings"
)
| return b, nil
}
type dataProtection interface {
encrypt([]byte) ([]byte, error)
decrypt([]byte) ([]byte, error)
}
func newAESProtection(watermillConfig *WatermillConfig) (protection dataProtection, err error) {
if alg := watermillConfig.EncryptionAlgorithm; alg != "" {
key, err := hex.DecodeString(watermillConfig.EncryptionKey)
if err != nil {
return nil, err
}
aead, err := getAead(alg, key)
if err != nil {
return nil, err
}
protection = &aesProtection{
aead: aead,
}
}
return protection, err
}
type aesProtection struct {
aead cipher.AEAD
}
func (ap *aesProtection) encrypt(bytes []byte) ([]byte, error) {
dst := make([]byte, 0)
nonce := make([]byte, ap.aead.NonceSize())
_, err := rand.Read(nonce)
if err != nil {
return dst, err
}
dst = ap.aead.Seal(dst, nonce, bytes, nil)
return dst, err
}
func (ap *aesProtection) decrypt(bytes []byte) ([]byte, error) {
return ap.aead.Open(make([]byte, 0), nil, bytes, nil)
}
const (
AES256SHA512 = "aes256-sha512"
AES256SHA384 = "aes256-sha384"
AES192SHA384 = "aes192-sha384"
AES128SHA256 = "es128-sha256"
)
func getAead(alg string, key []byte) (cipher.AEAD, error) {
switch strings.ToLower(alg) {
case AES128SHA256:
return etm.NewAES128SHA256(key)
case AES192SHA384:
return etm.NewAES192SHA384(key)
case AES256SHA384:
return etm.NewAES256SHA384(key)
case AES256SHA512:
return etm.NewAES256SHA512(key)
default:
return nil, fmt.Errorf("invalid algorithm specified: %s", alg)
}
} | type binaryModifier func([]byte) ([]byte, error)
func noopModifier(b []byte) ([]byte, error) { |
forms.py | from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms import TextField
from wtforms.fields.html5 import EmailField
from wtforms import SubmitField
from wtforms import PasswordField
from wtforms.validators import DataRequired
from wtforms.validators import Length
from wtforms.validators import Email
from wtforms import TextAreaField
from flask_wtf.file import FileField
from flask_wtf.file import FileAllowed
from flask_wtf.file import FileRequired
from shopyoapi.init import photos
class | (FlaskForm):
name = StringField('Quizz Track Name', [
DataRequired()
],
render_kw={
'class':'form-control',
'autocomplete':'off',
}
)
award_image = FileField('Award Image', validators=[
FileAllowed(photos, 'Photo must be a png, jpg, or jpeg!'),
FileRequired('File was empty!')
],
render_kw={
'class':'form-control',
'autocomplete':'off',
}
)
submit = SubmitField('Submit',
render_kw={
'class':'btn btn-info'
}
)
# def __init__(self, *args, **kwargs):
# """Create instance."""
# super(AddTrackForm, self).__init__(*args, **kwargs)
# def validate(self):
# """Validate the form."""
# initial_validation = super(AddTrackForm, self).validate()
# return not initial_validation | AddTrackForm |
stats.directive.js | (function (){
angular.module('ebid')
.directive('stats', stats);
function | () {
return {
restrict: 'E',
templateUrl: 'app/components/stats/stats.html',
controller: 'StatsController',
controllerAs: '$stats'
};
}
})();
| stats |
basics.rs | // Copyright (c) 2018 The predicates-rs Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt;
use crate::reflection;
use crate::utils;
use crate::Predicate;
/// Predicate that checks for empty strings.
///
/// This is created by `predicates::str::is_empty`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct IsEmptyPredicate {}
impl Predicate<str> for IsEmptyPredicate {
fn eval(&self, variable: &str) -> bool {
variable.is_empty()
}
fn find_case<'a>(&'a self, expected: bool, variable: &str) -> Option<reflection::Case<'a>> {
utils::default_find_case(self, expected, variable)
}
}
impl reflection::PredicateReflection for IsEmptyPredicate {}
impl fmt::Display for IsEmptyPredicate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "var.is_empty()")
}
}
/// Creates a new `Predicate` that ensures a str is empty
///
/// # Examples
///
/// ```
/// use predicates::prelude::*;
///
/// let predicate_fn = predicate::str::is_empty();
/// assert_eq!(true, predicate_fn.eval(""));
/// assert_eq!(false, predicate_fn.eval("Food World"));
/// ```
pub fn is_empty() -> IsEmptyPredicate {
IsEmptyPredicate {}
}
/// Predicate checks start of str
///
/// This is created by `predicates::str::starts_with`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StartsWithPredicate {
pattern: String,
}
impl Predicate<str> for StartsWithPredicate {
fn eval(&self, variable: &str) -> bool {
variable.starts_with(&self.pattern)
}
fn find_case<'a>(&'a self, expected: bool, variable: &str) -> Option<reflection::Case<'a>> {
utils::default_find_case(self, expected, variable)
}
}
impl reflection::PredicateReflection for StartsWithPredicate {}
impl fmt::Display for StartsWithPredicate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "var.starts_with({:?})", self.pattern)
}
}
/// Creates a new `Predicate` that ensures a str starts with `pattern`
///
/// # Examples
///
/// ```
/// use predicates::prelude::*;
///
/// let predicate_fn = predicate::str::starts_with("Hello");
/// assert_eq!(true, predicate_fn.eval("Hello World"));
/// assert_eq!(false, predicate_fn.eval("Goodbye World"));
/// ```
pub fn starts_with<P>(pattern: P) -> StartsWithPredicate
where
P: Into<String>,
{
StartsWithPredicate {
pattern: pattern.into(),
}
}
/// Predicate checks end of str
///
/// This is created by `predicates::str::ends_with`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EndsWithPredicate {
pattern: String,
}
impl Predicate<str> for EndsWithPredicate {
fn eval(&self, variable: &str) -> bool {
variable.ends_with(&self.pattern)
}
fn find_case<'a>(&'a self, expected: bool, variable: &str) -> Option<reflection::Case<'a>> {
utils::default_find_case(self, expected, variable)
}
}
impl reflection::PredicateReflection for EndsWithPredicate {}
impl fmt::Display for EndsWithPredicate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "var.ends_with({:?})", self.pattern)
}
}
/// Creates a new `Predicate` that ensures a str ends with `pattern`
///
/// # Examples
///
/// ```
/// use predicates::prelude::*;
///
/// let predicate_fn = predicate::str::ends_with("World");
/// assert_eq!(true, predicate_fn.eval("Hello World"));
/// assert_eq!(false, predicate_fn.eval("Hello Moon"));
/// ```
pub fn ends_with<P>(pattern: P) -> EndsWithPredicate
where
P: Into<String>,
{
EndsWithPredicate {
pattern: pattern.into(),
}
}
/// Predicate that checks for patterns.
///
/// This is created by `predicates::str:contains`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ContainsPredicate {
pattern: String,
}
impl ContainsPredicate {
/// Require a specific count of matches.
///
/// # Examples
///
/// ```
/// use predicates::prelude::*;
///
/// let predicate_fn = predicate::str::contains("Two").count(2);
/// assert_eq!(true, predicate_fn.eval("One Two Three Two One"));
/// assert_eq!(false, predicate_fn.eval("One Two Three"));
/// ```
pub fn count(self, count: usize) -> MatchesPredicate |
}
impl Predicate<str> for ContainsPredicate {
fn eval(&self, variable: &str) -> bool {
variable.contains(&self.pattern)
}
fn find_case<'a>(&'a self, expected: bool, variable: &str) -> Option<reflection::Case<'a>> {
utils::default_find_case(self, expected, variable)
}
}
impl reflection::PredicateReflection for ContainsPredicate {}
impl fmt::Display for ContainsPredicate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "var.contains({:?})", self.pattern)
}
}
/// Predicate that checks for repeated patterns.
///
/// This is created by `predicates::str::contains(...).count`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MatchesPredicate {
pattern: String,
count: usize,
}
impl Predicate<str> for MatchesPredicate {
fn eval(&self, variable: &str) -> bool {
variable.matches(&self.pattern).count() == self.count
}
fn find_case<'a>(&'a self, expected: bool, variable: &str) -> Option<reflection::Case<'a>> {
let actual_count = variable.matches(&self.pattern).count();
let result = self.count == actual_count;
if result == expected {
Some(
reflection::Case::new(Some(self), result)
.add_product(reflection::Product::new("actual count", actual_count)),
)
} else {
None
}
}
}
impl reflection::PredicateReflection for MatchesPredicate {
fn parameters<'a>(&'a self) -> Box<dyn Iterator<Item = reflection::Parameter<'a>> + 'a> {
let params = vec![reflection::Parameter::new("count", &self.count)];
Box::new(params.into_iter())
}
}
impl fmt::Display for MatchesPredicate {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "var.contains({})", self.pattern)
}
}
/// Creates a new `Predicate` that ensures a str contains `pattern`
///
/// # Examples
///
/// ```
/// use predicates::prelude::*;
///
/// let predicate_fn = predicate::str::contains("Two");
/// assert_eq!(true, predicate_fn.eval("One Two Three"));
/// assert_eq!(false, predicate_fn.eval("Four Five Six"));
/// ```
pub fn contains<P>(pattern: P) -> ContainsPredicate
where
P: Into<String>,
{
ContainsPredicate {
pattern: pattern.into(),
}
}
| {
MatchesPredicate {
pattern: self.pattern,
count,
}
} |
common_test.go |
"github.com/fizzywhizbang/go-m3u8/m3u8"
"github.com/stretchr/testify/assert"
)
func TestParseAttributes(t *testing.T) {
line := "TEST-ID=\"Help\",URI=\"http://test\",ID=33\n"
mapAttr := m3u8.ParseAttributes(line)
assert.NotNil(t, mapAttr)
assert.Equal(t, "Help", mapAttr["TEST-ID"])
assert.Equal(t, "http://test", mapAttr["URI"])
assert.Equal(t, "33", mapAttr["ID"])
} | package test
import (
"testing" |
|
sprite.go | package graph
import (
"bytes"
"github.com/Shnifer/magellan/v2"
"github.com/hajimehoshi/ebiten"
"image"
"image/color"
"io"
"io/ioutil"
"log"
"math"
)
const Deg2Rad = math.Pi / 180
//const Rad2Deg = 180 / math.Pi
type Sprite struct {
tex Tex
op ebiten.DrawImageOptions
camParams CamParams
//before and past cam parts of geom
dirty bool
colorDirty bool
g1, g2 ebiten.GeoM
//pos and rot point, in sprite before scale
//in pxls
pivot v2.V2
//basic scale here
sx, sy float64
//place of center
pos v2.V2
//in rad
angle float64
//alpha normed
color color.Color
//additional alpha k [0...1]
alpha float64
//number of sprite from sheet
spriteN int
}
func NewSprite(tex Tex, params CamParams) *Sprite {
op := ebiten.DrawImageOptions{}
op.Filter = tex.filter
srcRect := image.Rect(0, 0, tex.sw, tex.sh)
op.SourceRect = &srcRect
w, h := float64(tex.sw), float64(tex.sh)
res := &Sprite{
tex: tex,
op: op,
sx: 1,
sy: 1,
pivot: v2.V2{X: w / 2, Y: h / 2},
color: color.White,
alpha: 1,
camParams: params,
}
res.calcGeom()
return res
}
//without cam
func NewSpriteHUD(tex Tex) *Sprite {
return NewSprite(tex, NoCam)
}
func | (filename string) (io.Reader, error) {
b, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
return bytes.NewBuffer(b), err
}
//doesn't do cache. for tests and models, better use AtlasTex
func NewSpriteFromFile(filename string, smoothFilter bool, sw, sh int, count int, params CamParams) (*Sprite, error) {
tex, err := GetTex(filename, smoothFilter, sw, sh, count, fileLoader)
if err != nil {
return nil, err
}
return NewSprite(tex, params), nil
}
func (s *Sprite) recalcColorM() {
const MaxColor = 0xffff
s.op.ColorM.Reset()
r, g, b, a := s.color.RGBA()
s.op.ColorM.Scale(s.alpha*float64(r)/MaxColor, s.alpha*float64(g)/MaxColor, s.alpha*float64(b)/MaxColor, s.alpha*float64(a)/MaxColor)
}
func (s *Sprite) SetColor(color color.Color) {
if s.color == color {
return
}
s.color = color
s.colorDirty = true
}
func (s *Sprite) SetAlpha(a float64) {
if s.alpha == a {
return
}
s.alpha = a
s.colorDirty = true
}
func (s *Sprite) SetScale(x, y float64) {
if s.sx == x && s.sy == y {
return
}
s.sx = x
s.sy = y
s.dirty = true
}
func (s *Sprite) SetSize(x, y float64) {
sx := x / float64(s.tex.sw)
sy := y / float64(s.tex.sh)
if s.sx == sx && s.sy == sy {
return
}
s.sx = sx
s.sy = sy
s.dirty = true
}
func (s *Sprite) SetSizeProportion(size float64) {
sx := size / float64(s.tex.sw)
sy := size / float64(s.tex.sh)
var scale float64
if sx > sy {
scale = sy
} else {
scale = sx
}
s.sx = scale
s.sy = scale
s.dirty = true
}
//pivotPartial is [0..1,0..1] vector of pivot point in parts of image size
func (s *Sprite) SetPivot(pivotPartial v2.V2) {
s.pivot = v2.V2{
X: pivotPartial.X * float64(s.tex.sw),
Y: pivotPartial.Y * float64(s.tex.sh),
}
s.dirty = true
}
func (s *Sprite) SetPos(pos v2.V2) {
if s.pos == pos {
return
}
s.pos = pos
s.dirty = true
}
func (s *Sprite) SetAng(angleDeg float64) {
if s.angle == angleDeg*Deg2Rad {
return
}
s.angle = angleDeg * Deg2Rad
s.dirty = true
}
func (s *Sprite) SetPosAng(pos v2.V2, angle float64) {
if s.pos == pos && s.angle == angle*Deg2Rad {
return
}
s.pos = pos
s.angle = angle * Deg2Rad
s.dirty = true
}
func (s *Sprite) calcGeom() {
G := ebiten.GeoM{}
G.Translate(-s.pivot.X, -s.pivot.Y)
G.Scale(s.sx, s.sy)
s.g1 = G
G.Reset()
G.Rotate(s.angle)
G.Translate(s.pos.X, s.pos.Y)
s.g2 = G
}
//Copy options, so cam apply do not change
func (s *Sprite) ImageOp() (*ebiten.Image, *ebiten.DrawImageOptions) {
if s.dirty {
s.calcGeom()
s.dirty = false
}
if s.colorDirty {
s.recalcColorM()
s.colorDirty = false
}
op := new(ebiten.DrawImageOptions)
*op = s.op
G := s.g1
//Flip vert before cam coords
if s.camParams.Cam != nil {
G.Scale(1, -1)
if s.camParams.DenyScale {
G.Scale(1/s.camParams.Cam.Scale, 1/s.camParams.Cam.Scale)
}
if s.camParams.DenyAngle {
G.Rotate(s.camParams.Cam.AngleDeg * Deg2Rad)
}
}
G.Concat(s.g2)
if s.camParams.Cam != nil {
G.Concat(s.camParams.Cam.Geom())
}
op.GeoM = G
return s.tex.image, op
}
func (s *Sprite) SkipDrawCheck() bool {
if s == nil {
log.Println("Draw called for nil Sprite")
return true
}
//Check for camera clipping
if s.camParams.Cam != nil {
w := float64(s.tex.sw) * s.sx
h := float64(s.tex.sh) * s.sy
inRect := s.camParams.Cam.RectInSpace(s.pos, w, h)
if !inRect {
return true
}
}
return false
}
func (s *Sprite) Draw(dest *ebiten.Image) {
if s.SkipDrawCheck() {
return
}
img, op := s.ImageOp()
dest.DrawImage(img, op)
}
//MUST support multiple draw with different parameters
func (s *Sprite) DrawF() (DrawF, string) {
if s.SkipDrawCheck() {
return nil, ""
}
//so we calc draw ops on s.DrawF() call not DrawF resolve
img, op := s.ImageOp()
f := func(dest *ebiten.Image) {
dest.DrawImage(img, op)
}
return f, s.tex.name
}
func (s *Sprite) SpriteN() int {
return s.spriteN
}
func (s *Sprite) SpritesCount() int {
return s.tex.count
}
func (s *Sprite) SetSpriteN(n int) {
n = n % s.tex.count
if s.spriteN == n {
return
}
s.spriteN = n
nx := n % s.tex.cols
ny := n / s.tex.cols
rect := image.Rect(nx*s.tex.sw, ny*s.tex.sh, (nx+1)*s.tex.sw, (ny+1)*s.tex.sh)
s.op.SourceRect = &rect
}
func (s *Sprite) NextSprite() {
s.SetSpriteN(s.spriteN + 1)
}
//for noCam use in fact
func (s *Sprite) GetRect() image.Rectangle {
return image.Rect(
int(s.pos.X-s.pivot.X*s.sx),
int(s.pos.Y-s.pivot.Y*s.sy),
int(s.pos.X+(float64(s.tex.sw)-s.pivot.X)*s.sx),
int(s.pos.Y+(float64(s.tex.sh)-s.pivot.Y)*s.sy))
}
func (s *Sprite) TexImageDispose() {
s.tex.image.Dispose()
}
func (s *Sprite) Cols() int {
return s.tex.cols
}
func (s *Sprite) Rows() int {
return s.tex.rows
}
//do not check for new tex size or sprite count
//so it is highly recommended to use the same size textures
func (s *Sprite) SetTex(t Tex) {
s.tex = t
}
func (s *Sprite) AddAng(dAng float64) {
if dAng == 0 {
return
}
s.angle += dAng * Deg2Rad
s.dirty = true
}
func (s *Sprite) IsOver(pos v2.V2, checkTransparent bool) bool {
_, op := s.ImageOp()
geom := op.GeoM
geom.Invert()
px, py := geom.Apply(pos.X, pos.Y)
p := image.Pt(int(px), int(py))
if !p.In(s.tex.image.Bounds()) {
return false
}
if !checkTransparent {
return true
}
_, _, _, a := s.tex.image.At(p.X, p.Y).RGBA()
return a > 0
}
| fileLoader |
redcap_records.py | #!/usr/bin/env python
# Contributors:
# Christopher P. Barnes <[email protected]>
# Andrei Sura: github.com/indera
# Mohan Das Katragadda <[email protected]>
# Philip Chase <[email protected]>
# Ruchi Vivek Desai <[email protected]>
# Taeber Rapczak <[email protected]>
# Nicholas Rejack <[email protected]>
# Josh Hanna <[email protected]>
# Copyright (c) 2015, University of Florida
# All rights reserved.
#
# Distributed under the BSD 3-Clause License
# For full text of the BSD 3-Clause License see http://opensource.org/licenses/BSD-3-Clause
import sys
import argparse
import json
from redcap import Project, RedcapError
def | ():
parser = argparse.ArgumentParser(
description='Read some data from a REDCap Project')
parser.add_argument(
'--token',
dest='token',
default='',
required=True,
help='Specify the authentication/authorization token that will provide access to the REDCap project')
parser.add_argument(
'--url',
dest='url',
default='',
required=True,
help='Specify the url of the REDCap server to connect with')
parser.add_argument(
'--verify_ssl',
dest='verify_ssl',
default=True,
help='Specify whether the SSL cert of the REDCap server should be checked')
parser.add_argument('-i', '--import_data', dest='import_data', default='',
help='Specify the input data file to load into REDCap')
parser.add_argument(
'-f',
'--forms',
dest='forms',
default='',
help='Specify a list of forms, separated by spaces, for which data should be returned.')
parser.add_argument(
'-t',
'--type',
choices=['json', 'csv', 'xml'],
dest='data_type',
default='csv',
help='Specify the file type used as input or output. Valid types: json, csv, xml')
parser.add_argument(
'--fields',
dest='fields',
default='',
help='Specify a list of fields, separated by spaces, for which data should be returned.')
parser.add_argument(
'-e',
'--events',
dest='events',
default='',
help='Specify a list of events, separated by spaces, for which data should be returned.')
parser.add_argument(
'-r',
'--records',
dest='records',
default='',
help='Specify a list of records, separated by spaces, for which data should be returned.')
# prepare the arguments we were given
args = vars(parser.parse_args())
# According to http://pycap.readthedocs.org/en/latest/api.html
# allowed data_types are: csv, json, xml
data_type = args['data_type']
# Turn the 'verify_ssl' parameter into the truth value we need to make a
# REDCap connection
if args['verify_ssl'] == 'y':
args['verify_ssl'] = True
else:
args['verify_ssl'] = False
# Attempt to connect to the REDCap project
try:
project = Project(args['url'], args['token'], "", args['verify_ssl'])
except:
print "Cannot connect to project at " + args['url'] + ' with token ' + args['token']
quit()
# either we export data...
if args['import_data'] == '':
my_forms = args['forms'].split()
my_fields = args['fields'].split()
my_events = args['events'].split()
my_records = args['records'].split()
data = project.export_records(
forms=my_forms,
format = data_type,
fields=my_fields,
events=my_events,
records=my_records,
event_name='unique')
if 'json' == data_type:
print json.dumps(data, ensure_ascii=False)
else:
print str(data)
else:
# ...or we import data
file = args['import_data']
try:
input = open(file, 'r')
except IOError:
print "Cannot open file " + file
quit()
if 'json' == data_type:
json_data = json.load(input)
response = project.import_records(json_data)
else:
response = project.import_records(input.read(), format = data_type)
print response
if __name__ == '__main__':
main()
| main |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.