file_name
stringlengths
3
137
prefix
stringlengths
0
918k
suffix
stringlengths
0
962k
middle
stringlengths
0
812k
time_helper.py
import signal import time from typing import Any, Callable import torch from easydict import EasyDict from .time_helper_base import TimeWrapper from .time_helper_cuda import get_cuda_time_wrapper def build_time_helper(cfg: EasyDict = None, wrapper_type: str = None) -> Callable[[], 'TimeWrapper']: r""" Overview: Build the timehelper Arguments: - cfg (:obj:`dict`): The config file, which is a multilevel dict, have large domain like evaluate, common, model, train etc, and each large domain has it's smaller domain. - wrapper_type (:obj:`str`): The type of wrapper returned, support ``['time', 'cuda']`` Returns: - time_wrapper (:obj:`TimeWrapper`): Return the corresponding timewrapper, Reference: ``ding.utils.timehelper.TimeWrapperTime`` and ``ding.utils.timehelper.get_cuda_time_wrapper``. """ # Note: wrapper_type has higher priority if wrapper_type is not None: time_wrapper_type = wrapper_type elif cfg is not None: time_wrapper_type = cfg.common.time_wrapper_type else: raise RuntimeError('Either wrapper_type or cfg should be provided.') if time_wrapper_type == 'time': return TimeWrapperTime elif time_wrapper_type == 'cuda': if torch.cuda.is_available(): # lazy initialize to make code runnable locally return get_cuda_time_wrapper() else: return TimeWrapperTime else: raise KeyError('invalid time_wrapper_type: {}'.format(time_wrapper_type)) class EasyTimer: r""" Overview: A decent timer wrapper that can be used easily. Interface: ``__init__``, ``__enter__``, ``__exit__`` Example: >>> wait_timer = EasyTimer() >>> with wait_timer: >>> func(...) >>> time_ = wait_timer.value # in second """ def __init__(self, cuda=True): r""" Overview: Init class EasyTimer Arguments: - cuda (:obj:`bool`): Whether to build timer with cuda type """ if torch.cuda.is_available() and cuda: time_wrapper_type = "cuda" else: time_wrapper_type = "time" self._timer = build_time_helper(wrapper_type=time_wrapper_type) self.value = 0.0 def __enter__(self): r""" Overview: Enter timer, start timing """ self.value = 0.0 self._timer.start_time() def __exit__(self, *args): r""" Overview: Exit timer, stop timing """ self.value = self._timer.end_time() class TimeWrapperTime(TimeWrapper): r""" Overview: A class method that inherit from ``TimeWrapper`` class Interface: ``start_time``, ``end_time`` """ # overwrite @classmethod def start_time(cls): r""" Overview: Implement and overide the ``start_time`` method in ``TimeWrapper`` class """ cls.start = time.time() # overwrite @classmethod def end_time(cls): r""" Overview: Implement and overide the end_time method in ``TimeWrapper`` class Returns: - time(:obj:`float`): The time between ``start_time`` and end_time """ cls.end = time.time() return cls.end - cls.start class WatchDog(object): """ Overview: Simple watchdog timer to detect timeouts Arguments: - timeout (:obj:`int`): Timeout value of the ``watchdog [seconds]``. .. note:: If it is not reset before exceeding this value, ``TimeourError`` raised. Interface: ``start``, ``stop`` Examples: >>> watchdog = WatchDog(x) # x is a timeout value >>> ... >>> watchdog.start() >>> ... # Some function """ def __init__(self, timeout: int = 1):
def start(self): r""" Overview: Start watchdog. """ signal.signal(signal.SIGALRM, self._event) signal.alarm(self._timeout) @staticmethod def _event(signum: Any, frame: Any): raise TimeoutError() def stop(self): r""" Overview: Stop watchdog with ``alarm(0)``, ``SIGALRM``, and ``SIG_DFL`` signals. """ signal.alarm(0) signal.signal(signal.SIGALRM, signal.SIG_DFL)
self._timeout = timeout + 1 self._failed = False
VpcSecurityGroupCreateTask.py
# coding=utf8 # Copyright 2018 JDCLOUD.COM # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # NOTE: This class is auto generated by the jdcloud code generator program. class
(object): def __init__(self, task=None): """ :param task: (Optional) """ self.task = task
VpcSecurityGroupCreateTask
JoyStick.py
# JoyStick # # Copyright (c) 2021 Hajime Saito # # Released under the MIT license. # see https://opensource.org/licenses/MIT import pygame from pygame.locals import * import Repeater JOY_MAX_TRIGGER = 16 JOY_NOINPUT = 0 JOY_UP = 0x1 << JOY_MAX_TRIGGER JOY_RIGHT = 0x2 << JOY_MAX_TRIGGER JOY_DOWN = 0x4 << JOY_MAX_TRIGGER JOY_LEFT = 0x8 << JOY_MAX_TRIGGER JOY_TRIGGER1 = 0x1 << 0 JOY_TRIGGER2 = 0x1 << 1 JOY_TRIGGER3 = 0x1 << 2 JOY_TRIGGER4 = 0x1 << 3 JOY_TRIGGER5 = 0x1 << 4 JOY_TRIGGER6 = 0x1 << 5 JOY_TRIGGER7 = 0x1 << 6 JOY_TRIGGER8 = 0x1 << 7 JOY_TRIGGER9 = 0x1 << 8 JOY_TRIGGER10 = 0x1 << 9 JOY_TRIGGER11 = 0x1 << 10 JOY_TRIGGER12 = 0x1 << 11 JOY_TRIGGER13 = 0x1 << 12 JOY_TRIGGER14 = 0x1 << 13 JOY_TRIGGER15 = 0x1 << 14 JOY_TRIGGER16 = 0x1 << 15 JOY_MASK_STICK = (JOY_UP | JOY_RIGHT | JOY_DOWN | JOY_LEFT) JOY_MASK_BUTTON = ~JOY_MASK_STICK class JoyStickBase(object): def __init__(self): self.data = JOY_NOINPUT self.prevData = JOY_NOINPUT self.xorData = JOY_NOINPUT self.latestButtonDown = JOY_NOINPUT self.latestButtonUp = JOY_NOINPUT self.repeater = Repeater.XorRepeater() self.repeater.setDefaultValue(JOY_NOINPUT) self.repeatedData = JOY_NOINPUT def update(self): # update self.data at subclass before call this. self.repeatedData = self.repeater.update(self.data) self.xorData = self.data ^ self.prevData self.latestButtonDown = self.xorData & self.data self.latestButtonUp = self.xorData & ~self.data self.prevData = self.data class JoyKey(JoyStickBase):
class JoyStick(JoyStickBase): def __init__(self, joyStickId=0): super().__init__() if joyStickId >= pygame.joystick.get_count(): raise ValueError("Invalid JoyStick ID {}".format(joyStickId)) self.joyStick = pygame.joystick.Joystick(joyStickId) self.joyStick.init() self.hasHat = True if self.joyStick.get_numhats() > 0 else False def update(self): self.data = JOY_NOINPUT stickDatas = [] if self.hasHat: for i in range(self.joyStick.get_numhats()): x, y = self.joyStick.get_hat(i) stickDatas.extend([ x, -y ]) else: for i in range(self.joyStick.get_numaxes()): stickDatas.append(self.joyStick.get_axis(i)) if stickDatas[1] < -0.5: self.data |= JOY_UP if stickDatas[1] > 0.5: self.data |= JOY_DOWN if stickDatas[0] > 0.5: self.data |= JOY_RIGHT if stickDatas[0] < -0.5: self.data |= JOY_LEFT for i in range(self.joyStick.get_numbuttons()): if self.joyStick.get_button(i) == True: self.data |= 1 << i super().update() class JoyStickIntegrator(JoyStickBase): def __init__(self): super().__init__() self.joySticks = [] def append(self, joyStick): self.joySticks.append(joyStick) def remove(self, joyStick): self.joySticks.remove(joyStick) def update(self): self.data = JOY_NOINPUT self.repeatedData = JOY_NOINPUT self.xorData = JOY_NOINPUT self.latestButtonDown = JOY_NOINPUT self.latestButtonUp = JOY_NOINPUT for joyStick in self.joySticks: joyStick.update() self.data |= joyStick.data self.repeatedData |= joyStick.repeatedData self.xorData |= joyStick.xorData self.latestButtonDown |= joyStick.latestButtonDown self.latestButtonUp |= joyStick.latestButtonUp
def __init__(self): super().__init__() self.vk_up = K_UP self.vk_right = K_RIGHT self.vk_down = K_DOWN self.vk_left = K_LEFT self.vk_button = [ 0 ] * JOY_MAX_TRIGGER self.vk_button[0] = K_z self.vk_button[1] = K_x self.vk_button[2] = K_c def update(self): key = pygame.key.get_pressed() self.data = JOY_NOINPUT if key[self.vk_up] == 1: self.data |= JOY_UP if key[self.vk_right] == 1: self.data |= JOY_RIGHT if key[self.vk_down] == 1: self.data |= JOY_DOWN if key[self.vk_left] == 1: self.data |= JOY_LEFT for i in range(JOY_MAX_TRIGGER): if key[self.vk_button[i]] == 1: self.data |= 1 << i super().update()
InterfaceSummary.tsx
import React, { Component } from 'react' import { connect } from 'react-redux' import copy from 'clipboard-copy' import { GlobalHotKeys } from 'react-hotkeys' import { replace, StoreStateRouterLocationURI, PropTypes } from '../../family' import { serve } from '../../relatives/services/constant' import { METHODS, STATUS_LIST } from './InterfaceForm' import { CopyToClipboard } from '../utils/' import { getRelativeUrl } from '../../utils/URLUtils' import './InterfaceSummary.css' import { showMessage, MSG_TYPE } from 'actions/common' import { TextField, Select, InputLabel, Input, MenuItem, FormControl, RadioGroup, FormControlLabel, Radio } from '@material-ui/core' import Markdown from 'markdown-to-jsx' export enum BODY_OPTION { FORM_DATA = 'FORM_DATA', FORM_URLENCODED = 'FORM_URLENCODED', RAW = 'RAW', BINARY = 'BINARY', } export const getBodyOptionStr = (bo: string) => { if (bo === BODY_OPTION.FORM_URLENCODED) { return 'x-www-form-urlencoded' } else if (bo === BODY_OPTION.FORM_DATA) { return 'form-data' } else if (bo === BODY_OPTION.RAW) { return 'raw' } else if (bo === BODY_OPTION.BINARY) { return 'binary' } else { return '-' } } export function formatBodyOption(type: BODY_OPTION) { switch (type) { case BODY_OPTION.BINARY: return 'Binary' case BODY_OPTION.FORM_DATA: return 'FormData' case BODY_OPTION.FORM_URLENCODED: return 'UrlEncoded' case BODY_OPTION.RAW: return 'Raw' default: return '-' } } export const BODY_OPTION_LIST = [ { label: 'form-data', value: BODY_OPTION.FORM_DATA }, { label: 'x-www-form-urlencoded', value: BODY_OPTION.FORM_URLENCODED }, { label: 'raw', value: BODY_OPTION.RAW }, { label: 'binary', value: BODY_OPTION.BINARY }, ] /** * 参数类型 */ export enum POS_TYPE { QUERY = 2, HEADER = 1, BODY = 3, PRE_REQUEST_SCRIPT = 4, TEST = 5 } function url2name
y) { // copy from http://gitlab.alibaba-inc.com/thx/magix-cli/blob/master/platform/rap.js#L306 const method = itf.method.toLowerCase() const apiUrl = itf.url const projectId = itf.repositoryId const id = itf.id // eslint-disable-next-line const regExp = /^(?:https?:\/\/[^\/]+)?(\/?.+?\/?)(?:\.[^./]+)?$/ const regExpExec = regExp.exec(apiUrl) if (!regExpExec) { return { ok: false, name: '', message: `\n ✘ 您的rap接口url设置格式不正确,参考格式:/api/test.json (接口url:${apiUrl}, 项目id:${projectId}, 接口id:${id})\n`, } } const urlSplit = regExpExec[1].split('/') // 接口地址为RESTful的,清除占位符 // api/:id/get -> api//get // api/bid[0-9]{4}/get -> api//get urlSplit.forEach((item, i) => { // eslint-disable-next-line if (/\:id/.test(item)) { urlSplit[i] = '$id' // eslint-disable-next-line } else if (/[\[\]\{\}]/.test(item)) { urlSplit[i] = '$regx' } }) // 只去除第一个为空的值,最后一个为空保留 // 有可能情况是接口 /api/login 以及 /api/login/ 需要同时存在 if (urlSplit[0].trim() === '') { urlSplit.shift() } urlSplit.push(method) const urlToName = urlSplit.join('_') return { ok: true, name: urlToName, message: '', } } type InterfaceSummaryProps = { store: object; handleChangeInterface: (itf: any) => void; showMessage: typeof showMessage; [x: string]: any; } type InterfaceSummaryState = { bodyOption?: any method?: any status?: any posFilter: POS_TYPE [x: string]: any } class InterfaceSummary extends Component< InterfaceSummaryProps, InterfaceSummaryState > { static contextTypes = { onDeleteInterface: PropTypes.func.isRequired, } constructor(props: any) { super(props) this.state = { bodyOption: props?.itf?.bodyOption ?? BODY_OPTION.FORM_DATA, posFilter: props?.itf?.method === 'POST' ? POS_TYPE.BODY : POS_TYPE.QUERY, } this.changeMethod = this.changeMethod.bind(this) this.changeHandler = this.changeHandler.bind(this) this.switchBodyOption = this.switchBodyOption.bind(this) this.switchPos = this.switchPos.bind(this) this.copyModelName = this.copyModelName.bind(this) props.stateChangeHandler && props.stateChangeHandler(this.state) } switchBodyOption(val: BODY_OPTION) { this.setState({ bodyOption: val }, () => { this.props.stateChangeHandler(this.state) } ) } switchPos(val: POS_TYPE) { return () => { this.setState( { posFilter: val }, () => { this.props.stateChangeHandler(this.state) } ) } } changeMethod(method: any) { this.setState({ method }) } changeStatus(status: any) { this.setState({ status }) } changeHandler(e: any) { this.setState({ [e.target.name]: e.target.value, }) } copyModelName() { const { itf = {} } = this.props const res = url2name(itf) if (!res.ok) { this.props.showMessage(`复制失败: ${res.message}`, MSG_TYPE.ERROR) return } const modelName = res.name copy(modelName) .then(() => { this.props.showMessage( `成功复制 ${modelName} 到剪贴板`, MSG_TYPE.SUCCESS ) }) .catch(() => { this.props.showMessage(`复制失败`, MSG_TYPE.ERROR) }) } render() { const { repository = {}, itf = {}, editable, handleChangeInterface, } = this.props const { posFilter } = this.state const keyMap = { COPY_MODEL_NAME: ['ctrl+alt+c'], } const handlers = { COPY_MODEL_NAME: this.copyModelName, } if (!itf.id) { return null } return ( <div className="InterfaceSummary"> <GlobalHotKeys keyMap={keyMap} handlers={handlers} /> {!editable && ( <div className="header"> <CopyToClipboard text={itf.name}> <span className="title">{itf.name}</span> </CopyToClipboard> </div> )} <ul className="summary"> {editable ? ( <div style={{ maxWidth: 600 }}> <div> <TextField style={{ marginTop: 0 }} id="name" label="名称" value={itf.name || ''} fullWidth={true} autoComplete="off" onChange={e => { handleChangeInterface({ name: e.target.value }) }} margin="normal" /> </div> <div> <TextField id="url" label="地址" value={itf.url || ''} fullWidth={true} autoComplete="off" onChange={e => { handleChangeInterface({ url: e.target.value }) }} margin="normal" /> </div> <div> <div style={{ width: 90, display: 'inline-block' }}> <InputLabel shrink={true} htmlFor="method-label-placeholder"> 类型 </InputLabel> <Select value={itf.method} input={<Input name="method" id="method-label-placeholder" />} onChange={e => { handleChangeInterface({ method: e.target.value }) }} displayEmpty={true} name="method" > {METHODS.map(method => ( <MenuItem key={method} value={method}> {method} </MenuItem> ))} </Select> </div> <div style={{ width: 120, display: 'inline-block' }}> <InputLabel shrink={true} htmlFor="status-label-placeholder" style={{ width: 100 }}> 状态码 </InputLabel> <Select value={itf.status} input={<Input name="status" id="status-label-placeholder" />} onChange={e => { handleChangeInterface({ status: e.target.value }) }} displayEmpty={true} name="status" > {STATUS_LIST.map(status => ( <MenuItem key={status} value={status}> {status} </MenuItem> ))} </Select> </div> </div> <TextField id="description" label="描述(可多行, 支持Markdown)" value={itf.description || ''} fullWidth={true} multiline={true} autoComplete="off" rowsMax={8} onChange={e => { handleChangeInterface({ description: e.target.value }) }} margin="normal" /> </div> ) : ( <> <li> <span className="mr5"> <span className="label">接口ID:</span> {itf.id} </span> </li> <li> <CopyToClipboard text={itf.url} type="right"> <span className="mr5"> <span className="label">地址:</span> <a href={`${serve}/app/mock/${repository.id}${getRelativeUrl(itf.url || '')}`} target="_blank" rel="noopener noreferrer" > {itf.url} </a> </span> </CopyToClipboard> </li> <li> <span> <span className="label">类型:</span> <span>{itf.method}</span> </span> </li> <li> <span> <span className="label">状态码:</span> <span>{itf.status}</span> </span> </li> </> )} </ul> {itf.description && ( <CopyToClipboard text={itf.description}> <Markdown>{itf.description || ''}</Markdown> </CopyToClipboard> )} { editable && ( <ul className="nav nav-tabs" role="tablist"> <li className="nav-item" onClick={this.switchPos(POS_TYPE.HEADER)} > <button className={`nav-link ${posFilter === POS_TYPE.HEADER ? 'active' : ''}`} role="tab" data-toggle="tab" > Headers </button> </li> <li className="nav-item" onClick={this.switchPos(POS_TYPE.QUERY)} > <button className={`nav-link ${posFilter === POS_TYPE.QUERY ? 'active' : ''}`} role="tab" data-toggle="tab" > Query Params </button> </li> <li className="nav-item" onClick={this.switchPos(POS_TYPE.BODY)} > <button className={`nav-link ${posFilter === POS_TYPE.BODY ? 'active' : ''}`} role="tab" data-toggle="tab" > Body Params </button> </li> </ul> ) } { editable && posFilter === POS_TYPE.BODY ? ( <FormControl component="fieldset"> <RadioGroup aria-label="body type" name="body-type" value={this.state.bodyOption} onChange={e => this.switchBodyOption(e.target.value as BODY_OPTION)} row={true} > {BODY_OPTION_LIST.map(x => <FormControlLabel key={x.value} value={x.value} control={<Radio />} label={x.label} />)} </RadioGroup> </FormControl> ) : null } </div > ) } handleDelete = (e: any, itf: any) => { e.preventDefault() const message = '接口被删除后不可恢复!\n确认继续删除吗?' if (window.confirm(message)) { const { onDeleteInterface } = this.context onDeleteInterface(itf.id, () => { const { router, replace } = this.props const uri = StoreStateRouterLocationURI(router) const deleteHref = this.props.active ? uri.removeSearch('itf').href() : uri.href() replace(deleteHref) }) } } handleUpdate = () => { /** empty */ } } const mapStateToProps = () => ({ }) const mapDispatchToProps = { replace, showMessage, } export default connect( mapStateToProps, mapDispatchToProps )(InterfaceSummary)
(itf: an
io_helpers.rs
// Copyright 2020 Red Hat, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write; use std::path::Path; use anyhow::{anyhow, bail, Result}; use crate::io::Sha256Digest; // ab/cdef....file --> 0xabcdef... pub fn object_path_to_checksum(path: &Path) -> Result<Sha256Digest> { let chksum2 = path .parent() .unwrap() .file_name() .unwrap() .to_str() .unwrap(); let chksum62 = path .file_stem() .unwrap() .to_str() .ok_or_else(|| anyhow!("invalid non-UTF-8 object filename: {:?}", path))?; if chksum2.len() != 2 || chksum62.len() != 62 { bail!("Malformed object path {:?}", path); } let mut bin_chksum = [0u8; 32]; bin_chksum[0] = u8::from_str_radix(chksum2, 16)?; for i in 0..31 { bin_chksum[i + 1] = u8::from_str_radix(&chksum62[i * 2..(i + 1) * 2], 16)?; } Ok(Sha256Digest(bin_chksum)) } // 0xabcdef... --> ab/cdef....file pub fn checksum_to_object_path(chksum: &Sha256Digest, buf: &mut Vec<u8>) -> Result<()> { write!(buf, "{:02x}/", chksum.0[0])?; for i in 1..32 { write!(buf, "{:02x}", chksum.0[i])?; } write!(buf, ".file")?; Ok(()) } #[cfg(test)] mod tests { use super::*; use std::ffi::OsStr; use std::os::unix::ffi::OsStrExt; #[test]
// all zeros checksum_to_object_path(&chksum, &mut buf).unwrap(); assert_eq!( Path::new(OsStr::from_bytes(buf.as_slice())), Path::new("00/00000000000000000000000000000000000000000000000000000000000000.file") ); buf.truncate(0); // not all zeros chksum.0[0] = 0xff; chksum.0[1] = 0xfe; chksum.0[31] = 0xfd; checksum_to_object_path(&chksum, &mut buf).unwrap(); assert_eq!( Path::new(OsStr::from_bytes(buf.as_slice())), Path::new("ff/fe0000000000000000000000000000000000000000000000000000000000fd.file") ); buf.truncate(0); } }
fn test_checksum_to_object_path() { let mut chksum = Sha256Digest([0u8; 32]); let mut buf: Vec<u8> = Vec::new();
gcp.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package fuzz import ( "context" "errors" "fmt" "net/http" "strings" "github.com/golang/glog" computealpha "google.golang.org/api/compute/v0.alpha" computebeta "google.golang.org/api/compute/v0.beta" compute "google.golang.org/api/compute/v1" "google.golang.org/api/googleapi" "k8s.io/kubernetes/pkg/cloudprovider/providers/gce/cloud" "k8s.io/kubernetes/pkg/cloudprovider/providers/gce/cloud/filter" "k8s.io/kubernetes/pkg/cloudprovider/providers/gce/cloud/meta" "k8s.io/ingress-gce/pkg/utils" ) // ForwardingRule is a union of the API version types. type ForwardingRule struct { GA *compute.ForwardingRule Alpha *computealpha.ForwardingRule Beta *computebeta.ForwardingRule } // TargetHTTPProxy is a union of the API version types. type TargetHTTPProxy struct { GA *compute.TargetHttpProxy Alpha *computealpha.TargetHttpProxy Beta *computebeta.TargetHttpProxy } // TargetHTTPSProxy is a union of the API version types. type TargetHTTPSProxy struct { GA *compute.TargetHttpsProxy Alpha *computealpha.TargetHttpsProxy Beta *computebeta.TargetHttpsProxy } // URLMap is a union of the API version types. type URLMap struct { GA *compute.UrlMap Alpha *computealpha.UrlMap Beta *computebeta.UrlMap } // BackendService is a union of the API version types. type BackendService struct { GA *compute.BackendService Alpha *computealpha.BackendService Beta *computebeta.BackendService } // GCLB contains the resources for a load balancer. type GCLB struct { VIP string ForwardingRule map[meta.Key]*ForwardingRule TargetHTTPProxy map[meta.Key]*TargetHTTPProxy TargetHTTPSProxy map[meta.Key]*TargetHTTPSProxy URLMap map[meta.Key]*URLMap BackendService map[meta.Key]*BackendService } // NewGCLB returns an empty GCLB. func NewGCLB(vip string) *GCLB
// GCLBDeleteOptions may be provided when cleaning up GCLB resource. type GCLBDeleteOptions struct { // SkipDefaultBackend indicates whether to skip checking for the // system default backend. SkipDefaultBackend bool } // CheckResourceDeletion checks the existance of the resources. Returns nil if // all of the associated resources no longer exist. func (g *GCLB) CheckResourceDeletion(ctx context.Context, c cloud.Cloud, options *GCLBDeleteOptions) error { var resources []*meta.Key for k := range g.ForwardingRule { _, err := c.ForwardingRules().Get(ctx, &k) if err != nil { if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound { return err } } else { resources = append(resources, &k) } } for k := range g.TargetHTTPProxy { _, err := c.TargetHttpProxies().Get(ctx, &k) if err != nil { if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound { return err } } else { resources = append(resources, &k) } } for k := range g.TargetHTTPSProxy { _, err := c.TargetHttpsProxies().Get(ctx, &k) if err != nil { if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound { return err } } else { resources = append(resources, &k) } } for k := range g.URLMap { _, err := c.UrlMaps().Get(ctx, &k) if err != nil { if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound { return err } } else { resources = append(resources, &k) } } for k := range g.BackendService { bs, err := c.BackendServices().Get(ctx, &k) if err != nil { if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound { return err } } else { if options != nil && options.SkipDefaultBackend { desc := utils.DescriptionFromString(bs.Description) if desc.ServiceName == "kube-system/default-http-backend" { continue } } resources = append(resources, &k) } } if len(resources) != 0 { var s []string for _, r := range resources { s = append(s, r.String()) } return fmt.Errorf("resources still exist (%s)", strings.Join(s, ", ")) } return nil } func hasAlphaResource(resourceType string, validators []FeatureValidator) bool { for _, val := range validators { if val.HasAlphaResource(resourceType) { return true } } return false } func hasBetaResource(resourceType string, validators []FeatureValidator) bool { for _, val := range validators { if val.HasBetaResource(resourceType) { return true } } return false } // GCLBForVIP retrieves all of the resources associated with the GCLB for a // given VIP. func GCLBForVIP(ctx context.Context, c cloud.Cloud, vip string, validators []FeatureValidator) (*GCLB, error) { gclb := NewGCLB(vip) allGFRs, err := c.GlobalForwardingRules().List(ctx, filter.None) if err != nil { glog.Warningf("Error listing forwarding rules: %v", err) return nil, err } var gfrs []*compute.ForwardingRule for _, gfr := range allGFRs { if gfr.IPAddress == vip { gfrs = append(gfrs, gfr) } } var urlMapKey *meta.Key for _, gfr := range gfrs { frKey := meta.GlobalKey(gfr.Name) gclb.ForwardingRule[*frKey] = &ForwardingRule{GA: gfr} if hasAlphaResource("forwardingRule", validators) { fr, err := c.AlphaForwardingRules().Get(ctx, frKey) if err != nil { glog.Warningf("Error getting alpha forwarding rules: %v", err) return nil, err } gclb.ForwardingRule[*frKey].Alpha = fr } if hasBetaResource("forwardingRule", validators) { return nil, errors.New("unsupported forwardingRule version") } // ForwardingRule => TargetProxy resID, err := cloud.ParseResourceURL(gfr.Target) if err != nil { glog.Warningf("Error parsing Target (%q): %v", gfr.Target, err) return nil, err } switch resID.Resource { case "targetHttpProxies": p, err := c.TargetHttpProxies().Get(ctx, resID.Key) if err != nil { glog.Warningf("Error getting TargetHttpProxy %s: %v", resID.Key, err) return nil, err } gclb.TargetHTTPProxy[*resID.Key] = &TargetHTTPProxy{GA: p} if hasAlphaResource("targetHttpProxy", validators) || hasBetaResource("targetHttpProxy", validators) { return nil, errors.New("unsupported targetHttpProxy version") } urlMapResID, err := cloud.ParseResourceURL(p.UrlMap) if err != nil { glog.Warningf("Error parsing urlmap URL (%q): %v", p.UrlMap, err) return nil, err } if urlMapKey == nil { urlMapKey = urlMapResID.Key } if *urlMapKey != *urlMapResID.Key { glog.Warningf("Error targetHttpProxy references are not the same (%s != %s)", *urlMapKey, *urlMapResID.Key) return nil, fmt.Errorf("targetHttpProxy references are not the same: %+v != %+v", *urlMapKey, *urlMapResID.Key) } case "targetHttpsProxies": p, err := c.TargetHttpsProxies().Get(ctx, resID.Key) if err != nil { glog.Warningf("Error getting targetHttpsProxy (%s): %v", resID.Key, err) return nil, err } gclb.TargetHTTPSProxy[*resID.Key] = &TargetHTTPSProxy{GA: p} if hasAlphaResource("targetHttpsProxy", validators) || hasBetaResource("targetHttpsProxy", validators) { return nil, errors.New("unsupported targetHttpsProxy version") } urlMapResID, err := cloud.ParseResourceURL(p.UrlMap) if err != nil { glog.Warningf("Error parsing urlmap URL (%q): %v", p.UrlMap, err) return nil, err } if urlMapKey == nil { urlMapKey = urlMapResID.Key } if *urlMapKey != *urlMapResID.Key { glog.Warningf("Error targetHttpsProxy references are not the same (%s != %s)", *urlMapKey, *urlMapResID.Key) return nil, fmt.Errorf("targetHttpsProxy references are not the same: %+v != %+v", *urlMapKey, *urlMapResID.Key) } default: glog.Errorf("Unhandled resource: %q, grf = %+v", resID.Resource, gfr) return nil, fmt.Errorf("unhandled resource %q", resID.Resource) } } // TargetProxy => URLMap urlMap, err := c.UrlMaps().Get(ctx, urlMapKey) if err != nil { return nil, err } gclb.URLMap[*urlMapKey] = &URLMap{GA: urlMap} if hasAlphaResource("urlMap", validators) || hasBetaResource("urlMap", validators) { return nil, errors.New("unsupported urlMap version") } // URLMap => BackendService(s) var bsKeys []*meta.Key resID, err := cloud.ParseResourceURL(urlMap.DefaultService) if err != nil { return nil, err } bsKeys = append(bsKeys, resID.Key) for _, pm := range urlMap.PathMatchers { resID, err := cloud.ParseResourceURL(pm.DefaultService) if err != nil { return nil, err } bsKeys = append(bsKeys, resID.Key) for _, pr := range pm.PathRules { resID, err := cloud.ParseResourceURL(pr.Service) if err != nil { return nil, err } bsKeys = append(bsKeys, resID.Key) } } for _, bsKey := range bsKeys { bs, err := c.BackendServices().Get(ctx, bsKey) if err != nil { return nil, err } gclb.BackendService[*bsKey] = &BackendService{GA: bs} if hasAlphaResource("backendService", validators) { bs, err := c.AlphaBackendServices().Get(ctx, bsKey) if err != nil { return nil, err } gclb.BackendService[*bsKey].Alpha = bs } if hasBetaResource("backendService", validators) { bs, err := c.BetaBackendServices().Get(ctx, bsKey) if err != nil { return nil, err } gclb.BackendService[*bsKey].Beta = bs } } // TODO: fetch Backends return gclb, err }
{ return &GCLB{ VIP: vip, ForwardingRule: map[meta.Key]*ForwardingRule{}, TargetHTTPProxy: map[meta.Key]*TargetHTTPProxy{}, TargetHTTPSProxy: map[meta.Key]*TargetHTTPSProxy{}, URLMap: map[meta.Key]*URLMap{}, BackendService: map[meta.Key]*BackendService{}, } }
do_delete_audio_file_from_db_test.py
import os import time import unittest from mock import patch from chirp.library import audio_file_test from chirp.library import do_delete_audio_file_from_db from chirp.library import database TEST_DB_NAME_PATTERN = "/tmp/chirp-library-db_test.%d.sqlite" class DeleteFingerprintTest(unittest.TestCase): def setUp(self): self.name = TEST_DB_NAME_PATTERN % int(time.time() * 1000000) self.db = database.Database(self.name) def tearDown(self): os.unlink(self.name) def _add_test_audiofiles(self): test_volume = 17 test_import_timestamp = 1230959520 # populate some dummy audiofiles into the database all_au_files = [audio_file_test.get_test_audio_file(i) for i in xrange(10)] add_txn = self.db.begin_add(test_volume, test_import_timestamp) for au_file in all_au_files: au_file.volume = test_volume au_file.import_timestamp = test_import_timestamp for au_file in all_au_files: add_txn.add(au_file) add_txn.commit() def test_del_audiofilese__full_delete_single(self): # SETUP test_fingerprint = "0000000000000007" # Create db tables self.assertTrue(self.db.create_tables()) self._add_test_audiofiles() # make sure 10 records exist self.assertEqual(len(list(self.db.get_all())), 10) # quick confirmation that the audiofile that we want to test exists. af = self.db.get_by_fingerprint(test_fingerprint) self.assertEquals(af.fingerprint, test_fingerprint) afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name) # TEST afm.del_audiofiles([test_fingerprint]) # RESULTS # verify audiofile doesn't exist af = self.db.get_by_fingerprint(test_fingerprint) self.assertEquals(af, None) # make sure only 9 records exist now self.assertEqual(len(list(self.db.get_all())), 9) def test_del_audiofiles__full_delete_multiple(self): # SETUP test_fingerprint_1 = "0000000000000005" test_fingerprint_2 = "0000000000000007" # Create db tables self.assertTrue(self.db.create_tables()) self._add_test_audiofiles() # make sure 10 records exist self.assertEqual(len(list(self.db.get_all())), 10) # quick confirmation that the audiofiles that we want to test exists. af = self.db.get_by_fingerprint(test_fingerprint_1) self.assertEquals(af.fingerprint, test_fingerprint_1) af = self.db.get_by_fingerprint(test_fingerprint_2) self.assertEquals(af.fingerprint, test_fingerprint_2) afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name) # TEST afm.del_audiofiles([test_fingerprint_1, test_fingerprint_2]) # RESULTS # verify audiofiles don't exist af = self.db.get_by_fingerprint(test_fingerprint_1) self.assertEquals(af, None) af = self.db.get_by_fingerprint(test_fingerprint_2) self.assertEquals(af, None) # make sure only 8 records exist now self.assertEqual(len(list(self.db.get_all())), 8) def test_del_audiofiles__full_delete_non_existing_fingerprint(self): # SETUP test_fingerprint_1 = "0000000000000020" # Create db tables self.assertTrue(self.db.create_tables()) self._add_test_audiofiles() # make sure 10 records exist self.assertEqual(len(list(self.db.get_all())), 10) afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name) # TEST afm.del_audiofiles([test_fingerprint_1]) # RESULTS # make sure nothing was deleted self.assertEqual(len(list(self.db.get_all())), 10) def test_del_audiofiles__raises_exception(self): # SETUP test_fingerprint_1 = "0000000000000007" # Create db tables self.assertTrue(self.db.create_tables()) self._add_test_audiofiles() # make sure 10 records exist self.assertEqual(len(list(self.db.get_all())), 10) afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name) # TEST def _raise_exception(*args, **kwargs): raise Exception('Test') with patch.object(afm, 'conn', autospec=True) as mock_conn:
with self.assertRaises(Exception): afm.del_audiofiles([test_fingerprint_1]) mock_conn.rollback.assert_called_with() def test_get_audio_files__existing_record(self): # SETUP test_fingerprint = "0000000000000007" # Create db tables self.assertTrue(self.db.create_tables()) self._add_test_audiofiles() afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name) # TEST af = afm.get_audio_files(fingerprints=[test_fingerprint]) # RESULTS self.assertSetEqual( set(a['fingerprint'] for a in af), set([test_fingerprint])) def test_get_audio_files__non_existing_records(self): # SETUP test_fingerprint_1 = "0000000000000020" # Create db tables self.assertTrue(self.db.create_tables()) self._add_test_audiofiles() afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name) # TEST af = afm.get_audio_files( fingerprints=[test_fingerprint_1]) # RESULTS self.assertEqual(len(list(af)), 0) def test_get_tags__existing_record(self): # SETUP test_fingerprint_1 = "0000000000000005" # Create db tables self.assertTrue(self.db.create_tables()) self._add_test_audiofiles() afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name) # TEST af = afm.get_tags( fingerprints=[test_fingerprint_1]) # RESULTS self.assertListEqual( list(a['fingerprint'] for a in af), 5 * [test_fingerprint_1]) def test_get_tags__non_existing_records(self): # SETUP test_fingerprint_1 = "0000000000000020" # Create db tables self.assertTrue(self.db.create_tables()) self._add_test_audiofiles() afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name) # TEST af = afm.get_tags( fingerprints=[test_fingerprint_1]) # RESULTS self.assertEqual(len(list(af)), 0) def test_print_rows_can_handle_non_ascii(self): afm = do_delete_audio_file_from_db.AudioFileManager( library_db_file=self.name ) afm.print_rows([ [u'non-ascii string with a \xf8 character'], ])
mock_conn.execute.side_effect = _raise_exception
env.go
// Copyright 2015 Keybase, Inc. All rights reserved. Use of // this source code is governed by the included BSD license. package libkb import ( "fmt" "os" "path/filepath" "runtime" "strconv" "strings" "sync" "time" keybase1 "github.com/keybase/client/go/protocol/keybase1" "github.com/keybase/client/go/systemd" ) type NullConfiguration struct{} func (n NullConfiguration) GetHome() string { return "" } func (n NullConfiguration) GetServerURI() string { return "" } func (n NullConfiguration) GetConfigFilename() string { return "" } func (n NullConfiguration) GetUpdaterConfigFilename() string { return "" } func (n NullConfiguration) GetSessionFilename() string { return "" } func (n NullConfiguration) GetDbFilename() string { return "" } func (n NullConfiguration) GetChatDbFilename() string { return "" } func (n NullConfiguration) GetPvlKitFilename() string { return "" } func (n NullConfiguration) GetUsername() NormalizedUsername { return NormalizedUsername("") } func (n NullConfiguration) GetEmail() string { return "" } func (n NullConfiguration) GetUpgradePerUserKey() (bool, bool) { return false, false } func (n NullConfiguration) GetProxy() string { return "" } func (n NullConfiguration) GetGpgHome() string { return "" } func (n NullConfiguration) GetBundledCA(h string) string { return "" } func (n NullConfiguration) GetUserCacheMaxAge() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetProofCacheSize() (int, bool) { return 0, false } func (n NullConfiguration) GetProofCacheLongDur() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetProofCacheMediumDur() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetProofCacheShortDur() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetLinkCacheSize() (int, bool) { return 0, false } func (n NullConfiguration) GetLinkCacheCleanDur() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetUPAKCacheSize() (int, bool) { return 0, false } func (n NullConfiguration) GetUIDMapFullNameCacheSize() (int, bool) { return 0, false } func (n NullConfiguration) GetMerkleKIDs() []string { return nil } func (n NullConfiguration) GetCodeSigningKIDs() []string { return nil } func (n NullConfiguration) GetPinentry() string { return "" } func (n NullConfiguration) GetUID() (ret keybase1.UID) { return } func (n NullConfiguration) GetGpg() string { return "" } func (n NullConfiguration) GetGpgOptions() []string { return nil } func (n NullConfiguration) GetPGPFingerprint() *PGPFingerprint { return nil } func (n NullConfiguration) GetSecretKeyringTemplate() string { return "" } func (n NullConfiguration) GetSalt() []byte { return nil } func (n NullConfiguration) GetSocketFile() string { return "" } func (n NullConfiguration) GetPidFile() string { return "" } func (n NullConfiguration) GetStandalone() (bool, bool) { return false, false } func (n NullConfiguration) GetLocalRPCDebug() string { return "" } func (n NullConfiguration) GetTimers() string { return "" } func (n NullConfiguration) GetDeviceID() keybase1.DeviceID { return "" } func (n NullConfiguration) GetDeviceIDForUsername(un NormalizedUsername) keybase1.DeviceID { return "" } func (n NullConfiguration) GetDeviceIDForUID(u keybase1.UID) keybase1.DeviceID { return "" } func (n NullConfiguration) GetProxyCACerts() ([]string, error) { return nil, nil } func (n NullConfiguration) GetAutoFork() (bool, bool) { return false, false } func (n NullConfiguration) GetRunMode() (RunMode, error) { return NoRunMode, nil } func (n NullConfiguration) GetNoAutoFork() (bool, bool) { return false, false } func (n NullConfiguration) GetLogFile() string { return "" } func (n NullConfiguration) GetScraperTimeout() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetAPITimeout() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetTorMode() (TorMode, error) { return TorNone, nil } func (n NullConfiguration) GetTorHiddenAddress() string { return "" } func (n NullConfiguration) GetTorProxy() string { return "" } func (n NullConfiguration) GetUpdatePreferenceAuto() (bool, bool) { return false, false } func (n NullConfiguration) GetUpdatePreferenceSnoozeUntil() keybase1.Time { return keybase1.Time(0) } func (n NullConfiguration) GetUpdateLastChecked() keybase1.Time { return keybase1.Time(0) } func (n NullConfiguration) GetUpdatePreferenceSkip() string { return "" } func (n NullConfiguration) GetUpdateURL() string { return "" } func (n NullConfiguration) GetUpdateDisabled() (bool, bool) { return false, false } func (n NullConfiguration) GetVDebugSetting() string { return "" } func (n NullConfiguration) GetLocalTrackMaxAge() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetGregorURI() string { return "" } func (n NullConfiguration) GetGregorSaveInterval() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetGregorPingInterval() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetGregorPingTimeout() (time.Duration, bool) { return 0, false } func (n NullConfiguration) GetChatDelivererInterval() (time.Duration, bool) { return 0, false } func (n NullConfiguration) IsAdmin() (bool, bool) { return false, false } func (n NullConfiguration) GetGregorDisabled() (bool, bool) { return false, false } func (n NullConfiguration) GetMountDir() string { return "" } func (n NullConfiguration) GetBGIdentifierDisabled() (bool, bool) { return false, false } func (n NullConfiguration) GetFeatureFlags() (FeatureFlags, error) { return FeatureFlags{}, nil } func (n NullConfiguration) GetAppType() AppType { return NoAppType } func (n NullConfiguration) GetLevelDBNumFiles() (int, bool) { return 0, false } func (n NullConfiguration) GetChatInboxSourceLocalizeThreads() (int, bool) { return 1, false } func (n NullConfiguration) GetBug3964RepairTime(NormalizedUsername) (time.Time, error) { return time.Time{}, nil } func (n NullConfiguration) GetUserConfig() (*UserConfig, error) { return nil, nil } func (n NullConfiguration) GetUserConfigForUsername(s NormalizedUsername) (*UserConfig, error) { return nil, nil } func (n NullConfiguration) GetGString(string) string { return "" } func (n NullConfiguration) GetString(string) string { return "" } func (n NullConfiguration) GetBool(string, bool) (bool, bool) { return false, false } func (n NullConfiguration) GetAllUsernames() (NormalizedUsername, []NormalizedUsername, error) { return NormalizedUsername(""), nil, nil } func (n NullConfiguration) GetDebug() (bool, bool) { return false, false } func (n NullConfiguration) GetLogFormat() string { return "" } func (n NullConfiguration) GetAPIDump() (bool, bool) { return false, false } func (n NullConfiguration) GetNoPinentry() (bool, bool) { return false, false } func (n NullConfiguration) GetStringAtPath(string) (string, bool) { return "", false } func (n NullConfiguration) GetInterfaceAtPath(string) (interface{}, error) { return nil, nil } func (n NullConfiguration) GetBoolAtPath(string) (bool, bool) { return false, false } func (n NullConfiguration) GetIntAtPath(string) (int, bool) { return 0, false } func (n NullConfiguration) GetNullAtPath(string) bool { return false } func (n NullConfiguration) GetSecurityAccessGroupOverride() (bool, bool) { return false, false } type TestParameters struct { ConfigFilename string Home string GPG string GPGHome string GPGOptions []string Debug bool // Whether we are in Devel Mode Devel bool // If we're in dev mode, the name for this test, with a random // suffix. DevelName string RuntimeDir string DisableUpgradePerUserKey bool // set to true to use production run mode in tests UseProductionRunMode bool } func (tp TestParameters) GetDebug() (bool, bool) { if tp.Debug { return true, true } return false, false } type Env struct { sync.RWMutex cmd CommandLine config ConfigReader HomeFinder HomeFinder writer ConfigWriter Test *TestParameters updaterConfig UpdaterConfigReader } func (e *Env) GetConfig() ConfigReader { e.RLock() defer e.RUnlock() return e.config } func (e *Env) GetConfigWriter() ConfigWriter { e.RLock() defer e.RUnlock() return e.writer } func (e *Env) SetCommandLine(cmd CommandLine) { e.Lock() defer e.Unlock() e.cmd = cmd } func (e *Env) GetCommandLine() CommandLine { e.RLock() defer e.RUnlock() return e.cmd } func (e *Env) SetConfig(r ConfigReader, w ConfigWriter) { e.Lock() defer e.Unlock() e.config = r e.writer = w } func (e *Env) SetUpdaterConfig(r UpdaterConfigReader) { e.Lock() defer e.Unlock() e.updaterConfig = r } func (e *Env) GetUpdaterConfig() UpdaterConfigReader { e.RLock() defer e.RUnlock() return e.updaterConfig } func (e *Env) GetMountDir() (string, error) { runMode := e.GetRunMode() if runtime.GOOS == "windows" { return e.GetString( func() string { return e.cmd.GetMountDir() }, func() string { return os.Getenv("KEYBASE_MOUNTDIR") }, func() string { return e.GetConfig().GetMountDir() }, ), nil } switch runMode { case DevelRunMode: return "/keybase.devel", nil case StagingRunMode: return "/keybase.staging", nil case ProductionRunMode: return "/keybase", nil default: return "", fmt.Errorf("Invalid run mode: %s", runMode) } } func NewEnv(cmd CommandLine, config ConfigReader, getLog LogGetter) *Env { return newEnv(cmd, config, runtime.GOOS, getLog) } func newEnv(cmd CommandLine, config ConfigReader, osname string, getLog LogGetter) *Env { if cmd == nil { cmd = NullConfiguration{} } if config == nil { config = NullConfiguration{} } e := Env{cmd: cmd, config: config, Test: &TestParameters{}} e.HomeFinder = NewHomeFinder("keybase", func() string { return e.getHomeFromCmdOrConfig() }, osname, func() RunMode { return e.GetRunMode() }, getLog) return &e } func (e *Env) getHomeFromCmdOrConfig() string { return e.GetString( func() string { return e.Test.Home }, func() string { return e.cmd.GetHome() }, func() string { return e.GetConfig().GetHome() }, ) } func (e *Env) GetHome() string { return e.HomeFinder.Home(false) } func (e *Env) GetConfigDir() string { return e.HomeFinder.ConfigDir() } func (e *Env) GetCacheDir() string { return e.HomeFinder.CacheDir() } func (e *Env) GetSandboxCacheDir() string { return e.HomeFinder.SandboxCacheDir() } func (e *Env) GetDataDir() string { return e.HomeFinder.DataDir() } func (e *Env) GetLogDir() string { return e.HomeFinder.LogDir() } func (e *Env) GetRuntimeDir() string { return e.GetString( func() string { return e.Test.RuntimeDir }, func() string { return e.HomeFinder.RuntimeDir() }, ) } func (e *Env) GetServiceSpawnDir() (string, error) { return e.HomeFinder.ServiceSpawnDir() } func (e *Env) getEnvInt(s string) (int, bool) { v := os.Getenv(s) if len(v) > 0 { tmp, err := strconv.ParseInt(v, 0, 64) if err == nil { return int(tmp), true } } return 0, false } func (e *Env) getEnvPath(s string) []string { if tmp := os.Getenv(s); len(tmp) != 0 { return strings.Split(tmp, ":") } return nil } func (e *Env) getEnvBool(s string) (bool, bool) { return getEnvBool(s) } func getEnvBool(s string) (bool, bool)
func (e *Env) getEnvDuration(s string) (time.Duration, bool) { d, err := time.ParseDuration(os.Getenv(s)) if err != nil { return 0, false } return d, true } func (e *Env) GetString(flist ...(func() string)) string { var ret string for _, f := range flist { ret = f() if len(ret) > 0 { break } } return ret } func (e *Env) getPGPFingerprint(flist ...(func() *PGPFingerprint)) *PGPFingerprint { for _, f := range flist { if ret := f(); ret != nil { return ret } } return nil } func (e *Env) GetBool(def bool, flist ...func() (bool, bool)) bool { for _, f := range flist { if val, isSet := f(); isSet { return val } } return def } type NegBoolFunc struct { neg bool f func() (bool, bool) } // GetNegBool gets a negatable bool. You can give it a list of functions, // and also possible negations for those functions. func (e *Env) GetNegBool(def bool, flist []NegBoolFunc) bool { for _, f := range flist { if val, isSet := f.f(); isSet { return (val != f.neg) } } return def } func (e *Env) GetInt(def int, flist ...func() (int, bool)) int { for _, f := range flist { if val, isSet := f(); isSet { return val } } return def } func (e *Env) GetDuration(def time.Duration, flist ...func() (time.Duration, bool)) time.Duration { for _, f := range flist { if val, isSet := f(); isSet { return val } } return def } func (e *Env) GetServerURI() string { // appveyor and os x travis CI set server URI, so need to // check for test flag here in order for production api endpoint // tests to pass. if e.Test.UseProductionRunMode { return ServerLookup[e.GetRunMode()] } return e.GetString( func() string { return e.cmd.GetServerURI() }, func() string { return os.Getenv("KEYBASE_SERVER_URI") }, func() string { return e.GetConfig().GetServerURI() }, func() string { return ServerLookup[e.GetRunMode()] }, ) } func (e *Env) GetConfigFilename() string { return e.GetString( func() string { return e.Test.ConfigFilename }, func() string { return e.cmd.GetConfigFilename() }, func() string { return os.Getenv("KEYBASE_CONFIG_FILE") }, func() string { return e.GetConfig().GetConfigFilename() }, func() string { return filepath.Join(e.GetConfigDir(), ConfigFile) }, ) } func (e *Env) GetUpdaterConfigFilename() string { return e.GetString( func() string { return e.cmd.GetUpdaterConfigFilename() }, func() string { return os.Getenv("KEYBASE_UPDATER_CONFIG_FILE") }, func() string { return e.GetConfig().GetUpdaterConfigFilename() }, func() string { return filepath.Join(e.GetConfigDir(), UpdaterConfigFile) }, ) } func (e *Env) GetSessionFilename() string { return e.GetString( func() string { return e.cmd.GetSessionFilename() }, func() string { return os.Getenv("KEYBASE_SESSION_FILE") }, func() string { return e.GetConfig().GetSessionFilename() }, func() string { return filepath.Join(e.GetCacheDir(), SessionFile) }, ) } func (e *Env) GetDbFilename() string { return e.GetString( func() string { return e.cmd.GetDbFilename() }, func() string { return os.Getenv("KEYBASE_DB_FILE") }, func() string { return e.GetConfig().GetDbFilename() }, func() string { return filepath.Join(e.GetDataDir(), DBFile) }, ) } func (e *Env) GetChatDbFilename() string { return e.GetString( func() string { return e.cmd.GetChatDbFilename() }, func() string { return os.Getenv("KEYBASE_CHAT_DB_FILE") }, func() string { return e.GetConfig().GetChatDbFilename() }, func() string { return filepath.Join(e.GetDataDir(), ChatDBFile) }, ) } // GetPvlKitFilename gets the path to pvl kit file. // Its value is usually "" which means to use the server. func (e *Env) GetPvlKitFilename() string { return e.GetString( func() string { return e.cmd.GetPvlKitFilename() }, func() string { return os.Getenv("KEYBASE_PVL_KIT_FILE") }, func() string { return e.GetConfig().GetPvlKitFilename() }, ) } func (e *Env) GetDebug() bool { return e.GetBool(false, func() (bool, bool) { return e.Test.GetDebug() }, func() (bool, bool) { return e.cmd.GetDebug() }, func() (bool, bool) { return e.getEnvBool("KEYBASE_DEBUG") }, func() (bool, bool) { return e.GetConfig().GetDebug() }, ) } func (e *Env) GetAutoFork() bool { // On !Darwin, we auto-fork by default def := (runtime.GOOS != "darwin") return e.GetNegBool(def, []NegBoolFunc{ { neg: false, f: func() (bool, bool) { return e.cmd.GetAutoFork() }, }, { neg: true, f: func() (bool, bool) { return e.cmd.GetNoAutoFork() }, }, { neg: false, f: func() (bool, bool) { return e.getEnvBool("KEYBASE_AUTO_FORK") }, }, { neg: true, f: func() (bool, bool) { return e.getEnvBool("KEYBASE_NO_AUTO_FORK") }, }, { neg: false, f: func() (bool, bool) { return e.GetConfig().GetAutoFork() }, }, }, ) } func (e *Env) GetStandalone() bool { return e.GetBool(false, func() (bool, bool) { return e.cmd.GetStandalone() }, func() (bool, bool) { return e.getEnvBool("KEYBASE_STANDALONE") }, func() (bool, bool) { return e.GetConfig().GetStandalone() }, ) } func (e *Env) GetLogFormat() string { return e.GetString( func() string { return e.cmd.GetLogFormat() }, func() string { return os.Getenv("KEYBASE_LOG_FORMAT") }, func() string { return e.GetConfig().GetLogFormat() }, ) } func (e *Env) GetLabel() string { return e.GetString( func() string { return e.cmd.GetString("label") }, func() string { return os.Getenv("KEYBASE_LABEL") }, ) } func (e *Env) GetServiceType() string { return e.GetString( func() string { return os.Getenv("KEYBASE_SERVICE_TYPE") }, ) } func (e *Env) GetAPIDump() bool { return e.GetBool(false, func() (bool, bool) { return e.cmd.GetAPIDump() }, func() (bool, bool) { return e.getEnvBool("KEYBASE_API_DUMP") }, ) } func (e *Env) GetUsername() NormalizedUsername { return e.GetConfig().GetUsername() } func (e *Env) GetSocketBindFile() (string, error) { return e.GetString( func() string { return e.sandboxSocketFile() }, func() string { return e.defaultSocketFile() }, ), nil } func (e *Env) defaultSocketFile() string { socketFile := e.GetString( func() string { return e.cmd.GetSocketFile() }, func() string { return os.Getenv("KEYBASE_SOCKET_FILE") }, func() string { return e.GetConfig().GetSocketFile() }, ) if socketFile == "" { socketFile = filepath.Join(e.GetRuntimeDir(), SocketFile) } return socketFile } // sandboxSocketFile is socket file location for sandbox (macOS only) func (e *Env) sandboxSocketFile() string { sandboxCacheDir := e.HomeFinder.SandboxCacheDir() if sandboxCacheDir == "" { return "" } return filepath.Join(sandboxCacheDir, SocketFile) } func (e *Env) GetSocketDialFiles() ([]string, error) { dialFiles := []string{} sandboxSocketFile := e.sandboxSocketFile() if sandboxSocketFile != "" { dialFiles = append(dialFiles, sandboxSocketFile) } dialFiles = append(dialFiles, e.defaultSocketFile()) return dialFiles, nil } func (e *Env) GetGregorURI() string { return e.GetString( func() string { return os.Getenv("KEYBASE_PUSH_SERVER_URI") }, func() string { return e.GetConfig().GetGregorURI() }, func() string { return e.cmd.GetGregorURI() }, func() string { return GregorServerLookup[e.GetRunMode()] }, ) } func (e *Env) GetGregorSaveInterval() time.Duration { return e.GetDuration(time.Minute, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_PUSH_SAVE_INTERVAL") }, func() (time.Duration, bool) { return e.GetConfig().GetGregorSaveInterval() }, func() (time.Duration, bool) { return e.cmd.GetGregorSaveInterval() }, ) } func (e *Env) GetGregorDisabled() bool { return e.GetBool(false, func() (bool, bool) { return e.cmd.GetGregorDisabled() }, func() (bool, bool) { return getEnvBool("KEYBASE_PUSH_DISABLED") }, func() (bool, bool) { return e.GetConfig().GetGregorDisabled() }, ) } func (e *Env) GetBGIdentifierDisabled() bool { return e.GetBool(true, func() (bool, bool) { return e.cmd.GetBGIdentifierDisabled() }, func() (bool, bool) { return getEnvBool("KEYBASE_BG_IDENTIFIER_DISABLED") }, func() (bool, bool) { return e.GetConfig().GetBGIdentifierDisabled() }, ) } func (e *Env) GetGregorPingInterval() time.Duration { return e.GetDuration(10*time.Second, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_PUSH_PING_INTERVAL") }, func() (time.Duration, bool) { return e.GetConfig().GetGregorPingInterval() }, func() (time.Duration, bool) { return e.cmd.GetGregorPingInterval() }, ) } func (e *Env) GetGregorPingTimeout() time.Duration { return e.GetDuration(5*time.Second, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_PUSH_PING_TIMEOUT") }, func() (time.Duration, bool) { return e.GetConfig().GetGregorPingTimeout() }, func() (time.Duration, bool) { return e.cmd.GetGregorPingTimeout() }, ) } func (e *Env) GetChatDelivererInterval() time.Duration { return e.GetDuration(30*time.Second, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_CHAT_DELIVERER_INTERVAL") }, func() (time.Duration, bool) { return e.GetConfig().GetChatDelivererInterval() }, func() (time.Duration, bool) { return e.cmd.GetChatDelivererInterval() }, ) } func (e *Env) GetPidFile() (ret string, err error) { ret = e.GetString( func() string { return e.cmd.GetPidFile() }, func() string { return os.Getenv("KEYBASE_PID_FILE") }, func() string { return e.GetConfig().GetPidFile() }, ) if len(ret) == 0 { ret = filepath.Join(e.GetRuntimeDir(), PIDFile) } return } func (e *Env) GetEmail() string { return e.GetString( func() string { return os.Getenv("KEYBASE_EMAIL") }, ) } // Upgrade sigchains to contain per-user-keys. func (e *Env) GetUpgradePerUserKey() bool { return !e.Test.DisableUpgradePerUserKey } func (e *Env) GetProxy() string { return e.GetString( func() string { return e.cmd.GetProxy() }, func() string { return os.Getenv("https_proxy") }, func() string { return os.Getenv("http_proxy") }, func() string { return e.GetConfig().GetProxy() }, ) } func (e *Env) GetGpgHome() string { return e.GetString( func() string { return e.Test.GPGHome }, func() string { return e.cmd.GetGpgHome() }, func() string { return os.Getenv("GNUPGHOME") }, func() string { return e.GetConfig().GetGpgHome() }, func() string { return filepath.Join(e.GetHome(), ".gnupg") }, ) } func (e *Env) GetPinentry() string { return e.GetString( func() string { return e.cmd.GetPinentry() }, func() string { return os.Getenv("KEYBASE_PINENTRY") }, func() string { return e.GetConfig().GetPinentry() }, ) } func (e *Env) GetNoPinentry() bool { isno := func(s string) (bool, bool) { s = strings.ToLower(s) if s == "0" || s == "no" || s == "n" || s == "none" { return true, true } return false, false } return e.GetBool(false, func() (bool, bool) { return isno(e.cmd.GetPinentry()) }, func() (bool, bool) { return isno(os.Getenv("KEYBASE_PINENTRY")) }, func() (bool, bool) { return e.GetConfig().GetNoPinentry() }, ) } func (e *Env) GetBundledCA(host string) string { return e.GetString( func() string { return e.GetConfig().GetBundledCA(host) }, func() string { ret, ok := GetBundledCAsFromHost(host) if !ok { return "" } return string(ret) }, ) } func (e *Env) GetUserCacheMaxAge() time.Duration { return e.GetDuration(UserCacheMaxAge, func() (time.Duration, bool) { return e.cmd.GetUserCacheMaxAge() }, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_USER_CACHE_MAX_AGE") }, func() (time.Duration, bool) { return e.GetConfig().GetUserCacheMaxAge() }, ) } func (e *Env) GetAPITimeout() time.Duration { return e.GetDuration(HTTPDefaultTimeout, func() (time.Duration, bool) { return e.cmd.GetAPITimeout() }, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_API_TIMEOUT") }, func() (time.Duration, bool) { return e.GetConfig().GetAPITimeout() }, ) } func (e *Env) GetScraperTimeout() time.Duration { return e.GetDuration(HTTPDefaultScraperTimeout, func() (time.Duration, bool) { return e.cmd.GetScraperTimeout() }, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_SCRAPER_TIMEOUT") }, func() (time.Duration, bool) { return e.GetConfig().GetScraperTimeout() }, ) } func (e *Env) GetLocalTrackMaxAge() time.Duration { return e.GetDuration(LocalTrackMaxAge, func() (time.Duration, bool) { return e.cmd.GetLocalTrackMaxAge() }, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_LOCAL_TRACK_MAX_AGE") }, func() (time.Duration, bool) { return e.GetConfig().GetLocalTrackMaxAge() }, ) } func (e *Env) GetProofCacheSize() int { return e.GetInt(ProofCacheSize, e.cmd.GetProofCacheSize, func() (int, bool) { return e.getEnvInt("KEYBASE_PROOF_CACHE_SIZE") }, e.GetConfig().GetProofCacheSize, ) } func (e *Env) GetProofCacheLongDur() time.Duration { return e.GetDuration(ProofCacheLongDur, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_PROOF_CACHE_LONG_DUR") }, e.GetConfig().GetProofCacheLongDur, ) } func (e *Env) GetProofCacheMediumDur() time.Duration { return e.GetDuration(ProofCacheMediumDur, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_PROOF_CACHE_MEDIUM_DUR") }, e.GetConfig().GetProofCacheMediumDur, ) } func (e *Env) GetProofCacheShortDur() time.Duration { return e.GetDuration(ProofCacheShortDur, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_PROOF_CACHE_SHORT_DUR") }, e.GetConfig().GetProofCacheShortDur, ) } func (e *Env) GetLinkCacheSize() int { return e.GetInt(LinkCacheSize, e.cmd.GetLinkCacheSize, func() (int, bool) { return e.getEnvInt("KEYBASE_LINK_CACHE_SIZE") }, e.GetConfig().GetLinkCacheSize, ) } func (e *Env) GetUPAKCacheSize() int { return e.GetInt(UPAKCacheSize, e.cmd.GetUPAKCacheSize, func() (int, bool) { return e.getEnvInt("KEYBASE_UPAK_CACHE_SIZE") }, e.GetConfig().GetUPAKCacheSize, ) } func (e *Env) GetUIDMapFullNameCacheSize() int { return e.GetInt(UIDMapFullNameCacheSize, e.cmd.GetUIDMapFullNameCacheSize, func() (int, bool) { return e.getEnvInt("KEYBASE_UID_MAP_FULL_NAME_CACHE_SIZE") }, e.GetConfig().GetUIDMapFullNameCacheSize, ) } func (e *Env) GetLevelDBNumFiles() int { return e.GetInt(LevelDBNumFiles, e.cmd.GetLevelDBNumFiles, func() (int, bool) { return e.getEnvInt("KEYBASE_LEVELDB_NUM_FILES") }, e.GetConfig().GetLevelDBNumFiles, ) } func (e *Env) GetLinkCacheCleanDur() time.Duration { return e.GetDuration(LinkCacheCleanDur, func() (time.Duration, bool) { return e.getEnvDuration("KEYBASE_LINK_CACHE_CLEAN_DUR") }, e.GetConfig().GetLinkCacheCleanDur, ) } func (e *Env) GetEmailOrUsername() string { un := e.GetUsername().String() if len(un) > 0 { return un } em := e.GetEmail() return em } func (e *Env) GetRunMode() RunMode { // If testing production run mode, then use it: if e.Test.UseProductionRunMode { return ProductionRunMode } var ret RunMode pick := func(m RunMode, err error) { if ret == NoRunMode && err == nil { ret = m } } pick(e.cmd.GetRunMode()) pick(StringToRunMode(os.Getenv("KEYBASE_RUN_MODE"))) pick(e.GetConfig().GetRunMode()) pick(DefaultRunMode, nil) // If we aren't running in devel or staging and we're testing. Let's run in devel. if e.Test.Devel && ret != DevelRunMode && ret != StagingRunMode { return DevelRunMode } return ret } func (e *Env) GetAppType() AppType { switch { case e.cmd.GetAppType() != NoAppType: return e.cmd.GetAppType() case StringToAppType(os.Getenv("KEYBASE_APP_TYPE")) != NoAppType: return StringToAppType(os.Getenv("KEYBASE_APP_TYPE")) case e.GetConfig().GetAppType() != NoAppType: return e.GetConfig().GetAppType() default: return NoAppType } } func (e *Env) GetFeatureFlags() FeatureFlags { var ret FeatureFlags pick := func(f FeatureFlags, err error) { if ret.Empty() && err == nil { ret = f } } pick(e.cmd.GetFeatureFlags()) pick(StringToFeatureFlags(os.Getenv("KEYBASE_FEATURES")), nil) pick(e.GetConfig().GetFeatureFlags()) return ret } func (e *Env) GetUID() keybase1.UID { return e.GetConfig().GetUID() } func (e *Env) GetStringList(list ...(func() []string)) []string { for _, f := range list { if res := f(); res != nil { return res } } return []string{} } func (e *Env) GetMerkleKIDs() []keybase1.KID { slist := e.GetStringList( func() []string { return e.cmd.GetMerkleKIDs() }, func() []string { return e.getEnvPath("KEYBASE_MERKLE_KIDS") }, func() []string { return e.GetConfig().GetMerkleKIDs() }, func() []string { ret := MerkleProdKIDs if e.GetRunMode() == DevelRunMode || e.GetRunMode() == StagingRunMode { ret = append(ret, MerkleTestKIDs...) ret = append(ret, MerkleStagingKIDs...) } return ret }, ) if slist == nil { return nil } var ret []keybase1.KID for _, s := range slist { ret = append(ret, keybase1.KIDFromString(s)) } return ret } func (e *Env) GetCodeSigningKIDs() []keybase1.KID { slist := e.GetStringList( func() []string { return e.cmd.GetCodeSigningKIDs() }, func() []string { return e.getEnvPath("KEYBASE_CODE_SIGNING_KIDS") }, func() []string { return e.GetConfig().GetCodeSigningKIDs() }, func() []string { ret := CodeSigningProdKIDs if e.GetRunMode() == DevelRunMode || e.GetRunMode() == StagingRunMode { ret = append(ret, CodeSigningTestKIDs...) ret = append(ret, CodeSigningStagingKIDs...) } return ret }, ) if slist == nil { return nil } var ret []keybase1.KID for _, s := range slist { ret = append(ret, keybase1.KIDFromString(s)) } return ret } func (e *Env) GetGpg() string { return e.GetString( func() string { return e.Test.GPG }, func() string { return e.cmd.GetGpg() }, func() string { return os.Getenv("GPG") }, func() string { return e.GetConfig().GetGpg() }, ) } func (e *Env) GetGpgOptions() []string { return e.GetStringList( func() []string { return e.Test.GPGOptions }, func() []string { return e.cmd.GetGpgOptions() }, func() []string { return e.GetConfig().GetGpgOptions() }, ) } func (e *Env) GetSecretKeyringTemplate() string { return e.GetString( func() string { return e.cmd.GetSecretKeyringTemplate() }, func() string { return os.Getenv("KEYBASE_SECRET_KEYRING_TEMPLATE") }, func() string { return e.GetConfig().GetSecretKeyringTemplate() }, func() string { return filepath.Join(e.GetConfigDir(), SecretKeyringTemplate) }, ) } func (e *Env) GetSalt() []byte { return e.GetConfig().GetSalt() } func (e *Env) GetLocalRPCDebug() string { return e.GetString( func() string { return e.cmd.GetLocalRPCDebug() }, func() string { return os.Getenv("KEYBASE_LOCAL_RPC_DEBUG") }, func() string { return e.GetConfig().GetLocalRPCDebug() }, ) } func (e *Env) GetDoLogForward() bool { return e.GetLocalRPCDebug() == "" } func (e *Env) GetTimers() string { return e.GetString( func() string { return e.cmd.GetTimers() }, func() string { return os.Getenv("KEYBASE_TIMERS") }, func() string { return e.GetConfig().GetTimers() }, ) } func (e *Env) GetConvSourceType() string { return e.GetString( func() string { return os.Getenv("KEYBASE_CONV_SOURCE_TYPE") }, func() string { return "hybrid" }, ) } func (e *Env) GetInboxSourceType() string { return e.GetString( func() string { return os.Getenv("KEYBASE_INBOX_SOURCE_TYPE") }, func() string { return "hybrid" }, ) } func (e *Env) GetChatInboxSourceLocalizeThreads() int { return e.GetInt( 10, e.cmd.GetChatInboxSourceLocalizeThreads, func() (int, bool) { return e.getEnvInt("KEYBASE_INBOX_SOURCE_LOCALIZE_THREADS") }, e.GetConfig().GetChatInboxSourceLocalizeThreads, ) } // GetChatMemberType returns the default member type for new conversations. // Currently defaults to `kbfs`, but `impteam` will be default in future (and is the default for admins) func (e *Env) GetChatMemberType() string { if e.GetFeatureFlags().Admin() { return "impteam" } return "kbfs" } func (e *Env) GetDeviceID() keybase1.DeviceID { return e.GetConfig().GetDeviceID() } func (e *Env) GetDeviceIDForUsername(u NormalizedUsername) keybase1.DeviceID { return e.GetConfig().GetDeviceIDForUsername(u) } func (e *Env) GetDeviceIDForUID(u keybase1.UID) keybase1.DeviceID { return e.GetConfig().GetDeviceIDForUID(u) } func (e *Env) GetInstallID() (ret InstallID) { if rdr := e.GetUpdaterConfig(); rdr != nil { ret = rdr.GetInstallID() } return ret } func (e *Env) GetLogFile() string { return e.GetString( func() string { return e.cmd.GetLogFile() }, func() string { return os.Getenv("KEYBASE_LOG_FILE") }, ) } func (e *Env) GetDefaultLogFile() string { return filepath.Join(e.GetLogDir(), ServiceLogFileName) } func (e *Env) GetTorMode() TorMode { var ret TorMode pick := func(m TorMode, err error) { if ret == TorNone && err == nil { ret = m } } pick(e.cmd.GetTorMode()) pick(StringToTorMode(os.Getenv("KEYBASE_TOR_MODE"))) pick(e.GetConfig().GetTorMode()) return ret } func (e *Env) GetTorHiddenAddress() string { return e.GetString( func() string { return e.cmd.GetTorHiddenAddress() }, func() string { return os.Getenv("KEYBASE_TOR_HIDDEN_ADDRESS") }, func() string { return e.GetConfig().GetTorHiddenAddress() }, func() string { return TorServerURI }, ) } func (e *Env) GetTorProxy() string { return e.GetString( func() string { return e.cmd.GetTorProxy() }, func() string { return os.Getenv("KEYBASE_TOR_PROXY") }, func() string { return e.GetConfig().GetTorProxy() }, func() string { return TorProxy }, ) } func (e *Env) GetStoredSecretAccessGroup() string { var override = e.GetBool( false, func() (bool, bool) { return e.GetConfig().GetSecurityAccessGroupOverride() }, ) if override { return "" } return "99229SGT5K.group.keybase" } func (e *Env) GetStoredSecretServiceName() string { var serviceName string switch e.GetRunMode() { case DevelRunMode: serviceName = "keybase-devel" case StagingRunMode: serviceName = "keybase-staging" case ProductionRunMode: serviceName = "keybase" default: panic("Invalid run mode") } if e.Test.Devel { // Append DevelName so that tests won't clobber each // other's keychain entries on shutdown. serviceName += fmt.Sprintf("-test (%s)", e.Test.DevelName) } return serviceName } type AppConfig struct { NullConfiguration HomeDir string LogFile string RunMode RunMode Debug bool LocalRPCDebug string ServerURI string VDebugSetting string SecurityAccessGroupOverride bool ChatInboxSourceLocalizeThreads int } var _ CommandLine = AppConfig{} func (c AppConfig) GetLogFile() string { return c.LogFile } func (c AppConfig) GetDebug() (bool, bool) { return c.Debug, c.Debug } func (c AppConfig) GetLocalRPCDebug() string { return c.LocalRPCDebug } func (c AppConfig) GetRunMode() (RunMode, error) { return c.RunMode, nil } func (c AppConfig) GetHome() string { return c.HomeDir } func (c AppConfig) GetServerURI() string { return c.ServerURI } func (c AppConfig) GetSecurityAccessGroupOverride() (bool, bool) { return c.SecurityAccessGroupOverride, c.SecurityAccessGroupOverride } func (c AppConfig) GetAppType() AppType { return MobileAppType } func (c AppConfig) GetVDebugSetting() string { return c.VDebugSetting } func (c AppConfig) GetChatInboxSourceLocalizeThreads() (int, bool) { return c.ChatInboxSourceLocalizeThreads, true } func (e *Env) GetUpdatePreferenceAuto() (bool, bool) { return e.GetConfig().GetUpdatePreferenceAuto() } func (e *Env) GetUpdatePreferenceSkip() string { return e.GetConfig().GetUpdatePreferenceSkip() } func (e *Env) GetUpdatePreferenceSnoozeUntil() keybase1.Time { return e.GetConfig().GetUpdatePreferenceSnoozeUntil() } func (e *Env) GetUpdateLastChecked() keybase1.Time { return e.GetConfig().GetUpdateLastChecked() } func (e *Env) SetUpdatePreferenceAuto(b bool) error { return e.GetConfigWriter().SetUpdatePreferenceAuto(b) } func (e *Env) SetUpdatePreferenceSkip(v string) error { return e.GetConfigWriter().SetUpdatePreferenceSkip(v) } func (e *Env) SetUpdatePreferenceSnoozeUntil(t keybase1.Time) error { return e.GetConfigWriter().SetUpdatePreferenceSnoozeUntil(t) } func (e *Env) SetUpdateLastChecked(t keybase1.Time) error { return e.GetConfigWriter().SetUpdateLastChecked(t) } func (e *Env) GetUpdateURL() string { return e.GetConfig().GetUpdateURL() } func (e *Env) GetUpdateDisabled() (bool, bool) { return e.GetConfig().GetUpdateDisabled() } func (e *Env) IsAdmin() bool { b, _ := e.GetConfig().IsAdmin() return b } func (e *Env) GetVDebugSetting() string { return e.GetString( func() string { return e.cmd.GetVDebugSetting() }, func() string { return os.Getenv("KEYBASE_VDEBUG") }, func() string { return e.GetConfig().GetVDebugSetting() }, func() string { return "" }, ) } func (e *Env) GetRunModeAsString() string { return string(e.GetRunMode()) } // GetServiceInfoPath returns path to info file written by the Keybase service after startup func (e *Env) GetServiceInfoPath() string { return filepath.Join(e.GetRuntimeDir(), "keybased.info") } // GetKBFSInfoPath returns path to info file written by the KBFS service after startup func (e *Env) GetKBFSInfoPath() string { return filepath.Join(e.GetRuntimeDir(), "kbfs.info") } func (e *Env) GetUpdateDefaultInstructions() (string, error) { return PlatformSpecificUpgradeInstructionsString() } func (e *Env) RunningInCI() bool { return e.GetBool(false, func() (bool, bool) { return e.getEnvBool("KEYBASE_RUN_CI") }, ) } func (e *Env) WantsSystemd() bool { return (e.GetRunMode() == ProductionRunMode && systemd.IsRunningSystemd() && os.Getenv("KEYBASE_SYSTEMD") == "1") } func GetPlatformString() string { if isIOS { return "ios" } return runtime.GOOS }
{ tmp := os.Getenv(s) if len(tmp) == 0 { return false, false } tmp = strings.ToLower(tmp) if tmp == "0" || tmp[0] == byte('n') { return false, true } return true, true }
inspect.go
package system import ( "fmt" "strings" "golang.org/x/net/context" "github.com/docker/docker/cli" "github.com/docker/docker/cli/command" "github.com/docker/docker/cli/command/inspect" apiclient "github.com/docker/docker/client" "github.com/spf13/cobra" ) type inspectOptions struct { format string inspectType string size bool ids []string } // NewInspectCommand creates a new cobra.Command for `docker inspect` func NewInspectCommand(dockerCli *command.DockerCli) *cobra.Command { var opts inspectOptions cmd := &cobra.Command{ Use: "inspect [OPTIONS] NAME|ID [NAME|ID...]", Short: "Return low-level information on Docker objects", Args: cli.RequiresMinArgs(1), RunE: func(cmd *cobra.Command, args []string) error { opts.ids = args return runInspect(dockerCli, opts) }, } flags := cmd.Flags() flags.StringVarP(&opts.format, "format", "f", "", "Format the output using the given Go template")
return cmd } func runInspect(dockerCli *command.DockerCli, opts inspectOptions) error { var elementSearcher inspect.GetRefFunc switch opts.inspectType { case "", "container", "image", "node", "network", "service", "volume", "task", "plugin": elementSearcher = inspectAll(context.Background(), dockerCli, opts.size, opts.inspectType) default: return fmt.Errorf("%q is not a valid value for --type", opts.inspectType) } return inspect.Inspect(dockerCli.Out(), opts.ids, opts.format, elementSearcher) } func inspectContainers(ctx context.Context, dockerCli *command.DockerCli, getSize bool) inspect.GetRefFunc { return func(ref string) (interface{}, []byte, error) { return dockerCli.Client().ContainerInspectWithRaw(ctx, ref, getSize) } } func inspectImages(ctx context.Context, dockerCli *command.DockerCli) inspect.GetRefFunc { return func(ref string) (interface{}, []byte, error) { return dockerCli.Client().ImageInspectWithRaw(ctx, ref) } } func inspectNetwork(ctx context.Context, dockerCli *command.DockerCli) inspect.GetRefFunc { return func(ref string) (interface{}, []byte, error) { return dockerCli.Client().NetworkInspectWithRaw(ctx, ref) } } func inspectNode(ctx context.Context, dockerCli *command.DockerCli) inspect.GetRefFunc { return func(ref string) (interface{}, []byte, error) { return dockerCli.Client().NodeInspectWithRaw(ctx, ref) } } func inspectService(ctx context.Context, dockerCli *command.DockerCli) inspect.GetRefFunc { return func(ref string) (interface{}, []byte, error) { return dockerCli.Client().ServiceInspectWithRaw(ctx, ref) } } func inspectTasks(ctx context.Context, dockerCli *command.DockerCli) inspect.GetRefFunc { return func(ref string) (interface{}, []byte, error) { return dockerCli.Client().TaskInspectWithRaw(ctx, ref) } } func inspectVolume(ctx context.Context, dockerCli *command.DockerCli) inspect.GetRefFunc { return func(ref string) (interface{}, []byte, error) { return dockerCli.Client().VolumeInspectWithRaw(ctx, ref) } } func inspectPlugin(ctx context.Context, dockerCli *command.DockerCli) inspect.GetRefFunc { return func(ref string) (interface{}, []byte, error) { return dockerCli.Client().PluginInspectWithRaw(ctx, ref) } } func inspectAll(ctx context.Context, dockerCli *command.DockerCli, getSize bool, typeConstraint string) inspect.GetRefFunc { var inspectAutodetect = []struct { ObjectType string IsSizeSupported bool ObjectInspector func(string) (interface{}, []byte, error) }{ {"container", true, inspectContainers(ctx, dockerCli, getSize)}, {"image", false, inspectImages(ctx, dockerCli)}, {"network", false, inspectNetwork(ctx, dockerCli)}, {"volume", false, inspectVolume(ctx, dockerCli)}, {"service", false, inspectService(ctx, dockerCli)}, {"task", false, inspectTasks(ctx, dockerCli)}, {"node", false, inspectNode(ctx, dockerCli)}, {"plugin", false, inspectPlugin(ctx, dockerCli)}, } isErrNotSwarmManager := func(err error) bool { return strings.Contains(err.Error(), "This node is not a swarm manager") } isErrNotSupported := func(err error) bool { return strings.Contains(err.Error(), "not supported") } return func(ref string) (interface{}, []byte, error) { for _, inspectData := range inspectAutodetect { if typeConstraint != "" && inspectData.ObjectType != typeConstraint { continue } v, raw, err := inspectData.ObjectInspector(ref) if err != nil { if typeConstraint == "" && (apiclient.IsErrNotFound(err) || isErrNotSwarmManager(err) || isErrNotSupported(err)) { continue } return v, raw, err } if getSize && !inspectData.IsSizeSupported { fmt.Fprintf(dockerCli.Err(), "WARNING: --size ignored for %s\n", inspectData.ObjectType) } return v, raw, err } return nil, nil, fmt.Errorf("Error: No such object: %s", ref) } }
flags.StringVar(&opts.inspectType, "type", "", "Return JSON for specified type") flags.BoolVarP(&opts.size, "size", "s", false, "Display total file sizes if the type is container")
algorithm.rs
#[macro_use] extern crate geo;
use geo::algorithm::centroid::Centroid; fn main() { let linestring = geo::line_string![ (x: 40.02f64, y: 116.34), (x: 41.02f64, y: 116.34), ]; println!("Centroid {:?}", linestring.centroid()); }
basic.js
'use strict'; const base64 = require('base-64'); const client = require("../../../DataBase/data"); const bcrypt = require("bcrypt"); require("dotenv").config(); module.exports = async (req, res, next) => { try { if (req.headers.authorization) { let basic = req.headers.authorization.split(' ').pop(); let [user, pass] = base64.decode(basic).split(':'); req.user = await checkVolunteerExists(user); if (!req.user.data) { req.user = await checkHostExists(user); } if (!req.user.data) { res.send({message: "Invalid Login"}); } else { const hashedPassword = req.user.data.password; const success = await bcrypt.compare(pass, hashedPassword); req.user = { success: success, userData: req.user } next(); } } } catch (e) { res.json("Invalid Login"); } } async function checkVolunteerExists(userName) { try { const searchQuery = "select * from volunteer where user_name = $1 ;"; let data = await client .query(searchQuery, [userName]) return ({data: data.rows[0], role : "volunteer"}) ; } catch (e) { res.send({e: e}); } } async function
(userName) { try { const searchQuery = "select * from host where user_name = $1;"; let data = await client .query(searchQuery, [userName]) return ({data: data.rows[0], role : "host"}) } catch (e) { res.send({e: e}); } }
checkHostExists
lib.rs
// بِسْمِ اللَّهِ الرَّحْمَنِ الرَّحِيم // This file is part of Setheum. // Copyright (C) 2019-2021 Setheum Labs. // SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. //! # Airdrop Module //! //! ## Overview //! //! This module creates airdrops and distributes airdrops to the - //! acccounts in the airdrops from an update origin. //! The module for distributing Setheum Airdrops, //! it will be used for the Setheum IAE (Initial Airdrop Event). #![cfg_attr(not(feature = "std"), no_std)] #![allow(clippy::unused_unit)] use frame_support::{pallet_prelude::*, transactional, PalletId, traits::Get}; use frame_system::pallet_prelude::*; use orml_traits::{MultiCurrency, MultiCurrencyExtended}; use primitives::{Balance, CurrencyId}; use sp_std::vec::Vec; use sp_runtime::traits::AccountIdConversion; mod mock; pub use module::*; type BalanceOf<T> = <<T as Config>::MultiCurrency as MultiCurrency<<T as frame_system::Config>::AccountId>>::Balance; #[frame_support::pallet] pub mod module { use super::*; #[pallet::config] pub trait Config: frame_system::Config { type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>; /// The Currency for managing assets related to the SERP (Setheum Elastic Reserve Protocol). type MultiCurrency: MultiCurrencyExtended<Self::AccountId, CurrencyId = CurrencyId, Balance = Balance>; /// The maximum size of an airdrop list type MaxAirdropListSize: Get<usize>; #[pallet::constant] /// The Airdrop module pallet id, keeps airdrop funds. type FundingOrigin: Get<Self::AccountId>; /// The origin which may update and fund the Airdrop Treasury. type DropOrigin: EnsureOrigin<Self::Origin>; #[pallet::constant] /// The Airdrop module pallet id, keeps airdrop funds. type PalletId: Get<PalletId>; } #[pallet::error] pub enum Error<T> { // Duplicate Airdrop Account DuplicateAccounts, // The airdrop list is over the max size limit `MaxAirdropListSize` OverSizedAirdropList, } #[pallet::event] #[pallet::generate_deposit(pub(crate) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId", BalanceOf<T> = "Balance", CurrencyId = "CurrencyId")] pub enum Event<T: Config> { /// Drop Airdrop Airdrop { currency_id: CurrencyId, airdrop_list: Vec<(T::AccountId, Balance)> }, /// Fund the Airdrop Treasury from `FundingOrigin` \[from, currency_id, amount\] FundAirdropTreasury { funder: T::AccountId, currency_id: CurrencyId, amount: BalanceOf<T> } } #[pallet::pallet] pub struct Pallet<T>(PhantomData<T>); #[pallet::hooks] impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {} #[pallet::call] impl<T: Config> Pallet<T> { /// Fund Airdrop Treasury from deposit creation. /// /// The dispatch origin of this call must be `DropOrigin`. /// /// - `currency_id`: `CurrencyId` funding currency type. /// - `amount`: `BalanceOf<T>` funding amounts. #[pallet::weight((100_000_000 as Weight, DispatchClass::Operational))] #[transactional] pub fn fund_airdrop_treasury( origin: OriginFor<T>, currency_id: CurrencyId, amount: BalanceOf<T>, ) -> DispatchResult { T::DropOrigin::ensure_origin(origin)?; T::MultiCurrency::transfer(currency_id, &T::FundingOrigin::get(), &Self::account_id(), amount)?; Self::deposit_event(Event::FundAirdropTreasury { funder: T::FundingOrigin::get(), currency_id, amount }); Ok(()) } /// Make Airdrop to beneficiaries. /// /// The dispatch origin of this call must be `DropOrigin`. /// /// - `currency_id`: `CurrencyId` airdrop currency type. /// - `airdrop_list_json`: airdrop accounts and respective amounts in json format. #[pallet::weight((100_000_000 as Weight, DispatchClass::Operational))] #[transactional] pub fn make_airdrop( origin: OriginFor<T>, currency_id: CurrencyId, airdrop_list: Vec<(T::AccountId, Balance)>, ) -> DispatchResult { T::DropOrigin::ensure_origin(origin)?; ensure!( airdrop_list.len() <= T::MaxAirdropListSize::get(), Error::<T>::OverSizedAirdropList, ); Self::do_make_airdrop(currency_id, airdrop_list)?; Ok(()) } } } impl<T: Config> Pallet<T> { /// Get account of Airdrop module. pub fn account_id() -> T::AccountId {
d::get().into_account() } fn do_make_airdrop(currency_id: CurrencyId, airdrop_list: Vec<(T::AccountId, Balance)>) -> DispatchResult { // Make sure only unique accounts receive Airdrop let unique_accounts = airdrop_list .iter() .map(|(x,_)| x) .cloned(); ensure!( unique_accounts.len() == airdrop_list.len(), Error::<T>::DuplicateAccounts, ); for (beneficiary, amount) in airdrop_list.iter() { T::MultiCurrency::transfer(currency_id, &Self::account_id(), beneficiary, *amount)?; } Self::deposit_event(Event::Airdrop { currency_id, airdrop_list }); Ok(()) } }
T::PalletI
organizationiambinding.go
/* Copyright The Kubeform Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by informer-gen. DO NOT EDIT. package v1alpha1 import ( "context" time "time" googlev1alpha1 "kubeform.dev/kubeform/apis/google/v1alpha1" versioned "kubeform.dev/kubeform/client/clientset/versioned" internalinterfaces "kubeform.dev/kubeform/client/informers/externalversions/internalinterfaces" v1alpha1 "kubeform.dev/kubeform/client/listers/google/v1alpha1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" runtime "k8s.io/apimachinery/pkg/runtime" watch "k8s.io/apimachinery/pkg/watch" cache "k8s.io/client-go/tools/cache" ) // OrganizationIamBindingInformer provides access to a shared informer and lister for // OrganizationIamBindings. type OrganizationIamBindingInformer interface { Informer() cache.SharedIndexInformer Lister() v1alpha1.OrganizationIamBindingLister } type organizationIamBindingInformer struct { factory internalinterfaces.SharedInformerFactory tweakListOptions internalinterfaces.TweakListOptionsFunc namespace string } // NewOrganizationIamBindingInformer constructs a new informer for OrganizationIamBinding type. // Always prefer using an informer factory to get a shared informer instead of getting an independent // one. This reduces memory footprint and number of connections to the server. func
(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer { return NewFilteredOrganizationIamBindingInformer(client, namespace, resyncPeriod, indexers, nil) } // NewFilteredOrganizationIamBindingInformer constructs a new informer for OrganizationIamBinding type. // Always prefer using an informer factory to get a shared informer instead of getting an independent // one. This reduces memory footprint and number of connections to the server. func NewFilteredOrganizationIamBindingInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer { return cache.NewSharedIndexInformer( &cache.ListWatch{ ListFunc: func(options v1.ListOptions) (runtime.Object, error) { if tweakListOptions != nil { tweakListOptions(&options) } return client.GoogleV1alpha1().OrganizationIamBindings(namespace).List(context.TODO(), options) }, WatchFunc: func(options v1.ListOptions) (watch.Interface, error) { if tweakListOptions != nil { tweakListOptions(&options) } return client.GoogleV1alpha1().OrganizationIamBindings(namespace).Watch(context.TODO(), options) }, }, &googlev1alpha1.OrganizationIamBinding{}, resyncPeriod, indexers, ) } func (f *organizationIamBindingInformer) defaultInformer(client versioned.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer { return NewFilteredOrganizationIamBindingInformer(client, f.namespace, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions) } func (f *organizationIamBindingInformer) Informer() cache.SharedIndexInformer { return f.factory.InformerFor(&googlev1alpha1.OrganizationIamBinding{}, f.defaultInformer) } func (f *organizationIamBindingInformer) Lister() v1alpha1.OrganizationIamBindingLister { return v1alpha1.NewOrganizationIamBindingLister(f.Informer().GetIndexer()) }
NewOrganizationIamBindingInformer
schema.resolvers.go
package graph // This file will be automatically regenerated based on the schema, any resolver implementations // will be copied through when generating and any unknown code will be moved to the end. import ( "context" "fmt" "grapghql/graph/generated" "grapghql/graph/model" "grapghql/internal/auth" "grapghql/internal/links" "grapghql/internal/pkg/jwt" "grapghql/internal/users" "strconv" ) func (r *mutationResolver) CreateLink(ctx context.Context, input model.NewLink) (*model.Link, error) { user := auth.ForContext(ctx) if user == nil { return &model.Link{}, fmt.Errorf("access denied") } var link links.Link link.User = user link.Title = input.Title link.Address = input.Address linkID := link.Save() return &model.Link{ID: strconv.FormatInt(linkID, 10), Title: link.Title, Address: link.Address}, nil } func (r *mutationResolver) CreateUser(ctx context.Context, input model.NewUser) (string, error) { var user users.User user.Username = input.Username user.Password = input.Password user.Create() token, err := jwt.GenerateToken(user.Username) if err != nil { return "", err } return token, nil } func (r *mutationResolver) Login(ctx context.Context, input model.Login) (string, error) { var user users.User user.Username = input.Username user.Password = input.Password correct := user.Authenticate() if !correct { // 1 return "", &users.WrongUsernameOrPasswordError{} } token, err := jwt.GenerateToken(user.Username) if err != nil { return "", err } return token, nil } func (r *mutationResolver) RefreshToken(ctx context.Context, input model.RefreshTokenInput) (string, error) { username, err := jwt.ParseToken(input.Token) if err != nil { return "", fmt.Errorf("access denied") } token, err := jwt.GenerateToken(username) if err != nil { return "", err } return token, nil } func (r *queryResolver) Links(ctx context.Context) ([]*model.Link, error) { var resultLinks []*model.Link var dbLinks []links.Link dbLinks = links.GetAll() for _, link := range dbLinks{
ID: link.User.ID, Name: link.User.Username, } resultLinks = append(resultLinks, &model.Link{ID: link.ID, Title: link.Title, Address: link.Address, User: grahpqlUser}) } return resultLinks, nil } // Mutation returns generated.MutationResolver implementation. func (r *Resolver) Mutation() generated.MutationResolver { return &mutationResolver{r} } // Query returns generated.QueryResolver implementation. func (r *Resolver) Query() generated.QueryResolver { return &queryResolver{r} } type mutationResolver struct{ *Resolver } type queryResolver struct{ *Resolver }
grahpqlUser := &model.User{
PreviewConnectionPoint.ts
/** * @classdesc Basic preview strategy. Simply uses the type-defined preview in a connection point. * @author Orlando */ class
implements Preview { private point: ConnectionPoint; private element: HTMLElement; constructor(point: ConnectionPoint) { this.point = point; } setup(element: HTMLElement): void { this.element = element; this.point.getType().doPreviewSetup(element); } render(): void { this.point.getType().doPreviewRender(this.element); this.point.getNode().updatePlugPositions(); } }
PreviewConnectionPoint
tests.py
import MySQLdb import json from datetime import timedelta, datetime from unittest.mock import patch, Mock, ANY import sqlparse from django.contrib.auth import get_user_model from django.test import TestCase from common.config import SysConfig from sql.engines import EngineBase from sql.engines.goinception import GoInceptionEngine from sql.engines.models import ResultSet, ReviewSet, ReviewResult from sql.engines.mssql import MssqlEngine from sql.engines.mysql import MysqlEngine from sql.engines.redis import RedisEngine from sql.engines.pgsql import PgSQLEngine from sql.engines.oracle import OracleEngine from sql.engines.mongo import MongoEngine from sql.engines.inception import InceptionEngine, _repair_json_str from sql.models import Instance, SqlWorkflow, SqlWorkflowContent User = get_user_model() class TestReviewSet(TestCase): def test_review_set(self): new_review_set = ReviewSet() new_review_set.rows = [{'id': '1679123'}] self.assertIn('1679123', new_review_set.json()) class TestEngineBase(TestCase): @classmethod def setUpClass(cls): cls.u1 = User(username='some_user', display='用户1') cls.u1.save() cls.ins1 = Instance(instance_name='some_ins', type='master', db_type='mssql', host='some_host', port=1366, user='ins_user', password='some_str') cls.ins1.save() cls.wf1 = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer=cls.u1.username, engineer_display=cls.u1.display, audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=cls.ins1, db_name='some_db', syntax_type=1 ) cls.wfc1 = SqlWorkflowContent.objects.create( workflow=cls.wf1, sql_content='some_sql', execute_result=json.dumps([{ 'id': 1, 'sql': 'some_content' }])) @classmethod def tearDownClass(cls): cls.wfc1.delete() cls.wf1.delete() cls.ins1.delete() cls.u1.delete() def test_init_with_ins(self): engine = EngineBase(instance=self.ins1) self.assertEqual(self.ins1.instance_name, engine.instance_name) self.assertEqual(self.ins1.user, engine.user) class TestMssql(TestCase): @classmethod def setUpClass(cls): cls.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mssql', host='some_host', port=1366, user='ins_user', password='some_str') cls.ins1.save() cls.engine = MssqlEngine(instance=cls.ins1) cls.wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=cls.ins1, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=cls.wf, sql_content='insert into some_tb values (1)') @classmethod def tearDownClass(cls): cls.ins1.delete() cls.wf.delete() SqlWorkflowContent.objects.all().delete() @patch('sql.engines.mssql.pyodbc.connect') def testGetConnection(self, connect): new_engine = MssqlEngine(instance=self.ins1) new_engine.get_connection() connect.assert_called_once() @patch('sql.engines.mssql.pyodbc.connect') def testQuery(self, connect): cur = Mock() connect.return_value.cursor = cur cur.return_value.execute = Mock() cur.return_value.fetchmany.return_value = (('v1', 'v2'),) cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des')) new_engine = MssqlEngine(instance=self.ins1) query_result = new_engine.query(sql='some_str', limit_num=100) cur.return_value.execute.assert_called() cur.return_value.fetchmany.assert_called_once_with(100) connect.return_value.close.assert_called_once() self.assertIsInstance(query_result, ResultSet) @patch.object(MssqlEngine, 'query') def testAllDb(self, mock_query): db_result = ResultSet() db_result.rows = [('db_1',), ('db_2',)] mock_query.return_value = db_result new_engine = MssqlEngine(instance=self.ins1) dbs = new_engine.get_all_databases() self.assertEqual(dbs.rows, ['db_1', 'db_2']) @patch.object(MssqlEngine, 'query') def testAllTables(self, mock_query): table_result = ResultSet() table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')] mock_query.return_value = table_result new_engine = MssqlEngine(instance=self.ins1) tables = new_engine.get_all_tables('some_db') mock_query.assert_called_once_with(db_name='some_db', sql=ANY) self.assertEqual(tables.rows, ['tb_1', 'tb_2']) @patch.object(MssqlEngine, 'query') def testAllColumns(self, mock_query): db_result = ResultSet() db_result.rows = [('col_1', 'type'), ('col_2', 'type2')] mock_query.return_value = db_result new_engine = MssqlEngine(instance=self.ins1) dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb') self.assertEqual(dbs.rows, ['col_1', 'col_2']) @patch.object(MssqlEngine, 'query') def testDescribe(self, mock_query): new_engine = MssqlEngine(instance=self.ins1) new_engine.describe_table('some_db', 'some_db') mock_query.assert_called_once() def testQueryCheck(self): new_engine = MssqlEngine(instance=self.ins1) # 只抽查一个函数 banned_sql = 'select concat(phone,1) from user_table' check_result = new_engine.query_check(db_name='some_db', sql=banned_sql) self.assertTrue(check_result.get('bad_query')) banned_sql = 'select phone from user_table where phone=concat(phone,1)' check_result = new_engine.query_check(db_name='some_db', sql=banned_sql) self.assertTrue(check_result.get('bad_query')) sp_sql = "sp_helptext '[SomeName].[SomeAction]'" check_result = new_engine.query_check(db_name='some_db', sql=sp_sql) self.assertFalse(check_result.get('bad_query')) self.assertEqual(check_result.get('filtered_sql'), sp_sql) def test_filter_sql(self): new_engine = MssqlEngine(instance=self.ins1) # 只抽查一个函数 banned_sql = 'select user from user_table' check_result = new_engine.filter_sql(sql=banned_sql, limit_num=10) self.assertEqual(check_result, "select top 10 user from user_table") def test_execute_check(self): new_engine = MssqlEngine(instance=self.ins1) test_sql = 'use database\ngo\nsome sql1\nGO\nsome sql2\n\r\nGo\nsome sql3\n\r\ngO\n' check_result = new_engine.execute_check(db_name=None, sql=test_sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[1].__dict__['sql'], "use database\n") self.assertEqual(check_result.rows[2].__dict__['sql'], "\nsome sql1\n") self.assertEqual(check_result.rows[4].__dict__['sql'], "\nsome sql3\n\r\n") @patch('sql.engines.mssql.MssqlEngine.execute') def test_execute_workflow(self, mock_execute): mock_execute.return_value.error = None new_engine = MssqlEngine(instance=self.ins1) new_engine.execute_workflow(self.wf) # 有多少个备份表, 就需要execute多少次, 另外加上一条实际执行的次数 mock_execute.assert_called() self.assertEqual(1, mock_execute.call_count) @patch('sql.engines.mssql.MssqlEngine.get_connection') def test_execute(self, mock_connect): mock_cursor = Mock() mock_connect.return_value.cursor = mock_cursor new_engine = MssqlEngine(instance=self.ins1) execute_result = new_engine.execute('some_db', 'some_sql') # 验证结果, 无异常 self.assertIsNone(execute_result.error) self.assertEqual('some_sql', execute_result.full_sql) self.assertEqual(2, len(execute_result.rows)) mock_cursor.return_value.execute.assert_called() mock_cursor.return_value.commit.assert_called() mock_cursor.reset_mock() # 验证异常 mock_cursor.return_value.execute.side_effect = Exception('Boom! some exception!') execute_result = new_engine.execute('some_db', 'some_sql') self.assertIn('Boom! some exception!', execute_result.error) self.assertEqual('some_sql', execute_result.full_sql) self.assertEqual(2, len(execute_result.rows)) mock_cursor.return_value.commit.assert_not_called() mock_cursor.return_value.rollback.assert_called() class TestMysql(TestCase): def setUp(self): self.ins1 = Instance(instance_name='some_ins', type='slave', db_type='mysql', host='some_host', port=1366, user='ins_user', password='some_str') self.ins1.save() self.sys_config = SysConfig() self.wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins1, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=self.wf) def tearDown(self): self.ins1.delete() self.sys_config.purge() SqlWorkflow.objects.all().delete() SqlWorkflowContent.objects.all().delete() @patch('MySQLdb.connect') def test_engine_base_info(self, _conn): new_engine = MysqlEngine(instance=self.ins1) self.assertEqual(new_engine.name, 'MySQL') self.assertEqual(new_engine.info, 'MySQL engine') @patch('MySQLdb.connect') def testGetConnection(self, connect): new_engine = MysqlEngine(instance=self.ins1) new_engine.get_connection() connect.assert_called_once() @patch('MySQLdb.connect') def testQuery(self, connect): cur = Mock() connect.return_value.cursor = cur cur.return_value.execute = Mock() cur.return_value.fetchmany.return_value = (('v1', 'v2'),) cur.return_value.description = (('k1', 'some_other_des'), ('k2', 'some_other_des')) new_engine = MysqlEngine(instance=self.ins1) query_result = new_engine.query(sql='some_str', limit_num=100) cur.return_value.execute.assert_called() cur.return_value.fetchmany.assert_called_once_with(size=100) connect.return_value.close.assert_called_once() self.assertIsInstance(query_result, ResultSet) @patch.object(MysqlEngine, 'query') def testAllDb(self, mock_query): db_result = ResultSet() db_result.rows = [('db_1',), ('db_2',)] mock_query.return_value = db_result new_engine = MysqlEngine(instance=self.ins1) dbs = new_engine.get_all_databases() self.assertEqual(dbs.rows, ['db_1', 'db_2']) @patch.object(MysqlEngine, 'query') def testAllTables(self, mock_query): table_result = ResultSet() table_result.rows = [('tb_1', 'some_des'), ('tb_2', 'some_des')] mock_query.return_value = table_result new_engine = MysqlEngine(instance=self.ins1) tables = new_engine.get_all_tables('some_db') mock_query.assert_called_once_with(db_name='some_db', sql=ANY) self.assertEqual(tables.rows, ['tb_1', 'tb_2']) @patch.object(MysqlEngine, 'query') def testAllColumns(self, mock_query): db_result = ResultSet() db_result.rows = [('col_1', 'type'), ('col_2', 'type2')] mock_query.return_value = db_result new_engine = MysqlEngine(instance=self.ins1) dbs = new_engine.get_all_columns_by_tb('some_db', 'some_tb') self.assertEqual(dbs.rows, ['col_1', 'col_2']) @patch.object(MysqlEngine, 'query') def testDescribe(self, mock_query): new_engine = MysqlEngine(instance=self.ins1) new_engine.describe_table('some_db', 'some_db') mock_query.assert_called_once() def testQueryCheck(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = '-- 测试\n select user from usertable' check_result = new_engine.query_check(db_name='some_db', sql=sql_without_limit) self.assertEqual(check_result['filtered_sql'], 'select user from usertable') def test_query_check_wrong_sql(self): new_engine = MysqlEngine(instance=self.ins1) wrong_sql = '-- 测试' check_result = new_engine.query_check(db_name='some_db', sql=wrong_sql) self.assertDictEqual(check_result, {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': '-- 测试', 'has_star': False}) def test_query_check_update_sql(self): new_engine = MysqlEngine(instance=self.ins1) update_sql = 'update user set id=0' check_result = new_engine.query_check(db_name='some_db', sql=update_sql) self.assertDictEqual(check_result, {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': 'update user set id=0', 'has_star': False}) def test_filter_sql_with_delimiter(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = 'select user from usertable;' check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100) self.assertEqual(check_result, 'select user from usertable limit 100;') def test_filter_sql_without_delimiter(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = 'select user from usertable' check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100) self.assertEqual(check_result, 'select user from usertable limit 100;') def test_filter_sql_with_limit(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = 'select user from usertable limit 10' check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1) self.assertEqual(check_result, 'select user from usertable limit 1;') def test_filter_sql_with_limit_min(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = 'select user from usertable limit 10' check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=100) self.assertEqual(check_result, 'select user from usertable limit 10;') def test_filter_sql_with_limit_offset(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = 'select user from usertable limit 10 offset 100' check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1) self.assertEqual(check_result, 'select user from usertable limit 1;') def test_filter_sql_with_limit_nn(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = 'select user from usertable limit 10, 100' check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1) self.assertEqual(check_result, 'select user from usertable limit 1;') def test_filter_sql_upper(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = 'SELECT USER FROM usertable LIMIT 10, 100' check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1) self.assertEqual(check_result, 'SELECT USER FROM usertable limit 1;') def test_filter_sql_not_select(self): new_engine = MysqlEngine(instance=self.ins1) sql_without_limit = 'show create table usertable;' check_result = new_engine.filter_sql(sql=sql_without_limit, limit_num=1) self.assertEqual(check_result, 'show create table usertable;') @patch('sql.engines.mysql.data_masking', return_value=ResultSet()) def test_query_masking(self, _data_masking): query_result = ResultSet() new_engine = MysqlEngine(instance=self.ins1) masking_result = new_engine.query_masking(db_name='archery', sql='select 1', resultset=query_result) self.assertIsInstance(masking_result, ResultSet) @patch('sql.engines.mysql.data_masking', return_value=ResultSet()) def test_query_masking_not_select(self, _data_masking): query_result = ResultSet() new_engine = MysqlEngine(instance=self.ins1) masking_result = new_engine.query_masking(db_name='archery', sql='explain select 1', resultset=query_result) self.assertEqual(masking_result, query_result) @patch('sql.engines.mysql.InceptionEngine') def test_execute_check_select_sql(self, _inception_engine): self.sys_config.set('inception', 'true') sql = 'select * from user' inc_row = ReviewResult(id=1, errlevel=0, stagestatus='Audit completed', errormessage='None', sql=sql, affected_rows=0, execute_time=0, ) row = ReviewResult(id=1, errlevel=2, stagestatus='驳回不支持语句', errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!', sql=sql) _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row]) new_engine = MysqlEngine(instance=self.ins1) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) @patch('sql.engines.mysql.InceptionEngine') def test_execute_check_critical_sql(self, _inception_engine): self.sys_config.set('inception', 'true') self.sys_config.set('critical_ddl_regex', '^|update') self.sys_config.get_all_config() sql = 'update user set id=1' inc_row = ReviewResult(id=1, errlevel=0, stagestatus='Audit completed', errormessage='None', sql=sql, affected_rows=0, execute_time=0, ) row = ReviewResult(id=1, errlevel=2, stagestatus='驳回高危SQL', errormessage='禁止提交匹配' + '^|update' + '条件的语句!', sql=sql) _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[inc_row]) new_engine = MysqlEngine(instance=self.ins1) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) @patch('sql.engines.mysql.InceptionEngine') def test_execute_check_normal_sql(self, _inception_engine): self.sys_config.set('inception', 'true') sql = 'update user set id=1' row = ReviewResult(id=1, errlevel=0, stagestatus='Audit completed', errormessage='None', sql=sql, affected_rows=0, execute_time=0, ) _inception_engine.return_value.execute_check.return_value = ReviewSet(full_sql=sql, rows=[row]) new_engine = MysqlEngine(instance=self.ins1) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) @patch('sql.engines.mysql.InceptionEngine') def test_execute_check_normal_sql_with_Exception(self, _inception_engine): sql = 'update user set id=1' _inception_engine.return_value.execute_check.side_effect = RuntimeError() new_engine = MysqlEngine(instance=self.ins1) with self.assertRaises(RuntimeError): new_engine.execute_check(db_name=0, sql=sql) @patch.object(MysqlEngine, 'query') @patch('sql.engines.mysql.InceptionEngine') def test_execute_workflow(self, _inception_engine, _query): self.sys_config.set('inception', 'true') sql = 'update user set id=1' _inception_engine.return_value.execute.return_value = ReviewSet(full_sql=sql) _query.return_value.rows = (('0',),) new_engine = MysqlEngine(instance=self.ins1) execute_result = new_engine.execute_workflow(self.wf) self.assertIsInstance(execute_result, ReviewSet) @patch('MySQLdb.connect.cursor.execute') @patch('MySQLdb.connect.cursor') @patch('MySQLdb.connect') def test_execute(self, _connect, _cursor, _execute): new_engine = MysqlEngine(instance=self.ins1) execute_result = new_engine.execute(self.wf) self.assertIsInstance(execute_result, ResultSet) @patch('MySQLdb.connect') def test_server_version(self, _connect): _connect.return_value.get_server_info.return_value = '5.7.20-16log' new_engine = MysqlEngine(instance=self.ins1) server_version = new_engine.server_version self.assertTupleEqual(server_version, (5, 7, 20)) @patch.
nnect') @patch.object(MysqlEngine, 'query') def test_get_variables_filter(self, _query, _connect): _connect.return_value.get_server_info.return_value = '5.7.20-16log' new_engine = MysqlEngine(instance=self.ins1) new_engine.get_variables(variables=['binlog_format']) _query.assert_called() @patch.object(MysqlEngine, 'query') def test_set_variable(self, _query): new_engine = MysqlEngine(instance=self.ins1) new_engine.set_variable('binlog_format', 'ROW') _query.assert_called_once_with(sql="set global binlog_format=ROW;") @patch('sql.engines.mysql.GoInceptionEngine') def test_osc_go_inception(self, _inception_engine): self.sys_config.set('inception', 'false') _inception_engine.return_value.osc_control.return_value = ReviewSet() command = 'get' sqlsha1 = 'xxxxx' new_engine = MysqlEngine(instance=self.ins1) new_engine.osc_control(sqlsha1=sqlsha1, command=command) @patch('sql.engines.mysql.InceptionEngine') def test_osc_inception(self, _inception_engine): self.sys_config.set('inception', 'true') _inception_engine.return_value.osc_control.return_value = ReviewSet() command = 'get' sqlsha1 = 'xxxxx' new_engine = MysqlEngine(instance=self.ins1) new_engine.osc_control(sqlsha1=sqlsha1, command=command) @patch.object(MysqlEngine, 'query') def test_kill_connection(self, _query): new_engine = MysqlEngine(instance=self.ins1) new_engine.kill_connection(100) _query.assert_called_once_with(sql="kill 100") @patch.object(MysqlEngine, 'query') def test_seconds_behind_master(self, _query): new_engine = MysqlEngine(instance=self.ins1) new_engine.seconds_behind_master _query.assert_called_once_with(sql="show slave status", close_conn=False, cursorclass=MySQLdb.cursors.DictCursor) class TestRedis(TestCase): @classmethod def setUpClass(cls): cls.ins = Instance(instance_name='some_ins', type='slave', db_type='redis', host='some_host', port=1366, user='ins_user', password='some_str') cls.ins.save() @classmethod def tearDownClass(cls): cls.ins.delete() SqlWorkflow.objects.all().delete() SqlWorkflowContent.objects.all().delete() @patch('redis.Redis') def test_engine_base_info(self, _conn): new_engine = RedisEngine(instance=self.ins) self.assertEqual(new_engine.name, 'Redis') self.assertEqual(new_engine.info, 'Redis engine') @patch('redis.Redis') def test_get_connection(self, _conn): new_engine = RedisEngine(instance=self.ins) new_engine.get_connection() _conn.assert_called_once() @patch('redis.Redis.execute_command', return_value=[1, 2, 3]) def test_query_return_list(self, _execute_command): new_engine = RedisEngine(instance=self.ins) query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100) self.assertIsInstance(query_result, ResultSet) self.assertTupleEqual(query_result.rows, ([1], [2], [3])) @patch('redis.Redis.execute_command', return_value='text') def test_query_return_str(self, _execute_command): new_engine = RedisEngine(instance=self.ins) query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100) self.assertIsInstance(query_result, ResultSet) self.assertTupleEqual(query_result.rows, (['text'],)) @patch('redis.Redis.execute_command', return_value='text') def test_query_execute(self, _execute_command): new_engine = RedisEngine(instance=self.ins) query_result = new_engine.query(db_name=0, sql='keys *', limit_num=100) self.assertIsInstance(query_result, ResultSet) self.assertTupleEqual(query_result.rows, (['text'],)) @patch('redis.Redis.config_get', return_value={"databases": 4}) def test_get_all_databases(self, _config_get): new_engine = RedisEngine(instance=self.ins) dbs = new_engine.get_all_databases() self.assertListEqual(dbs.rows, ['0', '1', '2', '3']) def test_query_check_safe_cmd(self): safe_cmd = "keys 1*" new_engine = RedisEngine(instance=self.ins) check_result = new_engine.query_check(db_name=0, sql=safe_cmd) self.assertDictEqual(check_result, {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False}) def test_query_check_danger_cmd(self): safe_cmd = "keys *" new_engine = RedisEngine(instance=self.ins) check_result = new_engine.query_check(db_name=0, sql=safe_cmd) self.assertDictEqual(check_result, {'msg': '禁止执行该命令!', 'bad_query': True, 'filtered_sql': safe_cmd, 'has_star': False}) def test_filter_sql(self): safe_cmd = "keys 1*" new_engine = RedisEngine(instance=self.ins) check_result = new_engine.filter_sql(sql=safe_cmd, limit_num=100) self.assertEqual(check_result, 'keys 1*') def test_query_masking(self): query_result = ResultSet() new_engine = RedisEngine(instance=self.ins) masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result) self.assertEqual(masking_result, query_result) def test_execute_check(self): sql = 'set 1 1' row = ReviewResult(id=1, errlevel=0, stagestatus='Audit completed', errormessage='None', sql=sql, affected_rows=0, execute_time=0) new_engine = RedisEngine(instance=self.ins) check_result = new_engine.execute_check(db_name=0, sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) @patch('redis.Redis.execute_command', return_value='text') def test_execute_workflow_success(self, _execute_command): sql = 'set 1 1' row = ReviewResult(id=1, errlevel=0, stagestatus='Execute Successfully', errormessage='None', sql=sql, affected_rows=0, execute_time=0) wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql) new_engine = RedisEngine(instance=self.ins) execute_result = new_engine.execute_workflow(workflow=wf) self.assertIsInstance(execute_result, ReviewSet) self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys()) class TestPgSQL(TestCase): @classmethod def setUpClass(cls): cls.ins = Instance(instance_name='some_ins', type='slave', db_type='pgsql', host='some_host', port=1366, user='ins_user', password='some_str') cls.ins.save() cls.sys_config = SysConfig() @classmethod def tearDownClass(cls): cls.ins.delete() cls.sys_config.purge() @patch('psycopg2.connect') def test_engine_base_info(self, _conn): new_engine = PgSQLEngine(instance=self.ins) self.assertEqual(new_engine.name, 'PgSQL') self.assertEqual(new_engine.info, 'PgSQL engine') @patch('psycopg2.connect') def test_get_connection(self, _conn): new_engine = PgSQLEngine(instance=self.ins) new_engine.get_connection("some_dbname") _conn.assert_called_once() @patch('psycopg2.connect.cursor.execute') @patch('psycopg2.connect.cursor') @patch('psycopg2.connect') def test_query(self, _conn, _cursor, _execute): _conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)] new_engine = PgSQLEngine(instance=self.ins) query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=100, schema_name="some_schema") self.assertIsInstance(query_result, ResultSet) self.assertListEqual(query_result.rows, [(1,)]) @patch('psycopg2.connect.cursor.execute') @patch('psycopg2.connect.cursor') @patch('psycopg2.connect') def test_query_not_limit(self, _conn, _cursor, _execute): _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)] new_engine = PgSQLEngine(instance=self.ins) query_result = new_engine.query(db_name="some_dbname", sql='select 1', limit_num=0, schema_name="some_schema") self.assertIsInstance(query_result, ResultSet) self.assertListEqual(query_result.rows, [(1,)]) @patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)])) def test_get_all_databases(self, query): new_engine = PgSQLEngine(instance=self.ins) dbs = new_engine.get_all_databases() self.assertListEqual(dbs.rows, ['archery']) @patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('information_schema',), ('archery',), ('pg_catalog',)])) def test_get_all_schemas(self, _query): new_engine = PgSQLEngine(instance=self.ins) schemas = new_engine.get_all_schemas(db_name='archery') self.assertListEqual(schemas.rows, ['archery']) @patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)])) def test_get_all_tables(self, _query): new_engine = PgSQLEngine(instance=self.ins) tables = new_engine.get_all_tables(db_name='archery', schema_name='archery') self.assertListEqual(tables.rows, ['test2']) @patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('id',), ('name',)])) def test_get_all_columns_by_tb(self, _query): new_engine = PgSQLEngine(instance=self.ins) columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2', schema_name='archery') self.assertListEqual(columns.rows, ['id', 'name']) @patch('sql.engines.pgsql.PgSQLEngine.query', return_value=ResultSet(rows=[('postgres',), ('archery',), ('template1',), ('template0',)])) def test_describe_table(self, _query): new_engine = PgSQLEngine(instance=self.ins) describe = new_engine.describe_table(db_name='archery', schema_name='archery', tb_name='text') self.assertIsInstance(describe, ResultSet) def test_query_check_disable_sql(self): sql = "update xxx set a=1 " new_engine = PgSQLEngine(instance=self.ins) check_result = new_engine.query_check(db_name='archery', sql=sql) self.assertDictEqual(check_result, {'msg': '不支持的查询语法类型!', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False}) def test_query_check_star_sql(self): sql = "select * from xx " new_engine = PgSQLEngine(instance=self.ins) check_result = new_engine.query_check(db_name='archery', sql=sql) self.assertDictEqual(check_result, {'msg': 'SQL语句中含有 * ', 'bad_query': False, 'filtered_sql': sql.strip(), 'has_star': True}) def test_filter_sql_with_delimiter(self): sql = "select * from xx;" new_engine = PgSQLEngine(instance=self.ins) check_result = new_engine.filter_sql(sql=sql, limit_num=100) self.assertEqual(check_result, "select * from xx limit 100;") def test_filter_sql_without_delimiter(self): sql = "select * from xx" new_engine = PgSQLEngine(instance=self.ins) check_result = new_engine.filter_sql(sql=sql, limit_num=100) self.assertEqual(check_result, "select * from xx limit 100;") def test_filter_sql_with_limit(self): sql = "select * from xx limit 10" new_engine = PgSQLEngine(instance=self.ins) check_result = new_engine.filter_sql(sql=sql, limit_num=1) self.assertEqual(check_result, "select * from xx limit 10;") def test_query_masking(self): query_result = ResultSet() new_engine = PgSQLEngine(instance=self.ins) masking_result = new_engine.query_masking(db_name=0, sql='', resultset=query_result) self.assertEqual(masking_result, query_result) def test_execute_check_select_sql(self): sql = 'select * from user;' row = ReviewResult(id=1, errlevel=2, stagestatus='驳回不支持语句', errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!', sql=sql) new_engine = PgSQLEngine(instance=self.ins) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) def test_execute_check_critical_sql(self): self.sys_config.set('critical_ddl_regex', '^|update') self.sys_config.get_all_config() sql = 'update user set id=1' row = ReviewResult(id=1, errlevel=2, stagestatus='驳回高危SQL', errormessage='禁止提交匹配' + '^|update' + '条件的语句!', sql=sql) new_engine = PgSQLEngine(instance=self.ins) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) def test_execute_check_normal_sql(self): self.sys_config.purge() sql = 'alter table tb set id=1' row = ReviewResult(id=1, errlevel=0, stagestatus='Audit completed', errormessage='None', sql=sql, affected_rows=0, execute_time=0, ) new_engine = PgSQLEngine(instance=self.ins) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) @patch('psycopg2.connect.cursor.execute') @patch('psycopg2.connect.cursor') @patch('psycopg2.connect') def test_execute_workflow_success(self, _conn, _cursor, _execute): sql = 'update user set id=1' row = ReviewResult(id=1, errlevel=0, stagestatus='Execute Successfully', errormessage='None', sql=sql, affected_rows=0, execute_time=0) wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql) new_engine = PgSQLEngine(instance=self.ins) execute_result = new_engine.execute_workflow(workflow=wf) self.assertIsInstance(execute_result, ReviewSet) self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys()) @patch('psycopg2.connect.cursor.execute') @patch('psycopg2.connect.cursor') @patch('psycopg2.connect', return_value=RuntimeError) def test_execute_workflow_exception(self, _conn, _cursor, _execute): sql = 'update user set id=1' row = ReviewResult(id=1, errlevel=2, stagestatus='Execute Failed', errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}', sql=sql, affected_rows=0, execute_time=0, ) wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql) with self.assertRaises(AttributeError): new_engine = PgSQLEngine(instance=self.ins) execute_result = new_engine.execute_workflow(workflow=wf) self.assertIsInstance(execute_result, ReviewSet) self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys()) class TestModel(TestCase): def setUp(self): pass def tearDown(self): pass def test_result_set_rows_shadow(self): # 测试默认值为空列表的坑 # 如果默认值是空列表,又使用的是累加的方法更新,会导致残留上次的列表 result_set1 = ResultSet() for i in range(10): result_set1.rows += [i] brand_new_result_set = ResultSet() self.assertEqual(brand_new_result_set.rows, []) review_set1 = ReviewSet() for i in range(10): review_set1.rows += [i] brand_new_review_set = ReviewSet() self.assertEqual(brand_new_review_set.rows, []) class TestInception(TestCase): def setUp(self): self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql', host='some_host', port=3306, user='ins_user', password='some_str') self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='inception', host='some_host', port=6669) self.wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=self.wf) def tearDown(self): self.ins.delete() self.ins_inc.delete() SqlWorkflow.objects.all().delete() SqlWorkflowContent.objects.all().delete() @patch('MySQLdb.connect') def test_get_connection(self, _connect): new_engine = InceptionEngine() new_engine.get_connection() _connect.assert_called_once() @patch('MySQLdb.connect') def test_get_backup_connection(self, _connect): new_engine = InceptionEngine() new_engine.get_backup_connection() _connect.assert_called_once() @patch('sql.engines.inception.InceptionEngine.query') def test_execute_check_normal_sql(self, _query): sql = 'update user set id=100' row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', ''] _query.return_value = ResultSet(full_sql=sql, rows=[row]) new_engine = InceptionEngine() check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql) self.assertIsInstance(check_result, ReviewSet) @patch('sql.engines.inception.InceptionEngine.query') def test_execute_exception(self, _query): sql = 'update user set id=100' row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', ''] column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1'] _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list) new_engine = InceptionEngine() execute_result = new_engine.execute(workflow=self.wf) self.assertIsInstance(execute_result, ReviewSet) @patch('sql.engines.inception.InceptionEngine.query') def test_execute_finish(self, _query): sql = 'update user set id=100' row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', ''] column_list = ['ID', 'stage', 'errlevel', 'stagestatus', 'errormessage', 'SQL', 'Affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1'] _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list) new_engine = InceptionEngine() execute_result = new_engine.execute(workflow=self.wf) self.assertIsInstance(execute_result, ReviewSet) @patch('MySQLdb.connect.cursor.execute') @patch('MySQLdb.connect.cursor') @patch('MySQLdb.connect') def test_query(self, _conn, _cursor, _execute): _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)] new_engine = InceptionEngine() query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100) self.assertIsInstance(query_result, ResultSet) @patch('MySQLdb.connect.cursor.execute') @patch('MySQLdb.connect.cursor') @patch('MySQLdb.connect') def test_query_not_limit(self, _conn, _cursor, _execute): _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)] new_engine = InceptionEngine(instance=self.ins) query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0) self.assertIsInstance(query_result, ResultSet) @patch('sql.engines.inception.InceptionEngine.query') def test_query_print(self, _query): sql = 'update user set id=100' row = [1, 'select * from sql_instance limit 100', 0, '{"command":"select","select_list":[{"type":"FIELD_ITEM","field":"*"}],"table_ref":[{"db":"archery","table":"sql_instance"}],"limit":{"limit":[{"type":"INT_ITEM","value":"100"}]}}', 'None'] column_list = ['ID', 'statement', 'errlevel', 'query_tree', 'errmsg'] _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list) new_engine = InceptionEngine() print_result = new_engine.query_print(self.ins, db_name=None, sql=sql) self.assertDictEqual(print_result, json.loads(_repair_json_str(row[3]))) @patch('MySQLdb.connect') def test_get_rollback_list(self, _connect): self.wf.sqlworkflowcontent.execute_result = """[{ "id": 1, "stage": "RERUN", "errlevel": 0, "stagestatus": "Execute Successfully", "errormessage": "None", "sql": "use archer_test", "affected_rows": 0, "sequence": "'1554135032_13038_0'", "backup_dbname": "None", "execute_time": "0.000", "sqlsha1": "", "actual_affected_rows": 0 }, { "id": 2, "stage": "EXECUTED", "errlevel": 0, "stagestatus": "Execute Successfully Backup successfully", "errormessage": "None", "sql": "insert into tt1 (user_name)values('A'),('B'),('C')", "affected_rows": 3, "sequence": "'1554135032_13038_1'", "backup_dbname": "mysql_3306_archer_test", "execute_time": "0.000", "sqlsha1": "", "actual_affected_rows": 3 }]""" self.wf.sqlworkflowcontent.save() new_engine = InceptionEngine() new_engine.get_rollback(self.wf) @patch('sql.engines.inception.InceptionEngine.query') def test_osc_get(self, _query): new_engine = InceptionEngine() command = 'get' sqlsha1 = 'xxxxx' sql = f"inception get osc_percent '{sqlsha1}';" _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[]) new_engine.osc_control(sqlsha1=sqlsha1, command=command) _query.assert_called_once_with(sql=sql) @patch('sql.engines.inception.InceptionEngine.query') def test_osc_kill(self, _query): new_engine = InceptionEngine() command = 'kill' sqlsha1 = 'xxxxx' sql = f"inception stop alter '{sqlsha1}';" _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[]) new_engine.osc_control(sqlsha1=sqlsha1, command=command) _query.assert_called_once_with(sql=sql) @patch('sql.engines.inception.InceptionEngine.query') def test_osc_not_support(self, _query): new_engine = InceptionEngine() command = 'stop' sqlsha1 = 'xxxxx' sql = f"inception stop alter '{sqlsha1}';" _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[]) with self.assertRaisesMessage(ValueError, 'pt-osc不支持暂停和恢复,需要停止执行请使用终止按钮!'): new_engine.osc_control(sqlsha1=sqlsha1, command=command) @patch('sql.engines.inception.InceptionEngine.query') def test_get_variables(self, _query): new_engine = InceptionEngine(instance=self.ins_inc) new_engine.get_variables() sql = f"inception get variables;" _query.assert_called_once_with(sql=sql) @patch('sql.engines.inception.InceptionEngine.query') def test_get_variables_filter(self, _query): new_engine = InceptionEngine(instance=self.ins_inc) new_engine.get_variables(variables=['inception_osc_on']) sql = f"inception get variables 'inception_osc_on';" _query.assert_called_once_with(sql=sql) @patch('sql.engines.inception.InceptionEngine.query') def test_set_variable(self, _query): new_engine = InceptionEngine(instance=self.ins) new_engine.set_variable('inception_osc_on', 'on') _query.assert_called_once_with(sql="inception set inception_osc_on=on;") class TestGoInception(TestCase): def setUp(self): self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mysql', host='some_host', port=3306, user='ins_user', password='some_str') self.ins_inc = Instance.objects.create(instance_name='some_ins_inc', type='slave', db_type='goinception', host='some_host', port=4000) self.wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=self.wf) def tearDown(self): self.ins.delete() self.ins_inc.delete() SqlWorkflow.objects.all().delete() SqlWorkflowContent.objects.all().delete() @patch('MySQLdb.connect') def test_get_connection(self, _connect): new_engine = GoInceptionEngine() new_engine.get_connection() _connect.assert_called_once() @patch('sql.engines.goinception.GoInceptionEngine.query') def test_execute_check_normal_sql(self, _query): sql = 'update user set id=100' row = [1, 'CHECKED', 0, 'Audit completed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', ''] _query.return_value = ResultSet(full_sql=sql, rows=[row]) new_engine = GoInceptionEngine() check_result = new_engine.execute_check(instance=self.ins, db_name=0, sql=sql) self.assertIsInstance(check_result, ReviewSet) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_execute_exception(self, _query): sql = 'update user set id=100' row = [1, 'CHECKED', 1, 'Execute failed', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', ''] column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql', 'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time'] _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list) new_engine = GoInceptionEngine() execute_result = new_engine.execute(workflow=self.wf) self.assertIsInstance(execute_result, ReviewSet) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_execute_finish(self, _query): sql = 'update user set id=100' row = [1, 'CHECKED', 0, 'Execute Successfully', 'None', 'use archery', 0, "'0_0_0'", 'None', '0', '', ''] column_list = ['order_id', 'stage', 'error_level', 'stage_status', 'error_message', 'sql', 'affected_rows', 'sequence', 'backup_dbname', 'execute_time', 'sqlsha1', 'backup_time'] _query.return_value = ResultSet(full_sql=sql, rows=[row], column_list=column_list) new_engine = GoInceptionEngine() execute_result = new_engine.execute(workflow=self.wf) self.assertIsInstance(execute_result, ReviewSet) @patch('MySQLdb.connect.cursor.execute') @patch('MySQLdb.connect.cursor') @patch('MySQLdb.connect') def test_query(self, _conn, _cursor, _execute): _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)] new_engine = GoInceptionEngine() query_result = new_engine.query(db_name=0, sql='select 1', limit_num=100) self.assertIsInstance(query_result, ResultSet) @patch('MySQLdb.connect.cursor.execute') @patch('MySQLdb.connect.cursor') @patch('MySQLdb.connect') def test_query_not_limit(self, _conn, _cursor, _execute): _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)] new_engine = GoInceptionEngine(instance=self.ins) query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0) self.assertIsInstance(query_result, ResultSet) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_osc_get(self, _query): new_engine = GoInceptionEngine() command = 'get' sqlsha1 = 'xxxxx' sql = f"inception get osc_percent '{sqlsha1}';" _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[]) new_engine.osc_control(sqlsha1=sqlsha1, command=command) _query.assert_called_once_with(sql=sql) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_osc_pause(self, _query): new_engine = GoInceptionEngine() command = 'pause' sqlsha1 = 'xxxxx' sql = f"inception {command} osc '{sqlsha1}';" _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[]) new_engine.osc_control(sqlsha1=sqlsha1, command=command) _query.assert_called_once_with(sql=sql) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_osc_resume(self, _query): new_engine = GoInceptionEngine() command = 'resume' sqlsha1 = 'xxxxx' sql = f"inception {command} osc '{sqlsha1}';" _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[]) new_engine.osc_control(sqlsha1=sqlsha1, command=command) _query.assert_called_once_with(sql=sql) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_osc_kill(self, _query): new_engine = GoInceptionEngine() command = 'kill' sqlsha1 = 'xxxxx' sql = f"inception kill osc '{sqlsha1}';" _query.return_value = ResultSet(full_sql=sql, rows=[], column_list=[]) new_engine.osc_control(sqlsha1=sqlsha1, command=command) _query.assert_called_once_with(sql=sql) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_get_variables(self, _query): new_engine = GoInceptionEngine(instance=self.ins_inc) new_engine.get_variables() sql = f"inception get variables;" _query.assert_called_once_with(sql=sql) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_get_variables_filter(self, _query): new_engine = GoInceptionEngine(instance=self.ins_inc) new_engine.get_variables(variables=['inception_osc_on']) sql = f"inception get variables like 'inception_osc_on';" _query.assert_called_once_with(sql=sql) @patch('sql.engines.goinception.GoInceptionEngine.query') def test_set_variable(self, _query): new_engine = GoInceptionEngine(instance=self.ins) new_engine.set_variable('inception_osc_on', 'on') _query.assert_called_once_with(sql="inception set inception_osc_on=on;") class TestOracle(TestCase): """Oracle 测试""" def setUp(self): self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='oracle', host='some_host', port=3306, user='ins_user', password='some_str', sid='some_id') self.wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=self.wf) self.sys_config = SysConfig() def tearDown(self): self.ins.delete() self.sys_config.purge() SqlWorkflow.objects.all().delete() SqlWorkflowContent.objects.all().delete() @patch('cx_Oracle.makedsn') @patch('cx_Oracle.connect') def test_get_connection(self, _connect, _makedsn): # 填写 sid 测试 new_engine = OracleEngine(self.ins) new_engine.get_connection() _connect.assert_called_once() _makedsn.assert_called_once() # 填写 service_name 测试 _connect.reset_mock() _makedsn.reset_mock() self.ins.service_name = 'some_service' self.ins.sid = '' self.ins.save() new_engine = OracleEngine(self.ins) new_engine.get_connection() _connect.assert_called_once() _makedsn.assert_called_once() # 都不填写, 检测 ValueError _connect.reset_mock() _makedsn.reset_mock() self.ins.service_name = '' self.ins.sid = '' self.ins.save() new_engine = OracleEngine(self.ins) with self.assertRaises(ValueError): new_engine.get_connection() @patch('cx_Oracle.connect') def test_engine_base_info(self, _conn): new_engine = OracleEngine(instance=self.ins) self.assertEqual(new_engine.name, 'Oracle') self.assertEqual(new_engine.info, 'Oracle engine') _conn.return_value.version = '12.1.0.2.0' self.assertTupleEqual(new_engine.server_version, ('12', '1', '0')) @patch('cx_Oracle.connect.cursor.execute') @patch('cx_Oracle.connect.cursor') @patch('cx_Oracle.connect') def test_query(self, _conn, _cursor, _execute): _conn.return_value.cursor.return_value.fetchmany.return_value = [(1,)] new_engine = OracleEngine(instance=self.ins) query_result = new_engine.query(db_name='archery', sql='select 1', limit_num=100) self.assertIsInstance(query_result, ResultSet) self.assertListEqual(query_result.rows, [(1,)]) @patch('cx_Oracle.connect.cursor.execute') @patch('cx_Oracle.connect.cursor') @patch('cx_Oracle.connect') def test_query_not_limit(self, _conn, _cursor, _execute): _conn.return_value.cursor.return_value.fetchall.return_value = [(1,)] new_engine = OracleEngine(instance=self.ins) query_result = new_engine.query(db_name=0, sql='select 1', limit_num=0) self.assertIsInstance(query_result, ResultSet) self.assertListEqual(query_result.rows, [(1,)]) @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)])) def test_get_all_databases(self, _query): new_engine = OracleEngine(instance=self.ins) dbs = new_engine.get_all_databases() self.assertListEqual(dbs.rows, ['archery']) @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('AUD_SYS',), ('archery',), ('ANONYMOUS',)])) def test__get_all_databases(self, _query): new_engine = OracleEngine(instance=self.ins) dbs = new_engine._get_all_databases() self.assertListEqual(dbs.rows, ['AUD_SYS', 'archery', 'ANONYMOUS']) @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('archery',)])) def test__get_all_instances(self, _query): new_engine = OracleEngine(instance=self.ins) dbs = new_engine._get_all_instances() self.assertListEqual(dbs.rows, ['archery']) @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('ANONYMOUS',), ('archery',), ('SYSTEM',)])) def test_get_all_schemas(self, _query): new_engine = OracleEngine(instance=self.ins) schemas = new_engine._get_all_schemas() self.assertListEqual(schemas.rows, ['archery']) @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('test',), ('test2',)])) def test_get_all_tables(self, _query): new_engine = OracleEngine(instance=self.ins) tables = new_engine.get_all_tables(db_name='archery') self.assertListEqual(tables.rows, ['test2']) @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('id',), ('name',)])) def test_get_all_columns_by_tb(self, _query): new_engine = OracleEngine(instance=self.ins) columns = new_engine.get_all_columns_by_tb(db_name='archery', tb_name='test2') self.assertListEqual(columns.rows, ['id', 'name']) @patch('sql.engines.oracle.OracleEngine.query', return_value=ResultSet(rows=[('archery',), ('template1',), ('template0',)])) def test_describe_table(self, _query): new_engine = OracleEngine(instance=self.ins) describe = new_engine.describe_table(db_name='archery', tb_name='text') self.assertIsInstance(describe, ResultSet) def test_query_check_disable_sql(self): sql = "update xxx set a=1;" new_engine = OracleEngine(instance=self.ins) check_result = new_engine.query_check(db_name='archery', sql=sql) self.assertDictEqual(check_result, {'msg': '不支持语法!', 'bad_query': True, 'filtered_sql': sql.strip(';'), 'has_star': False}) @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0}) def test_query_check_star_sql(self, _explain_check): sql = "select * from xx;" new_engine = OracleEngine(instance=self.ins) check_result = new_engine.query_check(db_name='archery', sql=sql) self.assertDictEqual(check_result, {'msg': '禁止使用 * 关键词\n', 'bad_query': False, 'filtered_sql': sql.strip(';'), 'has_star': True}) def test_query_check_IndexError(self): sql = "" new_engine = OracleEngine(instance=self.ins) check_result = new_engine.query_check(db_name='archery', sql=sql) self.assertDictEqual(check_result, {'msg': '没有有效的SQL语句', 'bad_query': True, 'filtered_sql': sql.strip(), 'has_star': False}) @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0}) def test_query_check_plus(self, _explain_check): sql = "select 100+1 from tb;" new_engine = OracleEngine(instance=self.ins) check_result = new_engine.query_check(db_name='archery', sql=sql) self.assertDictEqual(check_result, {'msg': '禁止使用 + 关键词\n', 'bad_query': True, 'filtered_sql': sql.strip(';'), 'has_star': False}) def test_filter_sql_with_delimiter(self): sql = "select * from xx;" new_engine = OracleEngine(instance=self.ins) check_result = new_engine.filter_sql(sql=sql, limit_num=100) self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100") def test_filter_sql_with_delimiter_and_where(self): sql = "select * from xx where id>1;" new_engine = OracleEngine(instance=self.ins) check_result = new_engine.filter_sql(sql=sql, limit_num=100) self.assertEqual(check_result, "select sql_audit.* from (select * from xx where id>1) sql_audit where rownum <= 100") def test_filter_sql_without_delimiter(self): sql = "select * from xx;" new_engine = OracleEngine(instance=self.ins) check_result = new_engine.filter_sql(sql=sql, limit_num=100) self.assertEqual(check_result, "select sql_audit.* from (select * from xx) sql_audit where rownum <= 100") def test_filter_sql_with_limit(self): sql = "select * from xx limit 10;" new_engine = OracleEngine(instance=self.ins) check_result = new_engine.filter_sql(sql=sql, limit_num=1) self.assertEqual(check_result, "select sql_audit.* from (select * from xx limit 10) sql_audit where rownum <= 1") def test_query_masking(self): query_result = ResultSet() new_engine = OracleEngine(instance=self.ins) masking_result = new_engine.query_masking(schema_name='', sql='select 1', resultset=query_result) self.assertEqual(masking_result, query_result) def test_execute_check_select_sql(self): sql = 'select * from user;' row = ReviewResult(id=1, errlevel=2, stagestatus='驳回不支持语句', errormessage='仅支持DML和DDL语句,查询语句请使用SQL查询功能!', sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower')) new_engine = OracleEngine(instance=self.ins) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) def test_execute_check_critical_sql(self): self.sys_config.set('critical_ddl_regex', '^|update') self.sys_config.get_all_config() sql = 'update user set id=1' row = ReviewResult(id=1, errlevel=2, stagestatus='驳回高危SQL', errormessage='禁止提交匹配' + '^|update' + '条件的语句!', sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower')) new_engine = OracleEngine(instance=self.ins) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) @patch('sql.engines.oracle.OracleEngine.explain_check', return_value={'msg': '', 'rows': 0}) @patch('sql.engines.oracle.OracleEngine.get_sql_first_object_name', return_value='tb') @patch('sql.engines.oracle.OracleEngine.object_name_check', return_value=True) def test_execute_check_normal_sql(self, _explain_check, _get_sql_first_object_name, _object_name_check): self.sys_config.purge() sql = 'alter table tb set id=1' row = ReviewResult(id=1, errlevel=1, stagestatus='当前平台,此语法不支持审核!', errormessage='当前平台,此语法不支持审核!', sql=sqlparse.format(sql, strip_comments=True, reindent=True, keyword_case='lower'), affected_rows=0, execute_time=0, stmt_type='SQL', object_owner='', object_type='', object_name='', ) new_engine = OracleEngine(instance=self.ins) check_result = new_engine.execute_check(db_name='archery', sql=sql) self.assertIsInstance(check_result, ReviewSet) self.assertEqual(check_result.rows[0].__dict__, row.__dict__) @patch('cx_Oracle.connect.cursor.execute') @patch('cx_Oracle.connect.cursor') @patch('cx_Oracle.connect') def test_execute_workflow_success(self, _conn, _cursor, _execute): sql = 'update user set id=1' review_row = ReviewResult(id=1, errlevel=0, stagestatus='Execute Successfully', errormessage='None', sql=sql, affected_rows=0, execute_time=0, stmt_type='SQL', object_owner='', object_type='', object_name='', ) execute_row = ReviewResult(id=1, errlevel=0, stagestatus='Execute Successfully', errormessage='None', sql=sql, affected_rows=0, execute_time=0) wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql, review_content=ReviewSet(rows=[review_row]).json()) new_engine = OracleEngine(instance=self.ins) execute_result = new_engine.execute_workflow(workflow=wf) self.assertIsInstance(execute_result, ReviewSet) self.assertEqual(execute_result.rows[0].__dict__.keys(), execute_row.__dict__.keys()) @patch('cx_Oracle.connect.cursor.execute') @patch('cx_Oracle.connect.cursor') @patch('cx_Oracle.connect', return_value=RuntimeError) def test_execute_workflow_exception(self, _conn, _cursor, _execute): sql = 'update user set id=1' row = ReviewResult(id=1, errlevel=2, stagestatus='Execute Failed', errormessage=f'异常信息:{f"Oracle命令执行报错,语句:{sql}"}', sql=sql, affected_rows=0, execute_time=0, stmt_type='SQL', object_owner='', object_type='', object_name='', ) wf = SqlWorkflow.objects.create( workflow_name='some_name', group_id=1, group_name='g1', engineer_display='', audit_auth_groups='some_group', create_time=datetime.now() - timedelta(days=1), status='workflow_finish', is_backup=True, instance=self.ins, db_name='some_db', syntax_type=1 ) SqlWorkflowContent.objects.create(workflow=wf, sql_content=sql, review_content=ReviewSet(rows=[row]).json()) with self.assertRaises(AttributeError): new_engine = OracleEngine(instance=self.ins) execute_result = new_engine.execute_workflow(workflow=wf) self.assertIsInstance(execute_result, ReviewSet) self.assertEqual(execute_result.rows[0].__dict__.keys(), row.__dict__.keys()) class MongoTest(TestCase): def setUp(self) -> None: self.ins = Instance.objects.create(instance_name='some_ins', type='slave', db_type='mongo', host='some_host', port=3306, user='ins_user') self.engine = MongoEngine(instance=self.ins) def tearDown(self) -> None: self.ins.delete() @patch('sql.engines.mongo.pymongo') def test_get_connection(self, mock_pymongo): _ = self.engine.get_connection() mock_pymongo.MongoClient.assert_called_once() @patch('sql.engines.mongo.MongoEngine.get_connection') def test_query(self, mock_get_connection): # TODO 正常查询还没做 test_sql = """{"collection": "job","count": true}""" self.assertIsInstance(self.engine.query('archery', test_sql), ResultSet) def test_query_check(self): test_sql = """{"collection": "job","count": true}""" check_result = self.engine.query_check(sql=test_sql) self.assertEqual(False, check_result.get('bad_query')) @patch('sql.engines.mongo.MongoEngine.get_connection') def test_get_all_databases(self, mock_get_connection): db_list = self.engine.get_all_databases() self.assertIsInstance(db_list, ResultSet) # mock_get_connection.return_value.list_database_names.assert_called_once() @patch('sql.engines.mongo.MongoEngine.get_connection') def test_get_all_tables(self, mock_get_connection): mock_db = Mock() # 下面是查表示例返回结果 mock_db.list_collection_names.return_value = ['u', 'v', 'w'] mock_get_connection.return_value = {'some_db': mock_db} table_list = self.engine.get_all_tables('some_db') mock_db.list_collection_names.assert_called_once() self.assertEqual(table_list.rows, ['u', 'v', 'w'])
object(MysqlEngine, 'query') def test_get_variables_not_filter(self, _query): new_engine = MysqlEngine(instance=self.ins1) new_engine.get_variables() _query.assert_called_once() @patch('MySQLdb.co
hash.py
import hashlib,time,sys def
(hashType,userHash,wordlist): if hashType == "md5" or hashType == "MD5": h = hashlib.md5 elif hashType == "sha1" or hashType == "SHA1": h = hashlib.sha1 elif hashType == "sha224" or hashType == "SHA224": h = hashlib.sha224 elif hashType == "sha256" or hashType == "SHA256": h = hashlib.sha256 elif hashType == "sha384" or hashType == "SHA384": h = hashlib.sha384 elif hashType == "sha512" or hashType == "SHA512": h = hashlib.sha512 else: return "Sorry!, We cant BruteForce %s , Please make sure you entered HashType correctly." % hashType exit() verbose = True start = time.time() with open(wordlist, "rU") as infile: for line in infile: line = line.strip() lineHash = h(line).hexdigest() if (verbose == True): sys.stdout.write('\r' + str(line) + ' ' * 20) sys.stdout.flush() if (str(lineHash) == str(userHash.lower())): end = time.time() return "\n\n[+] HASH IS CRACKED SUCCESSFUL : [ %s ]" % line return "[*] Time Taken: %s seconds" % round((end - start), 2) exit() end = time.time() return "\n\n[-]Cracking Failed" return "[*]Reached end of wordlist" return "[*] Time Taken: %s seconds" % round((end - start), 2) exit()
hash
os.rs
extern crate chicon; use std::io::prelude::*; use std::io::SeekFrom; use chicon::{FileSystem, OsFileSystem}; fn main()
{ let os_fs = OsFileSystem::new(); let mut cargo_file = os_fs.open_file("Cargo.toml").unwrap(); cargo_file.seek(SeekFrom::Start(1)).unwrap(); let mut buffer: String = String::new(); { cargo_file.read_to_string(&mut buffer).unwrap(); } println!("here {:?}", buffer); { cargo_file.read_to_string(&mut buffer).unwrap(); } println!("here {:?}", buffer); }
bars_advanced_stacked_multiple.js
/* ------------------------------------------------------------------------------ * * # D3.js - stacked and multiple bars * * Demo d3.js bar chart setup with animated transition between stacked and multiple bars * * Version: 1.0 * Latest update: August 1, 2015 * * ---------------------------------------------------------------------------- */ $(function () { // Create Uniform checkbox $(".stacked-multiple").uniform({ radioClass: 'choice' }); // Initialize chart stackedMultiples('#d3-bar-stacked-multiples', 400); // Chart setup function stackedMultiples(element, height) { // Basic setup // ------------------------------ // Define main variables var d3Container = d3.select(element), margin = {top: 5, right: 20, bottom: 20, left: 60}, width = d3Container.node().getBoundingClientRect().width - margin.left - margin.right, height = height - margin.top - margin.bottom - 5; // Format data var parseDate = d3.time.format("%Y-%m").parse, formatYear = d3.format("02d"), formatDate = function(d) { return "Q" + ((d.getMonth() / 3 | 0) + 1) + formatYear(d.getFullYear() % 100); }; // Construct scales // ------------------------------ // Horizontal var x = d3.scale.ordinal() .rangeRoundBands([0, width], .2); // Vertical var y = d3.scale.ordinal() .rangeRoundBands([height, 0]); var y0 = d3.scale.ordinal() .rangeRoundBands([height, 0]); var y1 = d3.scale.linear(); // Colors var color = d3.scale.category20(); // Create axes // ------------------------------ // Horizontal var xAxis = d3.svg.axis() .scale(x) .orient("bottom") .tickFormat(formatDate); // Vertical var yAxis = d3.svg.axis() .scale(y) .orient("left") .ticks(10, "%"); // Create chart // ------------------------------ // Add SVG element var container = d3Container.append("svg"); // Add SVG group var svg = container .attr("width", width + margin.left + margin.right) .attr("height", height + margin.top + margin.bottom) .append("g") .attr("transform", "translate(" + margin.left + "," + margin.top + ")"); // Construct chart layout // ------------------------------ // Nest var nest = d3.nest() .key(function(d) { return d.browser; }); // Stack var stack = d3.layout.stack() .values(function(d) { return d.values; }) .x(function(d) { return d.date; }) .y(function(d) { return d.value; }) .out(function(d, y0) { d.valueOffset = y0; }); // Load data // ------------------------------ d3.tsv("assets/demo_data/d3/bars/bars_stacked_multiple.tsv", function(error, data) { // Pull out values data.forEach(function(d) { d.date = parseDate(d.date); d.value = +d.value; }); // Nest values var dataByGroup = nest.entries(data); // Set input domains // ------------------------------ // Stack stack(dataByGroup); // Horizontal x.domain(dataByGroup[0].values.map(function(d) { return d.date; })); // Vertical y0.domain(dataByGroup.map(function(d) { return d.key; })); y1.domain([0, d3.max(data, function(d) { return d.value; })]).range([y0.rangeBand(), 0]); // // Append chart elements // // Add bars // ------------------------------ // Group bars var group = svg.selectAll(".d3-bar-group") .data(dataByGroup) .enter() .append("g") .attr("class", "d3-bar-group") .attr("transform", function(d) { return "translate(0," + y0(d.key) + ")"; }); // Append text group.append("text") .attr("class", "d3-group-label") .attr("x", -12) .attr("y", function(d) { return y1(d.values[0].value / 2); }) .attr("dy", ".35em") .style("text-anchor", "end") .text(function(d) { return d.key; }); // Add bars group.selectAll(".d3-bar") .data(function(d) { return d.values; }) .enter() .append("rect") .attr("class", "d3-bar") .attr("x", function(d) { return x(d.date); }) .attr("y", function(d) { return y1(d.value); }) .attr("width", x.rangeBand()) .attr("height", function(d) { return y0.rangeBand() - y1(d.value); }) .style("fill", function(d) { return color(d.browser); }); // Append axes // ------------------------------ // Horizontal group.filter(function(d, i) { return !i; }).append("g") .attr("class", "d3-axis d3-axis-horizontal d3-axis-strong") .attr("transform", "translate(0," + (y0.rangeBand() + 1) + ")") .call(xAxis); // Vertical var verticalAxis = svg.append("g") .attr("class", "d3-axis d3-axis-vertical d3-axis-strong") .call(yAxis); // Appent text label verticalAxis.append("text") .attr('class', 'browser-label') .attr("x", -12) .attr("y", 12) .attr("dy", ".71em") .style("text-anchor", "end") .style("fill", "#999") .style("font-size", 12) .text("Browser"); // Setup layout change // ------------------------------ // Add change event d3.selectAll(".stacked-multiple").on("change", change); // Change value on page load var timeout = setTimeout(function() { d3.select("input[value=\"stacked\"]").property("checked", true).each(change); $.uniform.update(); }, 2000); // Change function function change() { clearTimeout(timeout); if (this.value === "multiples") transitionMultiples(); else transitionStacked(); } // Transition to multiples function
() { var t = svg.transition().duration(750), g = t.selectAll(".d3-bar-group").attr("transform", function(d) { return "translate(0," + y0(d.key) + ")"; }); g.selectAll(".d3-bar").attr("y", function(d) { return y1(d.value); }); g.select(".d3-group-label").attr("y", function(d) { return y1(d.values[0].value / 2); }) } // Transition to stacked function transitionStacked() { var t = svg.transition().duration(750), g = t.selectAll(".d3-bar-group").attr("transform", "translate(0," + y0(y0.domain()[0]) + ")"); g.selectAll(".d3-bar").attr("y", function(d) { return y1(d.value + d.valueOffset) }); g.select(".d3-group-label").attr("y", function(d) { return y1(d.values[0].value / 2 + d.values[0].valueOffset); }) } }); // Resize chart // ------------------------------ // Call function on window resize $(window).on('resize', resize); // Call function on sidebar width change $('.sidebar-control').on('click', resize); // Resize function // // Since D3 doesn't support SVG resize by default, // we need to manually specify parts of the graph that need to // be updated on window resize function resize() { // Layout variables width = d3Container.node().getBoundingClientRect().width - margin.left - margin.right; // Layout // ------------------------- // Main svg width container.attr("width", width + margin.left + margin.right); // Width of appended group svg.attr("width", width + margin.left + margin.right); // Axes // ------------------------- // Horizontal range x.rangeRoundBands([0, width], .2); // Horizontal axis svg.selectAll('.d3-axis-horizontal').call(xAxis); // Chart elements // ------------------------- // Line path svg.selectAll('.d3-bar').attr("x", function(d) { return x(d.date); }).attr("width", x.rangeBand()); } } });
transitionMultiples
InvalidObjectId.ts
import BadRequest from '../../../errors/api/BadRequest'; export default class
extends BadRequest { public constructor() { super('Invalid object id'); } }
InvalidObjectId
test_forms.py
import pytest from ants.users.forms import UserCreationForm from ants.users.tests.factories import UserFactory pytestmark = pytest.mark.django_db class TestUserCreationForm: def test_clean_username(self): # A user with proto_user params does not exist yet.
proto_user = UserFactory.build() form = UserCreationForm( { "username": proto_user.username, "password1": proto_user._password, "password2": proto_user._password, } ) assert form.is_valid() assert form.clean_username() == proto_user.username # Creating a user. form.save() # The user with proto_user params already exists, # hence cannot be created. form = UserCreationForm( { "username": proto_user.username, "password1": proto_user._password, "password2": proto_user._password, } ) assert not form.is_valid() assert len(form.errors) == 1 assert "username" in form.errors
deprecated_label.go
/* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package jindofsx import ( "github.com/fluid-cloudnative/fluid/pkg/common/deprecated" apierrs "k8s.io/apimachinery/pkg/api/errors" )
} func (e *JindoFSxEngine) HasDeprecatedCommonLabelname() (deprecated bool, err error) { // return deprecated.LabelAnnotationStorageCapacityPrefix + e.namespace + "-" + e.name var ( workerName string = e.getWorkerName() namespace string = e.namespace ) // runtime, err := e.getRuntime() // if err != nil { // return // } workers, err := e.getDaemonset(workerName, namespace) if err != nil { if apierrs.IsNotFound(err) { e.Log.Info("Workers with deprecated label not found") deprecated = false err = nil return } e.Log.Error(err, "Failed to get worker", "workerName", workerName) return deprecated, err } nodeSelectors := workers.Spec.Template.Spec.NodeSelector e.Log.Info("The current node selectors for worker", "workerName", workerName, "nodeSelector", nodeSelectors) if _, deprecated = nodeSelectors[e.getDeprecatedCommonLabelname()]; deprecated { // e.Log.Info("the deprecated node selector exists", "nodeselector", e.getDeprecatedCommonLabelname()) } else { e.Log.Info("The deprecated node selector doesn't exist", "nodeselector", e.getDeprecatedCommonLabelname()) } return }
func (e *JindoFSxEngine) getDeprecatedCommonLabelname() string { return deprecated.LabelAnnotationStorageCapacityPrefix + e.namespace + "-" + e.name
plugins_test.go
/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package algorithmprovider import ( "testing" "github.com/Microsoft/KubeGPU/kube-scheduler/pkg/factory" utilfeature "k8s.io/apiserver/pkg/util/feature" ) var ( algorithmProviderNames = []string{ factory.DefaultProvider, } ) func TestDefaultConfigExists(t *testing.T) { p, err := factory.GetAlgorithmProvider(factory.DefaultProvider) if err != nil { t.Errorf("error retrieving default provider: %v", err) } if p == nil { t.Error("algorithm provider config should not be nil") } if len(p.FitPredicateKeys) == 0 { t.Error("default algorithm provider shouldn't have 0 fit predicates") } } func TestAlgorithmProviders(t *testing.T) { for _, pn := range algorithmProviderNames { p, err := factory.GetAlgorithmProvider(pn) if err != nil { t.Errorf("error retrieving '%s' provider: %v", pn, err) break } if len(p.PriorityFunctionKeys) == 0 { t.Errorf("%s algorithm provider shouldn't have 0 priority functions", pn) } for _, pf := range p.PriorityFunctionKeys.List() { if !factory.IsPriorityFunctionRegistered(pf) { t.Errorf("priority function %s is not registered but is used in the %s algorithm provider", pf, pn) } } for _, fp := range p.FitPredicateKeys.List() { if !factory.IsFitPredicateRegistered(fp) { t.Errorf("fit predicate %s is not registered but is used in the %s algorithm provider", fp, pn) } } } } func TestApplyFeatureGates(t *testing.T) { for _, pn := range algorithmProviderNames { p, err := factory.GetAlgorithmProvider(pn) if err != nil { t.Errorf("Error retrieving '%s' provider: %v", pn, err) break } if !p.FitPredicateKeys.Has("CheckNodeCondition") { t.Errorf("Failed to find predicate: 'CheckNodeCondition'") break } if !p.FitPredicateKeys.Has("PodToleratesNodeTaints") { t.Errorf("Failed to find predicate: 'PodToleratesNodeTaints'") break } } // Apply features for algorithm providers. utilfeature.DefaultFeatureGate.Set("TaintNodesByCondition=True") ApplyFeatureGates() for _, pn := range algorithmProviderNames { p, err := factory.GetAlgorithmProvider(pn) if err != nil { t.Errorf("Error retrieving '%s' provider: %v", pn, err) break } if !p.FitPredicateKeys.Has("PodToleratesNodeTaints")
if p.FitPredicateKeys.Has("CheckNodeCondition") { t.Errorf("Unexpected predicate: 'CheckNodeCondition'") break } } }
{ t.Errorf("Failed to find predicate: 'PodToleratesNodeTaints'") break }
lib.rs
/////////////////////////////////////////////////////////////////////////////// // // Copyright 2018-2019 Airalab <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // /////////////////////////////////////////////////////////////////////////////// ///! Substrate API in ROS namespace. use network::{specialization::NetworkSpecialization, NetworkService, ExHashT}; use transaction_pool::txpool::{ChainApi as PoolChainApi, Pool}; pub use substrate_rpc::system::helpers::SystemInfo; use runtime_primitives::traits::Block; use primitives::{Blake2Hasher, H256}; use rosrust::api::error::Error; use std::sync::Arc; use client::Client; pub mod traits; pub mod system; pub mod author; pub mod chain; pub mod state; pub fn start_rpc<B, S, H, F, E, P, A>( system_info: SystemInfo,
service_client: Arc<Client<F, E, <P as PoolChainApi>::Block, A>>, service_transaction_pool: Arc<Pool<P>> ) -> Result<(Vec<rosrust::Service>, impl Future<Output=()>), Error> where B: Block<Hash=H256>, S: NetworkSpecialization<B>, H: ExHashT, F: client::backend::Backend<<P as PoolChainApi>::Block, Blake2Hasher> + Send + Sync + 'static, E: client::CallExecutor<<P as PoolChainApi>::Block, Blake2Hasher> + Send + Sync + 'static, P: PoolChainApi<Hash=H256> + Sync + Send + 'static, P::Block: Block<Hash=H256>, P::Error: 'static, A: Send + Sync + 'static { let system = Arc::new(system::System::new( system_info, service_network, )); let author = Arc::new(author::Author::new( service_client.clone(), service_transaction_pool, )); let chain = Arc::new(chain::Chain::new( service_client.clone(), )); let state = Arc::new(state::State::new( service_client, )); let task = system.start_publishers()?; let services = [ system.start_services()?, author.start_services()?, state.start_services()?, chain.start_services()?, ].concat(); Ok((services, task)) }
service_network: Arc<NetworkService<B, S, H>>,
shell.rs
use diagram::ResponseOption; use diagram::*; /// クライアント1つにつき、1つのシェルを与えます。 /// 行単位です。 /// /// コマンド例 /// /// ``` /// cls /// cd C:\MuzudhoDrive\projects_rust\rust_kifuwarabe_shell /// cargo clippy /// ``` use diagram_player::*; use line_parser::*; use std::any::Any; // https://stackoverflow.com/questions/33687447/how-to-get-a-struct-reference-from-a-boxed-trait use std::io; /// 不具合を取りたいときに真にする。 // const VERBOSE: bool = false; pub const NEXT_EXIT_LABEL: &str = "#next"; /// デフォルトのラベル。 pub const NEWLINE_EXIT_LABEL: &str = "#newline"; pub const ELSE_NODE_LABEL: &str = "#else"; /// コマンドライン文字列。 /// /// # Members /// /// * `line` - コマンドライン文字列の1行全体です。 /// * `line_len` - コマンドライン文字列の1行全体の文字数です。 /// * `groups` - あれば、正規表現の結果を入れておく。 pub struct RequestStruct { pub line: Box<String>, // String型は長さが可変なので、固定長のBoxでラップする。 pub line_len: usize, pub caret: usize, pub groups: Vec<String>, } impl RequestStruct { fn new(line2: Box<String>) -> RequestStruct { let len = line2.chars().count(); RequestStruct { line: line2, line_len: len, caret: 0, groups: Vec::new(), } } } impl Request for RequestStruct { fn as_mut_any(&mut self) -> &mut dyn Any { self } fn get_line(&self) -> &String { &self.line } fn get_line_len(&self) -> usize { self.line_len } fn get_caret(&self) -> usize { self.caret } fn get_groups(&self) -> &Vec<String> { &self.groups } } /// キャレット。本来、文字列解析のカーソル位置だが、ほかの機能も持たされている。 /// - シェルを終了するなど、シェルに対して指示することができる。 /// - また、字句解析の次のノードを指定する役割も持つ。 /// /// # Members /// /// * `starts` - コマンドライン文字列の次のトークンの先頭位置です。 /// * `done_line` - 行の解析を中断するなら真にします。 /// * `option` - シェルに指示を出す。アプリケーション終了、ファイル再読込など。 /// * `exit_label` - 次のノード ラベルです。 pub struct ResponseStruct { pub caret: usize, pub done_line: bool, pub option: ResponseOption, pub exit_label: String, } impl ResponseStruct { fn new() -> ResponseStruct { ResponseStruct { caret: 0, done_line: false, option: ResponseOption::None, exit_label: "".to_string(), } } /// デフォルト値にリセット。 pub fn reset(&mut self) { self.set_caret(0); self.set_done_line(false); self.set_option(ResponseOption::None); self.forward(NEXT_EXIT_LABEL); // デフォルト値にリセット。 } } impl Response for ResponseStruct { fn as_any(&self) -> &dyn Any { self } /// トレイトを実装している方を返すのに使う。 fn as_mut_any(&mut self) -> &mut dyn Any { self } // .rs にハードコーディングして使う。 fn forward(&mut self, exit_label2: &'static str) { self.exit_label = exit_label2.to_string(); } fn set_caret(&mut self, caret2: usize) { self.caret = caret2 } fn set_done_line(&mut self, done_line2: bool) { self.done_line = done_line2 } fn set_option(&mut self, value: ResponseOption) { self.option = value; } } pub type Reader<T> = fn(t: &mut T) -> String; pub fn standard_input_reader<T>(_t: &mut T) -> String { let mut line_string = String::new(); // コマンド プロンプトからの入力があるまで待機します。 io::stdin() .read_line(&mut line_string) .expect("info Failed to read_line"); // OKでなかった場合のエラーメッセージ。 // 末尾の 改行 を除きます。前後の空白も消えます。 line_string.trim().parse().expect("info Failed to parse") } /// シェル。 /// /// # Arguments /// /// * `vec_row` - コマンドを複数行 溜めておくバッファーです。 pub struct Shell<T: 'static> { diagram_player: DiagramPlayer, vec_row: Vec<String>, reader: Reader<T>, } impl<T> Default for Shell<T> { fn default() -> Self { Self::new() } } impl<T: 'static> Shell<T> { pub fn new() -> Shell<T> { Shell { diagram_player: DiagramPlayer::new(), vec_row: Vec::new(), reader: standard_input_reader, } } pub fn enter(&mut self, diagram: &Diagram<T>){ self.diagram_player.enter(&diagram); } /// 状態遷移する。 /// req引数の要らないフォワード。 /// (パーサーのマッチングを省いて、強制的に指定のドアにフォワードする) pub fn forward_force(&mut self, diagram: &Diagram<T>, door_label: &str) { self.diagram_player.forward_force(diagram, door_label) } /// 状態遷移する。 /// req引数の要るフォワード。 /// パーサーのマッチングを用いて、フォワードする。 /// # Returns. /// 正規表現と一致すれば真。 pub fn forward_parse( &mut self, diagram: &Diagram<T>, req: &mut dyn Request, door_label: &str, ) -> bool { self.diagram_player.forward_parse(diagram, req, door_label) } /// 現在ノードのラベル。 pub fn get_current(&self) -> String { self.diagram_player.get_current().to_string() } /// 現在地が遷移図の外か。 pub fn is_out(&self) -> bool { self.diagram_player.is_out() } pub fn set_reader(&mut self, reader2: Reader<T>) { self.reader = reader2; } /// コマンドを1行も入力していなければ真を返します。 pub fn is_empty(&self) -> bool { self.vec_row.len() == 0 } /// コンソール入力以外の方法で、コマンド1行を追加したいときに使います。 /// 行の末尾に改行は付けないでください。 pub fn push_row(&mut self, row: &str) { self.vec_row.push(format!("{}\n", row)); } /// 先頭のコマンド1行をキューから削除して返します。 pub fn pop_row(&mut self) -> Box<String> { Box::new(self.vec_row.pop().unwrap()) } /// コマンドラインの入力受付、および コールバック関数呼出を行います。 /// スレッドはブロックします。 /// 強制終了する場合は、 [Ctrl]+[C] を入力してください。 pub fn run(&mut self, diagram: &mut Diagram<T>, t: &mut T) { loop { // リクエストは、キャレットを更新するのでミュータブル。 let mut req = if self.is_empty() { let line_string = (self.reader)(t); RequestStruct::new(Box::new(line_string)) } else { // バッファーの先頭行です。 RequestStruct::new(self.pop_row()) }; use diagram::ResponseOption::*; let res: &mut dyn Response = &mut ResponseStruct::new(); LineParser::run(&mut self.diagram_player, diagram, t, &mut req, res); if let Some(res_struct) = &mut res.as_mut_any().downcast_mut::<ResponseStruct>() { match res_struct.option { None => {} Quits => break, // response.quits したとき run ループを抜ける。 Reloads(ref file) => { // ファイルからグラフのノード構成を読取。 diagram.read_file(&file); } Saves(ref file) => { // ファイルを上書き。 diagram.write_file(&file); } } } else { panic!("Downcast fail."); } } } /// 1行 処理するだけでいいとき。 /// /// - Quits は無効になる。 /// /// # Arguments. /// /// * 'diagram' - パースの状態遷移図。 /// * 't' - 任意のオブジェクト。 /// * 'line' - コマンドライン文字列。 pub fn execute_line(&mut self, diagram: &mut Diagram<T>, t: &mut T, line: &str) { // リクエストは、キャレットを更新するのでミュータブル。 let mut req = RequestStruct::new(Box::new(line.to_string())); use diagram::ResponseOption::*; let res: &mut dyn Response = &mut ResponseStruct::new(); LineParser::run(&mut self.diagram_player, diagram, t, &mut req, res); if let Some(res_struct) = &mut res.as_mut_any().downcast_mut::<ResponseStruct>() { match res_struct.option { None => {} Quits => {} // ループの中ではないので無効。 Reloads(ref file) => { // ファイルからグラフのノード構成を読取。 diagram.read_file(&file); } Saves(ref file) => { // ファイルを上書き。 diagram.write_file(&file); } } } else { panic!("Downcast fail."); } } }
test_tagging.py
import pytest from testplan.common.utils.testing import check_report from testplan.report import TestReport, TestGroupReport, TestCaseReport from testplan.testing.multitest import MultiTest, testsuite, testcase @testsuite(tags={"color": ["red", "blue"]}) class AlphaSuite(object): @testcase def test_method_0(self, env, result): pass @testcase(tags=("foo", "bar")) def test_method_1(self, env, result): pass @testcase(tags={"color": "green"}) def test_method_2(self, env, result): pass @testsuite(tags={"color": "yellow"}) class BetaSuite(object): @testcase def test_method_0(self, env, result): pass @testcase(tags="foo") def test_method_1(self, env, result): pass @testcase(tags={"color": "red"}) def test_method_2(self, env, result): pass @testsuite class GammaSuite(object): @testcase def test_method_0(self, env, result): pass @testcase( parameters=("AAA", "BBB"), tag_func=lambda kwargs: {"symbol": kwargs["value"].lower()}, tags={"speed": "slow"}, ) def test_param(self, env, result, value): pass @testcase(parameters=("XXX", "YYY"), tags={"speed": "fast"}) def
(self, env, result, value): pass report_for_multitest_without_tags = TestGroupReport( name="MyMultitest", category="multitest", entries=[ TestGroupReport( name="AlphaSuite", category="testsuite", tags={"color": {"red", "blue"}}, entries=[ TestCaseReport(name="test_method_0"), TestCaseReport( name="test_method_1", tags={"simple": {"foo", "bar"}} ), TestCaseReport( name="test_method_2", tags={"color": {"green"}} ), ], ), TestGroupReport( name="BetaSuite", category="testsuite", tags={"color": {"yellow"}}, entries=[ TestCaseReport(name="test_method_0"), TestCaseReport(name="test_method_1", tags={"simple": {"foo"}}), TestCaseReport(name="test_method_2", tags={"color": {"red"}}), ], ), TestGroupReport( name="GammaSuite", category="testsuite", entries=[ TestCaseReport(name="test_method_0"), TestGroupReport( name="test_param", category="parametrization", tags={"speed": {"slow"}}, entries=[ TestCaseReport( name="test_param <value='AAA'>", tags={"symbol": {"aaa"}}, ), TestCaseReport( name="test_param <value='BBB'>", tags={"symbol": {"bbb"}}, ), ], ), TestGroupReport( name="test_param_2", category="parametrization", tags={"speed": {"fast"}}, entries=[ TestCaseReport(name="test_param_2 <value='XXX'>"), TestCaseReport(name="test_param_2 <value='YYY'>"), ], ), ], ), ], ) report_for_multitest_with_tags = TestGroupReport( name="MyMultitest", category="multitest", tags={"color": {"orange"}, "environment": {"server"}}, entries=[ TestGroupReport( name="AlphaSuite", category="testsuite", tags={"color": {"red", "blue"}}, entries=[ TestCaseReport(name="test_method_0"), TestCaseReport( name="test_method_1", tags={"simple": {"foo", "bar"}} ), TestCaseReport( name="test_method_2", tags={"color": {"green"}} ), ], ), TestGroupReport( name="BetaSuite", category="testsuite", tags={"color": {"yellow"}}, entries=[ TestCaseReport(name="test_method_0"), TestCaseReport(name="test_method_1", tags={"simple": {"foo"}}), TestCaseReport(name="test_method_2", tags={"color": {"red"}}), ], ), TestGroupReport( name="GammaSuite", category="testsuite", entries=[ TestCaseReport(name="test_method_0"), TestGroupReport( name="test_param", category="parametrization", tags={"speed": {"slow"}}, entries=[ TestCaseReport( name="test_param <value='AAA'>", tags={"symbol": {"aaa"}}, ), TestCaseReport( name="test_param <value='BBB'>", tags={"symbol": {"bbb"}}, ), ], ), TestGroupReport( name="test_param_2", category="parametrization", tags={"speed": {"fast"}}, entries=[ TestCaseReport(name="test_param_2 <value='XXX'>"), TestCaseReport(name="test_param_2 <value='YYY'>"), ], ), ], ), ], ) @pytest.mark.parametrize( "multitest_tags,expected_report", ( ({}, report_for_multitest_without_tags), ( {"color": "orange", "environment": "server"}, report_for_multitest_with_tags, ), ), ) def test_multitest_tagging(mockplan, multitest_tags, expected_report): multitest = MultiTest( name="MyMultitest", suites=[AlphaSuite(), BetaSuite(), GammaSuite()], tags=multitest_tags, ) mockplan.add(multitest) mockplan.run() check_report( expected=TestReport(name="plan", entries=[expected_report]), actual=mockplan.report, )
test_param_2
train.py
from model import DeepJIT import torch from tqdm import tqdm from utils import mini_batches_train, save import torch.nn as nn import os, datetime def
(data, params): data_pad_msg, data_pad_code, data_labels, dict_msg, dict_code = data # set up parameters params.cuda = (not params.no_cuda) and torch.cuda.is_available() del params.no_cuda params.filter_sizes = [int(k) for k in params.filter_sizes.split(',')] # params.save_dir = os.path.join(params.save_dir, datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')) params.vocab_msg, params.vocab_code = len(dict_msg), len(dict_code) if len(data_labels.shape) == 1: params.class_num = 1 else: params.class_num = data_labels.shape[1] params.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # create and train the defect model model = DeepJIT(args=params) if torch.cuda.is_available(): model = model.cuda() optimizer = torch.optim.Adam(model.parameters(), lr=params.l2_reg_lambda) criterion = nn.BCELoss() for epoch in range(1, params.num_epochs + 1): total_loss = 0 # building batches for training model batches = mini_batches_train(X_msg=data_pad_msg, X_code=data_pad_code, Y=data_labels, mini_batch_size=params.batch_size) for i, (batch) in enumerate(tqdm(batches)): pad_msg, pad_code, labels = batch if torch.cuda.is_available(): pad_msg, pad_code, labels = torch.tensor(pad_msg).cuda(), torch.tensor( pad_code).cuda(), torch.cuda.FloatTensor(labels) else: pad_msg, pad_code, labels = torch.tensor(pad_msg).long(), torch.tensor(pad_code).long(), torch.tensor( labels).float() optimizer.zero_grad() predict = model.forward(pad_msg, pad_code) loss = criterion(predict, labels) total_loss += loss loss.backward() optimizer.step() print('Epoch %i / %i -- Total loss: %f' % (epoch, params.num_epochs, total_loss)) save(model, params.save_dir, 'epoch', epoch)
train_model
cmp.rs
use std::convert::TryFrom; /// Possible comparison operators #[derive(Debug, PartialEq, Copy, Clone)] #[allow(clippy::upper_case_acronyms)] // these look weird when not capitalized pub enum Operator { Equal, NotEqual, GT, GTE, LT, LTE, } impl TryFrom<&str> for Operator { type Error = String; fn try_from(value: &str) -> Result<Self, Self::Error> { match value { "=" => Ok(Self::Equal), "!=" => Ok(Self::NotEqual), ">" => Ok(Self::GT), ">=" => Ok(Self::GTE), "<" => Ok(Self::LT), "<=" => Ok(Self::LTE), v => Err(format!("unknown operator {:?}", v)), } } } impl TryFrom<&arrow_deps::datafusion::logical_plan::Operator> for Operator { type Error = String; fn try_from(op: &arrow_deps::datafusion::logical_plan::Operator) -> Result<Self, Self::Error> { match op { arrow_deps::datafusion::logical_plan::Operator::Eq => Ok(Self::Equal), arrow_deps::datafusion::logical_plan::Operator::NotEq => Ok(Self::NotEqual),
arrow_deps::datafusion::logical_plan::Operator::LtEq => Ok(Self::LTE), arrow_deps::datafusion::logical_plan::Operator::Gt => Ok(Self::GT), arrow_deps::datafusion::logical_plan::Operator::GtEq => Ok(Self::GTE), v => Err(format!("unsupported operator {:?}", v)), } } }
arrow_deps::datafusion::logical_plan::Operator::Lt => Ok(Self::LT),
util.py
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import contextlib import os import platform import re import tempfile import shutil import subprocess import sys import urllib import urlparse # Copied from pip.wheel.Wheel.wheel_file_re to avoid requiring pip here. WHEEL_FILE_RE = re.compile( r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?)) ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) \.whl)$""", re.VERBOSE ) class GlycoError(Exception): """Base class for Glyco errors""" class GlycoSetupError(GlycoError): """Issue outside the reach of Glyco that prevents execution.""" class InvalidWheelFile(GlycoError): """The file passed is not a valid wheel file. This includes errors on the file name. """ def setup_virtualenv(env_path, relocatable=False): """Create a virtualenv in specified location. The virtualenv contains a standard Python installation, plus setuptools, pip and wheel. Args: env_path (str): where to create the virtual environment. """ if os.path.exists(os.path.join(os.path.expanduser('~'), '.pydistutils.cfg')): raise GlycoSetupError('\n'.join([ '', 'You have a ~/.pydistutils.cfg file, which interferes with the ', 'infra virtualenv environment. Please move it to the side and bootstrap ', 'again. Once infra has bootstrapped, you may move it back.', '', 'Upstream bug: https://github.com/pypa/virtualenv/issues/88/', '' ])) print 'Creating environment: %r' % env_path if os.path.exists(env_path): print ' Removing existing one...' shutil.rmtree(env_path, ignore_errors=True) print ' Building new environment...' # Import bundled virtualenv lib import virtualenv # pylint: disable=F0401 virtualenv.create_environment( env_path, search_dirs=virtualenv.file_search_dirs()) if relocatable: print ' Make environment relocatable' virtualenv.make_environment_relocatable(env_path) print 'Done creating environment' def platform_tag(): if sys.platform.startswith('linux'): return '_{0}_{1}'.format(*platform.linux_distribution()) return '' class Virtualenv(object): def __init__(self, prefix='glyco-', keep_directory=False): """Helper class to run commands from virtual environments. Keyword Args: prefix (str): prefix to the temporary directory used to create the virtualenv. keep_directory (boolean): if True the temporary virtualenv directory is kept around instead of being deleted. Useful mainly for debugging. Returns: self. Only the check_call and check_output methods are meant to be used inside the with block. """ self._prefix = prefix self._keep_directory = keep_directory # Where the virtualenv is self._venvdir = None self._bin_dir = 'Scripts' if sys.platform.startswith('win') else 'bin' def check_call(self, args, **kwargs): """Run a command from inside the virtualenv using check_call. Args: cmd (str): name of the command. Must be found in the 'bin' directory of the virtualenv. args (list of strings): arguments passed to the command. Keyword Args: kwargs: keyword arguments passed to subprocess.check_output """ subprocess.check_call( (os.path.join(self._venvdir, self._bin_dir, args[0]),) + tuple(args[1:]), **kwargs) def check_output(self, args, **kwargs): """Run a command from inside the virtualenv using check_output. Args: cmd (str): name of the command. Must be found in the 'bin' directory of the virtualenv. args (list of strings): arguments passed to the command. Keyword Args: kwargs: keyword arguments passed to subprocess.check_output """ return subprocess.check_output( (os.path.join(self._venvdir, self._bin_dir, args[0]),) + tuple(args[1:]), **kwargs) def __cleanup_venv(self): """Remove the virtualenv directory""" try: # TODO(pgervais,496347) Make this work reliably on Windows. shutil.rmtree(self._venvdir, ignore_errors=True) except OSError as ex: print >> sys.stderr, ( "ERROR: {!r} while cleaning up {!r}".format(ex, self._venvdir)) self._venvdir = None def __enter__(self): self._venvdir = tempfile.mkdtemp('', self._prefix, None) try: setup_virtualenv(self._venvdir) except Exception: self.__cleanup_venv() raise return self
if self._venvdir and not self._keep_directory: self.__cleanup_venv() # dir is a built-in. We're matching the Python 3 function signature here. # pylint: disable=redefined-builtin @contextlib.contextmanager def temporary_directory(suffix="", prefix="tmp", dir=None, keep_directory=False): """Create and return a temporary directory. This has the same behavior as mkdtemp but can be used as a context manager. For example: with temporary_directory() as tmpdir: ... Upon exiting the context, the directory and everything contained in it are removed. Args: suffix, prefix, dir: same arguments as for tempfile.mkdtemp. keep_directory (bool): if True, do not delete the temporary directory when exiting. Useful for debugging. Returns: tempdir (str): full path to the temporary directory. """ tempdir = None # Handle mkdtemp raising an exception try: tempdir = tempfile.mkdtemp(suffix, prefix, dir) yield tempdir finally: if tempdir and not keep_directory: try: # TODO(pgervais,496347) Make this work reliably on Windows. shutil.rmtree(tempdir, ignore_errors=True) except OSError as ex: print >> sys.stderr, ( "ERROR: {!r} while cleaning up {!r}".format(ex, tempdir)) def path2fileurl(path): """Convert a local absolute path to a file:/// URL There is no way to provide a relative path in a file:// URI, because there is no notion of 'working directory'. Output conforms to https://tools.ietf.org/html/rfc1630 """ if not os.path.isabs(path): raise ValueError('Only absolute paths can be turned into a file url. ' 'Got: %s' % path) path_comp = urllib.pathname2url(path) return 'file:///' + path_comp.lstrip('/') def fileurl2path(url): """Convert a file:// URL to a local path. Note that per https://tools.ietf.org/html/rfc1630 page 18 a host name should be provided. So file://localhost/file/name points to /file/name on localhost file:///file/name points to /file/name ('localhost' is optional) file://file/name points to /name on machine 'file'. """ if not url.startswith('file://'): raise ValueError('URL must start with "file://". Got %s' % url) parts = urlparse.urlparse(url) return urllib.url2pathname(parts.path)
def __exit__(self, err_type, value, tb):
tasks.controller.ts
import { Body, Controller, Delete, Get, HttpCode, HttpStatus, Param, Post, Query, UseGuards, } from '@nestjs/common'; import { AuthGuard } from '@nestjs/passport'; import { ApiBadRequestResponse, ApiBearerAuth, ApiBody, ApiCreatedResponse, ApiHeader, ApiNoContentResponse, ApiNotFoundResponse, ApiOkResponse, ApiQuery, ApiTags, ApiUnauthorizedResponse, } from '@nestjs/swagger'; import { GetUser } from 'src/auth/get-user.decorator'; import { User } from 'src/users/user.entity'; import { CreateTaskDto } from './dto/create-task.dto'; import { GetTaskDto } from './dto/get-task-dto'; import { GetTasksDto } from './dto/get-tasks.dto'; import { Task } from './task.entity'; import { TasksService } from './tasks.service'; // Doc @ApiTags('Tasks') @ApiBearerAuth() @ApiUnauthorizedResponse({ description: 'Unauthorized request' }) // Endpoint @Controller('tasks') @UseGuards(AuthGuard()) export class TasksController { constructor(private taskServices: TasksService) {} // Doc @ApiOkResponse({ description: 'Successful operation' }) @ApiBadRequestResponse({ description: 'Invalid uuid' }) @ApiNotFoundResponse({ description: 'Task not found' }) // Method @Get(':id') getTaskById( @Param() getTaskDto: GetTaskDto, @GetUser() user: User, ): Promise<Task> {
// Doc @ApiOkResponse({ description: 'Successful operation' }) @ApiBadRequestResponse({ description: 'Invalid query input' }) @ApiNotFoundResponse({ description: 'Tasks not found' }) // Method @Get() getTasks( @Query() getTasksDto: GetTasksDto, @GetUser() user: User, ): Promise<Task[]> { return this.taskServices.getUserAllTasks(getTasksDto, user); } // Doc @ApiCreatedResponse({ description: 'Successful operation' }) @ApiBadRequestResponse({ description: 'Invalid input' }) // Method @Post() createTask( @Body() createTaskDto: CreateTaskDto, @GetUser() user: User, ): Promise<Task> { return this.taskServices.createTask(createTaskDto, user); } // Doc @ApiNoContentResponse({ description: 'Successful operation' }) @ApiBadRequestResponse({ description: 'Invalid uuid' }) @ApiNotFoundResponse({ description: 'Not found' }) // Method @Delete(':id') @HttpCode(HttpStatus.NO_CONTENT) deleteTask( @Param() getTaskDto: GetTaskDto, @GetUser() user: User, ): Promise<void> { return this.taskServices.deleteUserTask(getTaskDto, user); } }
return this.taskServices.getUserTask(getTaskDto, user); }
format.rs
use std::fmt; use std::cell::RefCell; /// Format all iterator elements lazily, separated by `sep`. /// /// The format value can only be formatted once, after that the iterator is /// exhausted. /// /// See [`.format_with()`](../trait.Itertools.html#method.format_with) for more information. #[derive(Clone)] pub struct FormatWith<'a, I, F> { sep: &'a str, /// FormatWith uses interior mutability because Display::fmt takes &self. inner: RefCell<Option<(I, F)>>, } /// Format all iterator elements lazily, separated by `sep`. /// /// The format value can only be formatted once, after that the iterator is /// exhausted. /// /// See [`.format()`](../trait.Itertools.html#method.format) /// for more information. #[derive(Clone)] pub struct Format<'a, I> { sep: &'a str, /// Format uses interior mutability because Display::fmt takes &self. inner: RefCell<Option<I>>, } pub fn
<'a, I, F>(iter: I, separator: &'a str, f: F) -> FormatWith<'a, I, F> where I: Iterator, F: FnMut(I::Item, &mut dyn FnMut(&dyn fmt::Display) -> fmt::Result) -> fmt::Result { FormatWith { sep: separator, inner: RefCell::new(Some((iter, f))), } } pub fn new_format_default<'a, I>(iter: I, separator: &'a str) -> Format<'a, I> where I: Iterator, { Format { sep: separator, inner: RefCell::new(Some(iter)), } } impl<'a, I, F> fmt::Display for FormatWith<'a, I, F> where I: Iterator, F: FnMut(I::Item, &mut dyn FnMut(&dyn fmt::Display) -> fmt::Result) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (mut iter, mut format) = match self.inner.borrow_mut().take() { Some(t) => t, None => panic!("FormatWith: was already formatted once"), }; if let Some(fst) = iter.next() { format(fst, &mut |disp: &dyn fmt::Display| disp.fmt(f))?; for elt in iter { if self.sep.len() > 0 { f.write_str(self.sep)?; } format(elt, &mut |disp: &dyn fmt::Display| disp.fmt(f))?; } } Ok(()) } } impl<'a, I> Format<'a, I> where I: Iterator, { fn format<F>(&self, f: &mut fmt::Formatter, mut cb: F) -> fmt::Result where F: FnMut(&I::Item, &mut fmt::Formatter) -> fmt::Result, { let mut iter = match self.inner.borrow_mut().take() { Some(t) => t, None => panic!("Format: was already formatted once"), }; if let Some(fst) = iter.next() { cb(&fst, f)?; for elt in iter { if self.sep.len() > 0 { f.write_str(self.sep)?; } cb(&elt, f)?; } } Ok(()) } } macro_rules! impl_format { ($($fmt_trait:ident)*) => { $( impl<'a, I> fmt::$fmt_trait for Format<'a, I> where I: Iterator, I::Item: fmt::$fmt_trait, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.format(f, fmt::$fmt_trait::fmt) } } )* } } impl_format!{Display Debug UpperExp LowerExp UpperHex LowerHex Octal Binary Pointer}
new_format
helper.go
package helper import ( "path/filepath" "runtime" ) // currentProjectRootPath is the root path of current project src var currentProjectRootPath string // JoinWithProjectAbsPath return the path's full-path under project root func
(path string) string { return filepath.Join(currentProjectRootPath, path) } func currentFilePath() string { _, file, _, _ := runtime.Caller(1) return file } func init() { path := currentFilePath() var err error currentProjectRootPath, err = filepath.Abs(filepath.Join(filepath.Dir(path), "../..")) if err != nil { panic(err) } }
JoinWithProjectAbsPath
index.ts
export { event } from './event' export { variable } from './variable' export { asyncVariable } from './asyncVariable' export { filter } from './filter'
export { map } from './map' export { mutex } from './mutex' export { toAsync } from './toAsync' export { Emit, Subscribe, Variable, AsyncEmit, AsyncVariable, Lock } from './types'
lib.rs
#![allow(non_upper_case_globals)] #![allow(non_camel_case_types)] #![allow(non_snake_case)] #![allow(clippy::unreadable_literal)] include!(concat!(env!("OUT_DIR"), "/flutter-engine-sys.rs")); #[cfg(target_os = "android")] #[link(name = "flutter_engine")] extern "C" {} #[cfg(target_os = "ios")] #[link(name = "flutter_engine")] extern "C" {} #[cfg(target_os = "linux")] #[link(name = "flutter_engine")] extern "C" {} #[cfg(target_os = "macos")] #[link(name = "flutter_engine")] extern "C" {} #[cfg(target_os = "windows")] #[link(name = "flutter_engine.dll")] extern "C" {} #[cfg(test)] mod tests { #[allow(unused)] use super::*; use libloading::Library; #[cfg(target_os = "linux")] const LIB: &str = "libflutter_engine.so"; #[cfg(target_os = "macos")] const LIB: &str = "libflutter_engine.dylib"; #[cfg(target_os = "windows")] const LIB: &str = "flutter_engine.dll.lib"; #[test] fn
() { let lib = Library::new(LIB).unwrap(); unsafe { lib.get::<*const ()>(b"gIcudtlData\0").unwrap(); lib.get::<*const ()>(b"gIcudtlEnd\0").unwrap(); lib.get::<*const ()>(b"gIcudtlSize\0").unwrap(); } } }
link
train-text2mel.py
#!/usr/bin/env python """Train the Text2Mel network. See: https://arxiv.org/abs/1710.08969""" __author__ = 'Erdene-Ochir Tuguldur' import sys import time import argparse from tqdm import * import numpy as np import torch
from models import Text2Mel from hyperparams import HParams as hp from logger import Logger from utils import get_last_checkpoint_file_name, load_checkpoint, save_checkpoint, load_checkpoint_test from datasets.data_loader import Text2MelDataLoader parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument("--dataset", required=True, choices=['ljspeech', 'mbspeech','emovdb'], help='dataset name') args = parser.parse_args() if args.dataset == 'ljspeech': from datasets.lj_speech import vocab, LJSpeech as SpeechDataset elif args.dataset == 'emovdb': from datasets.emovdb import vocab, Emovdb as SpeechDataset else: from datasets.mb_speech import vocab, MBSpeech as SpeechDataset use_gpu = torch.cuda.is_available() print('use_gpu', use_gpu) if use_gpu: torch.backends.cudnn.benchmark = True train_data_loader = Text2MelDataLoader(text2mel_dataset=SpeechDataset(['texts', 'mels', 'mel_gates']), batch_size=64, mode='train') valid_data_loader = Text2MelDataLoader(text2mel_dataset=SpeechDataset(['texts', 'mels', 'mel_gates']), batch_size=64, mode='valid') text2mel = Text2Mel(vocab).cpu() start_timestamp = int(time.time() * 1000) start_epoch = 0 global_step = 0 logger = Logger(args.dataset, 'text2mel') # load the last checkpoint if exists last_checkpoint_file_name = get_last_checkpoint_file_name(logger.logdir) if last_checkpoint_file_name: print("loading the last checkpoint: %s" % last_checkpoint_file_name) start_epoch, global_step = load_checkpoint(last_checkpoint_file_name, text2mel, None) optimizer = torch.optim.Adam(text2mel.parameters(), lr=hp.text2mel_lr) def get_lr(): return optimizer.param_groups[0]['lr'] def lr_decay(step, warmup_steps=4000): new_lr = hp.text2mel_lr * warmup_steps ** 0.5 * min((step + 1) * warmup_steps ** -1.5, (step + 1) ** -0.5) optimizer.param_groups[0]['lr'] = new_lr def train(train_epoch, phase='train'): global global_step lr_decay(global_step) print("epoch %3d with lr=%.02e" % (train_epoch, get_lr())) text2mel.train() if phase == 'train' else text2mel.eval() torch.set_grad_enabled(True) if phase == 'train' else torch.set_grad_enabled(False) data_loader = train_data_loader if phase == 'train' else valid_data_loader it = 0 running_loss = 0.0 running_l1_loss = 0.0 running_att_loss = 0.0 pbar = tqdm(data_loader, unit="audios", unit_scale=data_loader.batch_size, disable=hp.disable_progress_bar) for batch in pbar: L, S, gates = batch['texts'], batch['mels'], batch['mel_gates'] S = S.permute(0, 2, 1) # TODO: because of pre processing B, N = L.size() # batch size and text count _, n_mels, T = S.size() # number of melspectrogram bins and time assert gates.size(0) == B # TODO: later remove assert gates.size(1) == T S_shifted = torch.cat((S[:, :, 1:], torch.zeros(B, n_mels, 1)), 2) S.requires_grad = False S_shifted.requires_grad = False gates.requires_grad = False def W_nt(_, n, t, g=0.2): return 1.0 - np.exp(-((n / float(N) - t / float(T)) ** 2) / (2 * g ** 2)) W = np.fromfunction(W_nt, (B, N, T), dtype=np.float32) W = torch.from_numpy(W) L = L.cpu() S = S.cpu() S_shifted = S_shifted.cpu() W = W.cpu() gates = gates.cpu() Y_logit, Y, A = text2mel(L, S, monotonic_attention=True) l1_loss = F.l1_loss(Y, S_shifted) masks = gates.reshape(B, 1, T).float() att_loss = (A * W * masks).mean() loss = l1_loss + att_loss if phase == 'train': lr_decay(global_step) optimizer.zero_grad() loss.backward() optimizer.step() global_step += 1 it += 1 loss, l1_loss, att_loss = loss.item(), l1_loss.item(), att_loss.item() running_loss += loss running_l1_loss += l1_loss running_att_loss += att_loss if phase == 'train': # update the progress bar pbar.set_postfix({ 'l1': "%.05f" % (running_l1_loss / it), 'att': "%.05f" % (running_att_loss / it) }) logger.log_step(phase, global_step, {'loss_l1': l1_loss, 'loss_att': att_loss}, {'mels-true': S[:1, :, :], 'mels-pred': Y[:1, :, :], 'attention': A[:1, :, :]}) if global_step % 1000 == 0: # checkpoint at every 1000th step save_checkpoint(logger.logdir, train_epoch, global_step, text2mel, optimizer) epoch_loss = running_loss / it epoch_l1_loss = running_l1_loss / it epoch_att_loss = running_att_loss / it logger.log_epoch(phase, global_step, {'loss_l1': epoch_l1_loss, 'loss_att': epoch_att_loss}) return epoch_loss since = time.time() epoch = start_epoch while True: train_epoch_loss = train(epoch, phase='train') time_elapsed = time.time() - since time_str = 'total time elapsed: {:.0f}h {:.0f}m {:.0f}s '.format(time_elapsed // 3600, time_elapsed % 3600 // 60, time_elapsed % 60) print("train epoch loss %f, step=%d, %s" % (train_epoch_loss, global_step, time_str)) valid_epoch_loss = train(epoch, phase='valid') print("valid epoch loss %f" % valid_epoch_loss) epoch += 1 if global_step >= hp.text2mel_max_iteration: print("max step %d (current step %d) reached, exiting..." % (hp.text2mel_max_iteration, global_step)) sys.exit(0)
import torch.nn.functional as F # project imports
Inputs.js
import {Mapper, Chain} from 'graflow' import Message from './Message' const Inputs = () => Chain( Mapper(v => Object.entries(v).map(([name, value]) =>
export default Inputs
Message('events', 'event', [['in', name, value]])) ) )
data.rs
// This short example provides the utility to inspect // wasm file data section. extern crate parity_wasm; use std::env; fn main()
{ // Example executable takes one argument which must // refernce the existing file with a valid wasm module let args = env::args().collect::<Vec<_>>(); if args.len() != 2 { println!("Usage: {} somefile.wasm", args[0]); return } // Here we load module using dedicated for this purpose // `deserialize_file` function (which works only with modules) let module = parity_wasm::deserialize_file(&args[1]).expect("Failed to load module"); // We query module for data section. Note that not every valid // wasm module must contain a data section. So in case the provided // module does not contain data section, we panic with an error let data_section = module.data_section().expect("no data section in module"); // Printing the total count of data segments println!("Data segments: {}", data_section.entries().len()); for (index, entry) in data_section.entries().iter().enumerate() { // Printing the details info of each data segment // see `elements::DataSegment` for more properties // you can query println!(" Entry #{}", index); // This shows the initialization member of data segment // (expression which must resolve in the linear memory location). if let Some(offset) = entry.offset() { println!(" init: {}", offset.code()[0]); } // This shows the total length of the data segment in bytes. println!(" size: {}", entry.value().len()); } }
controlPanel.ts
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import { QueryFormMetric, smartDateFormatter, t, validateNonEmpty, } from 'src/core'; import { ControlPanelConfig, D3_TIME_FORMAT_OPTIONS, formatSelectOptions, sections, sharedControls, emitFilterControl, legacySortBy, } from 'src/chartConntrols'; import { MetricsLayoutEnum } from '../types'; const percentageDifference = { name: 'percentageDifference', config: { type: 'CheckboxControl', label: t('Percentage difference'), renderTrigger: true, default: false, description: t('Calculating the expected difference of percentage data'), }, }; const config: ControlPanelConfig = { controlPanelSections: [ { ...sections.legacyTimeseriesTime, expanded: false }, { label: t('Query'), expanded: true, controlSetRows: [ [percentageDifference], [ { name: 'groupbyRows', config: { ...sharedControls.groupby, label: t('Rows'), description: t('Columns to group by on the rows'), }, }, ], [ { name: 'groupbyColumns', config: { ...sharedControls.groupby, label: t('Columns'), description: t('Columns to group by on the columns'), }, }, ], [ { name: 'metrics', config: { ...sharedControls.metrics, validators: [validateNonEmpty], }, }, ], [ { name: 'metricsLayout', config: { type: 'RadioButtonControl', renderTrigger: true, label: t('Apply metrics on'), default: MetricsLayoutEnum.COLUMNS, options: [ [MetricsLayoutEnum.COLUMNS, t('Columns')], [MetricsLayoutEnum.ROWS, t('Rows')], ], description: t( 'Use metrics as a top level group for columns or for rows', ), }, }, ], ['adhoc_filters'], emitFilterControl, [ { name: 'row_limit', config: { ...sharedControls.row_limit, }, }, ], ...legacySortBy, ], }, { label: t('Options'), expanded: true, tabOverride: 'data', controlSetRows: [ [ { name: 'aggregateFunction', config: { type: 'SelectControl', label: t('Aggregation function'), clearable: false, choices: formatSelectOptions([ 'Count', 'Count Unique Values', 'List Unique Values', 'Sum', 'Average', 'Median', 'Sample Variance', 'Sample Standard Deviation', 'Minimum', 'Maximum', 'First', 'Last', 'Sum as Fraction of Total', 'Sum as Fraction of Rows', 'Sum as Fraction of Columns', 'Count as Fraction of Total', 'Count as Fraction of Rows', 'Count as Fraction of Columns', ]), default: 'Sum', description: t( 'Aggregate function to apply when pivoting and computing the total rows and columns', ), renderTrigger: true, }, }, ], [ { name: 'rowTotals', config: { type: 'CheckboxControl', label: t('Show rows total'), default: false, renderTrigger: true, description: t('Display row level total'), }, }, ], [ { name: 'colTotals', config: { type: 'CheckboxControl', label: t('Show columns total'), default: false, renderTrigger: true, description: t('Display column level total'), }, }, ], [ { name: 'transposePivot', config: { type: 'CheckboxControl', label: t('Transpose pivot'), default: false, description: t('Swap rows and columns'), renderTrigger: true, }, }, ], [ { name: 'combineMetric', config: { type: 'CheckboxControl', label: t('Combine metrics'), default: false, description: t( 'Display metrics side by side within each column, as ' + 'opposed to each column being displayed side by side for each metric.', ), renderTrigger: true, }, }, ], ], }, { label: t('Chart Options'), expanded: true, controlSetRows: [ [ { name: 'valueFormat', config: { ...sharedControls.yAxisFormat, label: t('Value format'), }, }, ], [ {
config: { type: 'SelectControl', freeForm: true, label: t('Date format'), default: smartDateFormatter.id, renderTrigger: true, choices: D3_TIME_FORMAT_OPTIONS, description: t('D3 time format for datetime columns'), }, }, ], [ { name: 'rowOrder', config: { type: 'SelectControl', label: t('Rows sort by'), default: 'key_a_to_z', choices: [ // [value, label] ['key_a_to_z', t('key a-z')], ['key_z_to_a', t('key z-a')], ['value_a_to_z', t('value ascending')], ['value_z_to_a', t('value descending')], ], renderTrigger: true, description: t('Order of rows'), }, }, ], [ { name: 'colOrder', config: { type: 'SelectControl', label: t('Cols sort by'), default: 'key_a_to_z', choices: [ // [value, label] ['key_a_to_z', t('key a-z')], ['key_z_to_a', t('key z-a')], ['value_a_to_z', t('value ascending')], ['value_z_to_a', t('value descending')], ], renderTrigger: true, description: t('Order of columns'), }, }, ], [ { name: 'rowSubtotalPosition', config: { type: 'SelectControl', label: t('Rows subtotal position'), default: false, choices: [ // [value, label] [true, t('Top')], [false, t('Bottom')], ], renderTrigger: true, description: t('Position of row level subtotal'), }, }, ], [ { name: 'colSubtotalPosition', config: { type: 'SelectControl', label: t('Cols subtotal position'), default: false, choices: [ // [value, label] [true, t('Left')], [false, t('Right')], ], renderTrigger: true, description: t('Position of column level subtotal'), }, }, ], [ { name: 'conditional_formatting', config: { type: 'ConditionalFormattingControl', renderTrigger: true, label: t('Conditional formatting'), description: t('Apply conditional color formatting to metrics'), mapStateToProps(explore) { const values = (explore?.controls?.metrics?.value as QueryFormMetric[]) ?? []; const verboseMap = explore?.datasource?.verbose_map ?? {}; const metricColumn = values.map(value => { if (typeof value === 'string') { return { value, label: verboseMap[value] ?? value }; } return { value: value.label, label: value.label }; }); return { columnOptions: metricColumn, verboseMap, }; }, }, }, ], ], }, ], }; export default config;
name: 'date_format',
plaintext.py
"""A module to keep track of a plaintext.""" class Plaintext: """An instance of a plaintext. This is a wrapper class for a plaintext, which consists of one polynomial. Attributes: poly (Polynomial): Plaintext polynomial. scaling_factor (float): Scaling factor. """ def
(self, poly, scaling_factor=None): """Sets plaintext to given polynomial. Args: poly (Polynomial): Plaintext polynomial. scaling_factor (float): Scaling factor. """ self.poly = poly self.scaling_factor = scaling_factor def __str__(self): """Represents plaintext as a readable string. Returns: A string which represents the Plaintext. """ return str(self.poly)
__init__
excel_test.go
package excel import ( "mime/multipart" "path/filepath" "reflect" "github.com/erda-project/erda/apistructs" ) func
(file multipart.File, head *multipart.FileHeader, titleRows int, sheetName string, excelTcs []apistructs.TestCaseExcel) error { wb, err := NewWorkBook(file, filepath.Ext(head.Filename), head.Size) if err != nil { return err } return DecXlsForm(wb, sheetName, titleRows, excelTcs) } func DecXlsForm(wb WorkBook, sheetName string, titleRows int, excelTcs []apistructs.TestCaseExcel) error { rows := wb.Rows(sheetName) if rows == nil { return nil } headIndex := FirstNonempty(rows) if headIndex == -1 { return nil } var excelTc apistructs.TestCaseExcel var i int for i = headIndex + titleRows; i < len(rows); i++ { row := rows[i] if row[0] != "" { excelTcs = append(excelTcs, excelTc) excelTc = apistructs.TestCaseExcel{} } var fieldIndex int ucValue := reflect.ValueOf(excelTc) ucType := reflect.TypeOf(excelTc) for j := 0; j < len(rows[i]); j++ { if fieldIndex >= ucType.NumField() { break } destValue := ucValue.Elem().Field(fieldIndex) eleType := ucValue.Elem().Field(fieldIndex).Type().Kind() switch eleType { case reflect.Struct: var k int for k = 0; k < ucType.Field(fieldIndex).Type.Elem().NumField(); k++ { if rows[i][j+k] != "" { destValue.Field(k).Set(reflect.ValueOf(rows[i][j+k])) } } j += k - 1 case reflect.Slice: var ( k int needAppend bool ) destNew := reflect.New(destValue.Type().Elem()).Elem() for k = 0; k < ucType.Field(fieldIndex).Type.Elem().NumField(); k++ { if rows[i][j+k] != "" { destNew.Field(k).Set(reflect.ValueOf(rows[i][j+k])) needAppend = true } } j += k - 1 if needAppend { destValue.Set(reflect.Append(destValue, destNew)) } default: if rows[i][j] != "" { destValue.Set(reflect.ValueOf(rows[i][j])) } } fieldIndex++ } } // 填充最后一个 useCase excelTcs = append(excelTcs, excelTc) return nil }
DecXlsFromFile
index.js
const config = require('config'); const mongoose = require('mongoose'); const path = require('path'); const createApp = require('./app').createApp; const phonebookSchema = require('./models/phonebook'); const settingsConn = mongoose.createConnection(config.mongodb, { useNewUrlParser: true, }); const Phonebook = settingsConn.model( 'Phonebook', phonebookSchema('phonebook') ); console.log('config', config); const manageApp = createApp({ apiDoc: require('./manage/api-doc.js'), paths: path.resolve(__dirname, 'manage/api-routes'),
}, }); manageApp.listen(config.port);
dependencies: { Phonebook,
default.go
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
"context" "reflect" "strings" ) import ( perrors "github.com/pkg/errors" ) import ( "dubbo.apache.org/dubbo-go/v3/common" "dubbo.apache.org/dubbo-go/v3/common/constant" "dubbo.apache.org/dubbo-go/v3/common/extension" "dubbo.apache.org/dubbo-go/v3/common/logger" "dubbo.apache.org/dubbo-go/v3/common/proxy" "dubbo.apache.org/dubbo-go/v3/protocol" ) func init() { extension.SetProxyFactory("default", NewDefaultProxyFactory) } // DefaultProxyFactory is the default proxy factory type DefaultProxyFactory struct { // delegate ProxyFactory } // you can rewrite DefaultProxyFactory in extension and delegate the default proxy factory like below //func WithDelegate(delegateProxyFactory ProxyFactory) Option { // return func(proxy ProxyFactory) { // proxy.(*DefaultProxyFactory).delegate = delegateProxyFactory // } //} // NewDefaultProxyFactory returns a proxy factory instance func NewDefaultProxyFactory(_ ...proxy.Option) proxy.ProxyFactory { return &DefaultProxyFactory{} } // GetProxy gets a proxy func (factory *DefaultProxyFactory) GetProxy(invoker protocol.Invoker, url *common.URL) *proxy.Proxy { return factory.GetAsyncProxy(invoker, nil, url) } // GetAsyncProxy gets a async proxy func (factory *DefaultProxyFactory) GetAsyncProxy(invoker protocol.Invoker, callBack interface{}, url *common.URL) *proxy.Proxy { // create proxy attachments := map[string]string{} attachments[constant.ASYNC_KEY] = url.GetParam(constant.ASYNC_KEY, "false") return proxy.NewProxy(invoker, callBack, attachments) } // GetInvoker gets a invoker func (factory *DefaultProxyFactory) GetInvoker(url *common.URL) protocol.Invoker { return &ProxyInvoker{ BaseInvoker: *protocol.NewBaseInvoker(url), } } // ProxyInvoker is a invoker struct type ProxyInvoker struct { protocol.BaseInvoker } // Invoke is used to call service method by invocation func (pi *ProxyInvoker) Invoke(ctx context.Context, invocation protocol.Invocation) protocol.Result { result := &protocol.RPCResult{} result.SetAttachments(invocation.Attachments()) // get providerUrl. The origin url may be is registry URL. url := getProviderURL(pi.GetURL()) methodName := invocation.MethodName() proto := url.Protocol path := strings.TrimPrefix(url.Path, "/") args := invocation.Arguments() // get service svc := common.ServiceMap.GetServiceByServiceKey(proto, url.ServiceKey()) if svc == nil { logger.Errorf("cannot find service [%s] in %s", path, proto) result.SetError(perrors.Errorf("cannot find service [%s] in %s", path, proto)) return result } // get method method := svc.Method()[methodName] if method == nil { logger.Errorf("cannot find method [%s] of service [%s] in %s", methodName, path, proto) result.SetError(perrors.Errorf("cannot find method [%s] of service [%s] in %s", methodName, path, proto)) return result } in := []reflect.Value{svc.Rcvr()} if method.CtxType() != nil { ctx = context.WithValue(ctx, constant.AttachmentKey, invocation.Attachments()) in = append(in, method.SuiteContext(ctx)) } // prepare argv if (len(method.ArgsType()) == 1 || len(method.ArgsType()) == 2 && method.ReplyType() == nil) && method.ArgsType()[0].String() == "[]interface {}" { in = append(in, reflect.ValueOf(args)) } else { for i := 0; i < len(args); i++ { t := reflect.ValueOf(args[i]) if !t.IsValid() { at := method.ArgsType()[i] if at.Kind() == reflect.Ptr { at = at.Elem() } t = reflect.New(at) } in = append(in, t) } } // prepare replyv var replyv reflect.Value //if method.ReplyType() == nil && len(method.ArgsType()) > 0 { // // replyv = reflect.New(method.ArgsType()[len(method.ArgsType())-1].Elem()) // in = append(in, replyv) //} returnValues := method.Method().Func.Call(in) var retErr interface{} if len(returnValues) == 1 { retErr = returnValues[0].Interface() } else { replyv = returnValues[0] retErr = returnValues[1].Interface() } if retErr != nil { result.SetError(retErr.(error)) } else { if replyv.IsValid() && (replyv.Kind() != reflect.Ptr || replyv.Kind() == reflect.Ptr && replyv.Elem().IsValid()) { result.SetResult(replyv.Interface()) } } return result } func getProviderURL(url *common.URL) *common.URL { if url.SubURL == nil { return url } return url.SubURL }
package proxy_factory import (
api_op_DeleteAccessPoint.go
// Code generated by smithy-go-codegen DO NOT EDIT. package efs import ( "context" awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware" "github.com/aws/aws-sdk-go-v2/aws/signer/v4" "github.com/awslabs/smithy-go/middleware" smithyhttp "github.com/awslabs/smithy-go/transport/http" ) // Deletes the specified access point. After deletion is complete, new clients can // no longer connect to the access points. Clients connected to the access point at // the time of deletion will continue to function until they terminate their // connection. This operation requires permissions for the // elasticfilesystem:DeleteAccessPoint action. func (c *Client) DeleteAccessPoint(ctx context.Context, params *DeleteAccessPointInput, optFns ...func(*Options)) (*DeleteAccessPointOutput, error) { if params == nil { params = &DeleteAccessPointInput{} } result, metadata, err := c.invokeOperation(ctx, "DeleteAccessPoint", params, optFns, addOperationDeleteAccessPointMiddlewares) if err != nil { return nil, err } out := result.(*DeleteAccessPointOutput) out.ResultMetadata = metadata return out, nil } type DeleteAccessPointInput struct { // The ID of the access point that you want to delete. // // This member is required. AccessPointId *string } type DeleteAccessPointOutput struct { // Metadata pertaining to the operation's result. ResultMetadata middleware.Metadata } func addOperationDeleteAccessPointMiddlewares(stack *middleware.Stack, options Options) (err error) { err = stack.Serialize.Add(&awsRestjson1_serializeOpDeleteAccessPoint{}, middleware.After) if err != nil { return err } err = stack.Deserialize.Add(&awsRestjson1_deserializeOpDeleteAccessPoint{}, middleware.After) if err != nil { return err } awsmiddleware.AddRequestInvocationIDMiddleware(stack) smithyhttp.AddContentLengthMiddleware(stack) addResolveEndpointMiddleware(stack, options) v4.AddComputePayloadSHA256Middleware(stack) addRetryMiddlewares(stack, options) addHTTPSignerV4Middleware(stack, options) awsmiddleware.AddAttemptClockSkewMiddleware(stack) addClientUserAgent(stack) smithyhttp.AddErrorCloseResponseBodyMiddleware(stack) smithyhttp.AddCloseResponseBodyMiddleware(stack) addOpDeleteAccessPointValidationMiddleware(stack) stack.Initialize.Add(newServiceMetadataMiddleware_opDeleteAccessPoint(options.Region), middleware.Before) addRequestIDRetrieverMiddleware(stack) addResponseErrorMiddleware(stack) return nil } func
(region string) awsmiddleware.RegisterServiceMetadata { return awsmiddleware.RegisterServiceMetadata{ Region: region, ServiceID: ServiceID, SigningName: "elasticfilesystem", OperationName: "DeleteAccessPoint", } }
newServiceMetadataMiddleware_opDeleteAccessPoint
ultimate.py
""" BenchExec is a framework for reliable benchmarking. This file is part of BenchExec. Copyright (C) 2015 Daniel Dietsch All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import functools import logging import os import re import subprocess import sys import benchexec.result as result import benchexec.tools.template import benchexec.util as util from benchexec import BenchExecException from benchexec.model import MEMLIMIT from benchexec.tools.template import UnsupportedFeatureException _OPTION_NO_WRAPPER = "--force-no-wrapper" _SVCOMP17_VERSIONS = {"f7c3ed31"} _SVCOMP17_FORBIDDEN_FLAGS = {"--full-output", "--architecture"} _ULTIMATE_VERSION_REGEX = re.compile(r"^Version is (.*)$", re.MULTILINE) # .jar files that are used as launcher arguments with most recent .jar first _LAUNCHER_JARS = ["plugins/org.eclipse.equinox.launcher_1.3.100.v20150511-1540.jar"] class UltimateTool(benchexec.tools.template.BaseTool): """ Abstract tool info for Ultimate-based tools. """ REQUIRED_PATHS = [ "artifacts.xml", "config", "configuration", "cvc4", "cvc4nyu", "cvc4-LICENSE", "features", "LICENSE", "LICENSE.GPL", "LICENSE.GPL.LESSER", "mathsat", "mathsat-LICENSE", "p2", "plugins", "README", "Ultimate", "Ultimate.ini", "Ultimate.py", "z3", "z3-LICENSE", ] REQUIRED_PATHS_SVCOMP17 = [] def __init__(self): self._uses_propertyfile = False @functools.lru_cache() def executable(self): exe = util.find_executable("Ultimate.py") for (dirpath, dirnames, filenames) in os.walk(exe): if "Ultimate" in filenames and "plugins" in dirnames: return exe break # possibly another Ultimate.py was found, check in the current dir current = os.getcwd() for (dirpath, dirnames, filenames) in os.walk(current): if ( "Ultimate" in filenames and "Ultimate.py" in filenames and "plugins" in dirnames ): return "./Ultimate.py" break sys.exit( "ERROR: Could not find Ultimate executable in '{0}' or '{1}'".format( str(exe), str(current) ) ) def _ultimate_version(self, executable): data_dir = os.path.join(os.path.dirname(executable), "data") launcher_jar = self._get_current_launcher_jar(executable) cmds = [ # 2 [ self.get_java(), "-Xss4m", "-jar", launcher_jar, "-data", "@noDefault", "-ultimatedata", data_dir, "--version", ], # 1 [ self.get_java(), "-Xss4m", "-jar", launcher_jar, "-data", data_dir, "--version", ], ] self.api = len(cmds) for cmd in cmds: version = self._query_ultimate_version(cmd, self.api) if version != "": return version self.api = self.api - 1 raise BenchExecException("Could not determine Ultimate version") def _query_ultimate_version(self, cmd, api): try: process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) (stdout, stderr) = process.communicate() except OSError as e: logging.warning( "Cannot run Java to determine Ultimate version (API %s): %s", api, e.strerror, ) return "" stdout = util.decode_to_string(stdout).strip() if stderr or process.returncode: logging.warning( "Cannot determine Ultimate version (API %s).\n" "Command was: %s\n" "Exit code: %s\n" "Error output: %s\n" "Standard output: %s", api, " ".join(map(util.escape_string_shell, cmd)), process.returncode, util.decode_to_string(stderr), stdout, ) return "" version_ultimate_match = _ULTIMATE_VERSION_REGEX.search(stdout) if not version_ultimate_match: logging.warning( "Cannot determine Ultimate version (API %s), output was: %s", api, stdout, ) return "" return version_ultimate_match.group(1) @functools.lru_cache() def _get_current_launcher_jar(self, executable): ultimatedir = os.path.dirname(executable) for jar in _LAUNCHER_JARS: launcher_jar = os.path.join(ultimatedir, jar) if os.path.isfile(launcher_jar): return launcher_jar raise FileNotFoundError( "No suitable launcher jar found in {0}".format(ultimatedir) ) @functools.lru_cache() def version(self, executable): wrapper_version = self._version_from_tool(executable) if wrapper_version in _SVCOMP17_VERSIONS: # Keep reported version number for old versions as they were before return wrapper_version ultimate_version = self._ultimate_version(executable) return ultimate_version + "-" + wrapper_version @functools.lru_cache() def _is_svcomp17_version(self, executable): return self.version(executable) in _SVCOMP17_VERSIONS @functools.lru_cache() def _requires_ultimate_data(self, executable): if self._is_svcomp17_version(executable): return False version = self.version(executable) ult, wrapper = version.split("-") major, minor, patch = ult.split(".") # all versions before 0.1.24 do not require ultimatedata return not (int(major) == 0 and int(minor) < 2 and int(patch) < 24) def cmdline(self, executable, options, tasks, propertyfile=None, rlimits=None): if rlimits is None: rlimits = {} self._uses_propertyfile = propertyfile is not None if _OPTION_NO_WRAPPER in options: # do not use old wrapper script even if property file is given self._uses_propertyfile = False propertyfile = None options.remove(_OPTION_NO_WRAPPER) if self._is_svcomp17_version(executable): assert propertyfile cmdline = [executable, propertyfile] cmdline += [ option for option in options if option not in _SVCOMP17_FORBIDDEN_FLAGS ] cmdline.append("--full-output") cmdline += tasks self.__assert_cmdline( cmdline, "cmdline contains empty or None argument when using SVCOMP17 mode: ", ) return cmdline if self._uses_propertyfile: # use the old wrapper script if a property file is given cmdline = [executable, "--spec", propertyfile] if tasks: cmdline += ["--file"] + tasks cmdline += options self.__assert_cmdline( cmdline, "cmdline contains empty or None argument when using default SVCOMP mode: ", ) return cmdline # if no property file is given and toolchain (-tc) is, use ultimate directly if "-tc" in options or "--toolchain" in options: # ignore executable (old executable is just around for backwards compatibility) mem_bytes = rlimits.get(MEMLIMIT, None) cmdline = [self.get_java()] # -ea has to be given directly to java if "-ea" in options: options = [e for e in options if e != "-ea"] cmdline += ["-ea"] if mem_bytes: cmdline += ["-Xmx" + str(mem_bytes)] cmdline += ["-Xss4m"] cmdline += ["-jar", self._get_current_launcher_jar(executable)] if self._requires_ultimate_data(executable): if "-ultimatedata" not in options and "-data" not in options: if self.api == 2: cmdline += [ "-data", "@noDefault", "-ultimatedata", os.path.join(os.path.dirname(executable), "data"), ] if self.api == 1: raise ValueError( "Illegal option -ultimatedata for API {} and Ultimate version {}".format( self.api, self.version(executable) ) ) elif "-ultimatedata" in options and "-data" not in options: if self.api == 2: cmdline += ["-data", "@noDefault"] if self.api == 1: raise ValueError( "Illegal option -ultimatedata for API {} and Ultimate version {}".format( self.api, self.version(executable) ) ) else: if "-data" not in options: if self.api == 2 or self.api == 1: cmdline += [ "-data", os.path.join(os.path.dirname(executable), "data"), ] cmdline += options if tasks: cmdline += ["-i"] + tasks self.__assert_cmdline( cmdline, "cmdline contains empty or None argument when using Ultimate raw mode: ", ) return cmdline # there is no way to run ultimate; not enough parameters raise UnsupportedFeatureException( "Unsupported argument combination: options={} propertyfile={} rlimits={}".format( options, propertyfile, rlimits ) ) def
(self, cmdline, msg): assert all(cmdline), msg + str(cmdline) pass def program_files(self, executable): paths = ( self.REQUIRED_PATHS_SVCOMP17 if self._is_svcomp17_version(executable) else self.REQUIRED_PATHS ) return [executable] + self._program_files_from_executable(executable, paths) def determine_result(self, returncode, returnsignal, output, is_timeout): if self._uses_propertyfile: return self._determine_result_with_propertyfile( returncode, returnsignal, output, is_timeout ) return self._determine_result_without_propertyfile( returncode, returnsignal, output, is_timeout ) def _determine_result_without_propertyfile( self, returncode, returnsignal, output, is_timeout ): # special strings in ultimate output treeautomizer_sat = "TreeAutomizerSatResult" treeautomizer_unsat = "TreeAutomizerUnsatResult" unsupported_syntax_errorstring = "ShortDescription: Unsupported Syntax" incorrect_syntax_errorstring = "ShortDescription: Incorrect Syntax" type_errorstring = "Type Error" witness_errorstring = "InvalidWitnessErrorResult" exception_errorstring = "ExceptionOrErrorResult" safety_string = "Ultimate proved your program to be correct" all_spec_string = "AllSpecificationsHoldResult" unsafety_string = "Ultimate proved your program to be incorrect" mem_deref_false_string = "pointer dereference may fail" mem_deref_false_string_2 = "array index can be out of bounds" mem_free_false_string = "free of unallocated memory possible" mem_memtrack_false_string = "not all allocated memory was freed" termination_false_string = ( "Found a nonterminating execution for the following " "lasso shaped sequence of statements" ) termination_true_string = "TerminationAnalysisResult: Termination proven" ltl_false_string = "execution that violates the LTL property" ltl_true_string = "Buchi Automizer proved that the LTL property" overflow_false_string = "overflow possible" for line in output: if line.find(unsupported_syntax_errorstring) != -1: return "ERROR: UNSUPPORTED SYNTAX" if line.find(incorrect_syntax_errorstring) != -1: return "ERROR: INCORRECT SYNTAX" if line.find(type_errorstring) != -1: return "ERROR: TYPE ERROR" if line.find(witness_errorstring) != -1: return "ERROR: INVALID WITNESS FILE" if line.find(exception_errorstring) != -1: return "ERROR: EXCEPTION" if self._contains_overapproximation_result(line): return "UNKNOWN: OverapproxCex" if line.find(termination_false_string) != -1: return result.RESULT_FALSE_TERMINATION if line.find(termination_true_string) != -1: return result.RESULT_TRUE_PROP if line.find(ltl_false_string) != -1: return "FALSE(valid-ltl)" if line.find(ltl_true_string) != -1: return result.RESULT_TRUE_PROP if line.find(unsafety_string) != -1: return result.RESULT_FALSE_REACH if line.find(mem_deref_false_string) != -1: return result.RESULT_FALSE_DEREF if line.find(mem_deref_false_string_2) != -1: return result.RESULT_FALSE_DEREF if line.find(mem_free_false_string) != -1: return result.RESULT_FALSE_FREE if line.find(mem_memtrack_false_string) != -1: return result.RESULT_FALSE_MEMTRACK if line.find(overflow_false_string) != -1: return result.RESULT_FALSE_OVERFLOW if line.find(safety_string) != -1 or line.find(all_spec_string) != -1: return result.RESULT_TRUE_PROP if line.find(treeautomizer_unsat) != -1: return "unsat" if line.find(treeautomizer_sat) != -1 or line.find(all_spec_string) != -1: return "sat" return result.RESULT_UNKNOWN def _contains_overapproximation_result(self, line): triggers = [ "Reason: overapproximation of", "Reason: overapproximation of bitwiseAnd", "Reason: overapproximation of bitwiseOr", "Reason: overapproximation of bitwiseXor", "Reason: overapproximation of shiftLeft", "Reason: overapproximation of shiftRight", "Reason: overapproximation of bitwiseComplement", ] for trigger in triggers: if line.find(trigger) != -1: return True return False def _determine_result_with_propertyfile( self, returncode, returnsignal, output, is_timeout ): for line in output: if line.startswith("FALSE(valid-free)"): return result.RESULT_FALSE_FREE elif line.startswith("FALSE(valid-deref)"): return result.RESULT_FALSE_DEREF elif line.startswith("FALSE(valid-memtrack)"): return result.RESULT_FALSE_MEMTRACK elif line.startswith("FALSE(valid-memcleanup)"): return result.RESULT_FALSE_MEMCLEANUP elif line.startswith("FALSE(TERM)"): return result.RESULT_FALSE_TERMINATION elif line.startswith("FALSE(OVERFLOW)"): return result.RESULT_FALSE_OVERFLOW elif line.startswith("FALSE"): return result.RESULT_FALSE_REACH elif line.startswith("TRUE"): return result.RESULT_TRUE_PROP elif line.startswith("UNKNOWN"): return result.RESULT_UNKNOWN elif line.startswith("ERROR"): status = result.RESULT_ERROR if line.startswith("ERROR: INVALID WITNESS FILE"): status += " (invalid witness file)" return status return result.RESULT_UNKNOWN def get_value_from_output(self, lines, identifier): # search for the text in output and get its value, # stop after the first line, that contains the searched text for line in lines: if identifier in line: start_position = line.find("=") + 1 return line[start_position:].strip() return None @functools.lru_cache(maxsize=1) def get_java(self): candidates = [ "java", "/usr/bin/java", "/opt/oracle-jdk-bin-1.8.0.202/bin/java", "/usr/lib/jvm/java-8-openjdk-amd64/bin/java", ] for c in candidates: candidate = self.which(c) if not candidate: continue try: process = subprocess.Popen( [candidate, "-version"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, ) (stdout, stderr) = process.communicate() except OSError as e: continue stdout = util.decode_to_string(stdout).strip() if not stdout: continue if "1.8" in stdout: return candidate raise BenchExecException( "Could not find a suitable Java version: Need Java 1.8" ) def which(self, program): def is_exe(fpath): return os.path.isfile(fpath) and os.access(fpath, os.X_OK) fpath, fname = os.path.split(program) if fpath: if is_exe(program): return program else: for path in os.environ["PATH"].split(os.pathsep): exe_file = os.path.join(path, program) if is_exe(exe_file): return exe_file return None
__assert_cmdline
test_admin.py
from django.test import TestCase, Client from django.contrib.auth import get_user_model from django.urls import reverse class AdminSiteTests(TestCase): def setUp(self): self.client = Client() self.admin_user = get_user_model().objects.create_superuser( email="[email protected]", password="test123" ) self.client.force_login(self.admin_user) self.user = get_user_model().objects.create_user( email="[email protected]", password="test123", name="Test User" ) def test_users_listed(self): """Test if the user listed on the user page""" url = reverse('admin:core_user_changelist') res = self.client.get(url) self.assertContains(res, self.user.name) self.assertContains(res, self.user.email) def
(self): """Test that the user edit page works""" url = reverse('admin:core_user_change', args=[self.user.id]) res = self.client.get(url) self.assertEqual(res.status_code, 200) def test_create_user_page(self): """Test that the create user page works""" url = reverse('admin:core_user_add') res = self.client.get(url) self.assertEqual(res.status_code, 200)
test_user_change_page
Developer.js
export default class
{ static developers() { const karumies = { pedro: new Developer( 'Pedro', 3 ), fran: new Developer( 'Fran', 1 ), davide: new Developer( 'Davide', 0 ), sergio: new Developer( 'Sergio', 2 ), jorge: new Developer( 'Jorge', 1 ) }; return karumies; } constructor ( name, maxibons_to_grab ) { this.name = name; this.maxibons_to_grab = Math.max( 0, maxibons_to_grab ); } maxibonsToGrab() { return this.maxibons_to_grab; } static get( developer_key ) { return Developer.developers()[ developer_key ] } }
Developer
update.rs
use super::*; use crate::constants::*; use prisma_models::CompositeFieldRef; pub(crate) struct UpdateDataInputFieldMapper { unchecked: bool, } impl UpdateDataInputFieldMapper { pub fn new_checked() -> Self { Self { unchecked: false } } pub fn new_unchecked() -> Self
} impl DataInputFieldMapper for UpdateDataInputFieldMapper { fn map_scalar(&self, ctx: &mut BuilderContext, sf: &ScalarFieldRef) -> InputField { let base_update_type = match &sf.type_identifier { TypeIdentifier::Float => InputType::object(update_operations_object_type(ctx, "Float", sf, true)), TypeIdentifier::Decimal => InputType::object(update_operations_object_type(ctx, "Decimal", sf, true)), TypeIdentifier::Int => InputType::object(update_operations_object_type(ctx, "Int", sf, true)), TypeIdentifier::BigInt => InputType::object(update_operations_object_type(ctx, "BigInt", sf, true)), TypeIdentifier::String => InputType::object(update_operations_object_type(ctx, "String", sf, false)), TypeIdentifier::Boolean => InputType::object(update_operations_object_type(ctx, "Bool", sf, false)), TypeIdentifier::Enum(e) => { InputType::object(update_operations_object_type(ctx, &format!("Enum{}", e), sf, false)) } TypeIdentifier::Json => map_scalar_input_type_for_field(ctx, sf), TypeIdentifier::DateTime => InputType::object(update_operations_object_type(ctx, "DateTime", sf, false)), TypeIdentifier::UUID => InputType::object(update_operations_object_type(ctx, "Uuid", sf, false)), TypeIdentifier::Xml => InputType::object(update_operations_object_type(ctx, "Xml", sf, false)), TypeIdentifier::Bytes => InputType::object(update_operations_object_type(ctx, "Bytes", sf, false)), TypeIdentifier::Unsupported => unreachable!("No unsupported field should reach that path"), }; let has_adv_json = ctx.has_capability(ConnectorCapability::AdvancedJsonNullability); match &sf.type_identifier { TypeIdentifier::Json if has_adv_json => { let enum_type = json_null_input_enum(!sf.is_required()); let input_field = input_field( sf.name.clone(), vec![InputType::Enum(enum_type), base_update_type], None, ); input_field.optional() } _ => { let types = vec![map_scalar_input_type_for_field(ctx, sf), base_update_type]; let input_field = input_field(sf.name.clone(), types, None); input_field.optional().nullable_if(!sf.is_required()) } } } fn map_scalar_list(&self, ctx: &mut BuilderContext, sf: &ScalarFieldRef) -> InputField { let list_input_type = map_scalar_input_type(ctx, &sf.type_identifier, sf.is_list()); let ident = Identifier::new( format!("{}Update{}Input", sf.container.name(), sf.name), PRISMA_NAMESPACE, ); let input_object = match ctx.get_input_type(&ident) { Some(t) => t, None => { let mut object_fields = vec![input_field(operations::SET, list_input_type.clone(), None).optional()]; // Todo this capability looks wrong to me. if ctx.has_capability(ConnectorCapability::EnumArrayPush) { object_fields.push( input_field( operations::PUSH, vec![ map_scalar_input_type(ctx, &sf.type_identifier, false), list_input_type.clone(), ], None, ) .optional(), ) } let mut input_object = input_object_type(ident.clone(), object_fields); input_object.require_exactly_one_field(); let input_object = Arc::new(input_object); ctx.cache_input_type(ident, input_object.clone()); Arc::downgrade(&input_object) } }; let input_type = InputType::object(input_object); input_field(sf.name.clone(), vec![input_type, list_input_type], None).optional() } fn map_relation(&self, ctx: &mut BuilderContext, rf: &RelationFieldRef) -> InputField { let related_model = rf.related_model(); let related_field = rf.related_field(); // Compute input object name let arity_part = match (rf.is_list(), rf.is_required()) { (true, _) => "Many", (false, true) => "OneRequired", (false, false) => "One", }; let without_part = format!("Without{}", capitalize(&related_field.name)); let unchecked_part = if self.unchecked { "Unchecked" } else { "" }; let ident = Identifier::new( format!( "{}{}Update{}{}Input", related_model.name, unchecked_part, arity_part, without_part ), PRISMA_NAMESPACE, ); let input_object = match ctx.get_input_type(&ident) { Some(t) => t, None => { let input_object = Arc::new(init_input_object_type(ident.clone())); ctx.cache_input_type(ident, input_object.clone()); // Enqueue the nested update input for its fields to be // created at a later point, to avoid recursing too deep // (that has caused stack overflows on large schemas in // the past). ctx.nested_update_inputs_queue .push((Arc::clone(&input_object), Arc::clone(&rf))); Arc::downgrade(&input_object) } }; input_field(rf.name.clone(), InputType::object(input_object), None).optional() } fn map_composite(&self, ctx: &mut BuilderContext, cf: &CompositeFieldRef) -> InputField { // Shorthand object (equivalent to the "set" operation). let shorthand_type = InputType::Object(create::composite_create_object_type(ctx, cf)); // Operation envelope object. let envelope_type = InputType::Object(composite_update_envelope_object_type(ctx, cf)); let mut input_types = vec![envelope_type, shorthand_type.clone()]; if cf.is_list() { input_types.push(InputType::list(shorthand_type)); } input_field(cf.name.clone(), input_types, None) .nullable_if(cf.is_optional() && !cf.is_list()) .optional() } } fn update_operations_object_type( ctx: &mut BuilderContext, prefix: &str, sf: &ScalarFieldRef, with_number_operators: bool, ) -> InputObjectTypeWeakRef { // Different names are required to construct and cache different objects. // - "Nullable" affects the `set` operation (`set` is nullable) let nullable = if !sf.is_required() { "Nullable" } else { "" }; let ident = Identifier::new( format!("{}{}FieldUpdateOperationsInput", nullable, prefix), PRISMA_NAMESPACE, ); return_cached_input!(ctx, &ident); let mut obj = init_input_object_type(ident.clone()); obj.require_exactly_one_field(); let obj = Arc::new(obj); ctx.cache_input_type(ident, obj.clone()); let typ = map_scalar_input_type_for_field(ctx, sf); let mut fields = vec![input_field(operations::SET, typ.clone(), None) .optional() .nullable_if(!sf.is_required())]; if with_number_operators { fields.push(input_field(operations::INCREMENT, typ.clone(), None).optional()); fields.push(input_field(operations::DECREMENT, typ.clone(), None).optional()); fields.push(input_field(operations::MULTIPLY, typ.clone(), None).optional()); fields.push(input_field(operations::DIVIDE, typ, None).optional()); } obj.set_fields(fields); Arc::downgrade(&obj) } /// Build an operation envelope object type for composite updates. /// An operation envelope is an object that encapsulates the possible operations, like: /// ```text /// cf_field: { // this is the envelope object /// set: { ... set type ... } /// update: { ... update type ... } /// ... more ops ... /// } /// ``` fn composite_update_envelope_object_type(ctx: &mut BuilderContext, cf: &CompositeFieldRef) -> InputObjectTypeWeakRef { let arity = if cf.is_optional() { "Nullable" } else if cf.is_list() { "List" } else { "" }; let name = format!("{}{}UpdateEnvelopeInput", cf.typ.name, arity); let ident = Identifier::new(name, PRISMA_NAMESPACE); return_cached_input!(ctx, &ident); let mut input_object = init_input_object_type(ident.clone()); input_object.require_exactly_one_field(); input_object.set_tag(ObjectTag::CompositeEnvelope); let input_object = Arc::new(input_object); ctx.cache_input_type(ident, input_object.clone()); let mut fields = vec![composite_set_update_input_field(ctx, cf)]; append_opt(&mut fields, composite_update_input_field(ctx, cf)); append_opt(&mut fields, composite_push_update_input_field(ctx, cf)); append_opt(&mut fields, composite_unset_update_input_field(cf)); append_opt(&mut fields, composite_upsert_update_input_field(ctx, cf)); input_object.set_fields(fields); Arc::downgrade(&input_object) } /// Builds the `update` input object type. Should be used in the envelope type. fn composite_update_object_type(ctx: &mut BuilderContext, cf: &CompositeFieldRef) -> InputObjectTypeWeakRef { let name = format!("{}UpdateInput", cf.typ.name); let ident = Identifier::new(name, PRISMA_NAMESPACE); return_cached_input!(ctx, &ident); let mut input_object = init_input_object_type(ident.clone()); input_object.set_min_fields(1); let input_object = Arc::new(input_object); ctx.cache_input_type(ident, input_object.clone()); let mapper = UpdateDataInputFieldMapper::new_checked(); let fields = mapper.map_all(ctx, cf.typ.fields()); input_object.set_fields(fields); Arc::downgrade(&input_object) } // Builds an `update` input field. Should only be used in the envelope type. fn composite_update_input_field(ctx: &mut BuilderContext, cf: &CompositeFieldRef) -> Option<InputField> { if cf.is_required() { let update_object_type = composite_update_object_type(ctx, cf); Some(input_field(operations::UPDATE, InputType::Object(update_object_type), None).optional()) } else { None } } // Builds an `unset` input field. Should only be used in the envelope type. fn composite_unset_update_input_field(cf: &CompositeFieldRef) -> Option<InputField> { if cf.is_optional() { Some(input_field(operations::UNSET, InputType::boolean(), None).optional()) } else { None } } // Builds an `set` input field. Should only be used in the envelope type. fn composite_set_update_input_field(ctx: &mut BuilderContext, cf: &CompositeFieldRef) -> InputField { let set_object_type = InputType::Object(create::composite_create_object_type(ctx, cf)); let mut input_types = vec![set_object_type.clone()]; if cf.is_list() { input_types.push(InputType::list(set_object_type)); } input_field(operations::SET, input_types, None) .nullable_if(!cf.is_required() && !cf.is_list()) .optional() } // Builds an `push` input field. Should only be used in the envelope type. fn composite_push_update_input_field(ctx: &mut BuilderContext, cf: &CompositeFieldRef) -> Option<InputField> { if cf.is_list() { let set_object_type = InputType::Object(create::composite_create_object_type(ctx, cf)); let input_types = vec![set_object_type.clone(), InputType::list(set_object_type)]; Some(input_field(operations::PUSH, input_types, None).optional()) } else { None } } /// Builds the `upsert` input object type. Should only be used in the envelope type. fn composite_upsert_object_type(ctx: &mut BuilderContext, cf: &CompositeFieldRef) -> InputObjectTypeWeakRef { let name = format!("{}UpsertInput", cf.typ.name); let ident = Identifier::new(name, PRISMA_NAMESPACE); return_cached_input!(ctx, &ident); let mut input_object = init_input_object_type(ident.clone()); input_object.set_tag(ObjectTag::CompositeEnvelope); let input_object = Arc::new(input_object); ctx.cache_input_type(ident, input_object.clone()); let update_object_type = composite_update_object_type(ctx, cf); let update_field = input_field(operations::UPDATE, InputType::Object(update_object_type), None); let set_field = composite_set_update_input_field(ctx, cf).required(); let fields = vec![set_field, update_field]; input_object.set_fields(fields); Arc::downgrade(&input_object) } // Builds an `upsert` input field. Should only be used in the envelope type. fn composite_upsert_update_input_field(ctx: &mut BuilderContext, cf: &CompositeFieldRef) -> Option<InputField> { if cf.is_optional() { let upsert_object_type = InputType::Object(composite_upsert_object_type(ctx, cf)); Some(input_field(operations::UPSERT, upsert_object_type, None).optional()) } else { None } }
{ Self { unchecked: true } }
ts-file.ts
import Vue, { CreateElement, VNode } from "vue" export default Vue.extend({ render(h: CreateElement): VNode { return h("h2", "This is a .ts component") }
})
point_and_polygon_2d_intersection.rs
use maths::Approximations; use collisions::shapes::Intersection; use collisions::shapes::_2d::{Point2D, Polygon}; impl Intersection<Point2D> for Polygon { type Output = Point2D; fn intersection(&self, point: &Point2D) -> Option<Self::Output> { for plane in self.separating_planes_iter() {
Some(point.clone()) } } impl Intersection<Polygon> for Point2D { type Output = Point2D; #[inline(always)] fn intersection(&self, polygon: &Polygon) -> Option<Self::Output> { polygon.intersection(self) } }
if plane.normal_projection_of(&point.0).is_strictly_positive() { return None; } }
shared_countermeasure_decoy_launcher.py
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
def create(kernel): result = Intangible() result.template = "object/draft_schematic/space/weapon/missile/shared_countermeasure_decoy_launcher.iff" result.attribute_template_id = -1 result.stfName("string_id_table","") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
from swgpy.object import *
validation.go
package validation import ( "errors" hazelcastv1alpha1 "github.com/hazelcast/hazelcast-platform-operator/api/v1alpha1" "github.com/hazelcast/hazelcast-platform-operator/controllers/util" ) func ValidateSpec(h *hazelcastv1alpha1.Hazelcast) error { if err := validateExposeExternally(h); err != nil { return err } if err := validateLicense(h); err != nil { return err } return nil } func validateExposeExternally(h *hazelcastv1alpha1.Hazelcast) error {
if ee == nil { return nil } if ee.Type == hazelcastv1alpha1.ExposeExternallyTypeUnisocket && ee.MemberAccess != "" { return errors.New("when exposeExternally.type is set to \"Unisocket\", exposeExternally.memberAccess must not be set") } return nil } func validateLicense(h *hazelcastv1alpha1.Hazelcast) error { if util.IsEnterprise(h.Spec.Repository) && len(h.Spec.LicenseKeySecret) == 0 { return errors.New("when Hazelcast Enterprise is deployed, licenseKeySecret must be set") } return nil }
ee := h.Spec.ExposeExternally
lib.rs
#![recursion_limit = "512"] use serde_derive::{Deserialize, Serialize}; use strum::IntoEnumIterator; use strum_macros::{EnumIter, ToString}; use yew::events::KeyboardEvent; use yew::format::Json; use yew::services::storage::{Area, StorageService}; use yew::{html, Component, ComponentLink, Href, Html, InputData, ShouldRender}; const KEY: &'static str = "yew.todomvc.self"; pub struct Model { link: ComponentLink<Self>, storage: StorageService, state: State, } #[derive(Serialize, Deserialize)] pub struct State { entries: Vec<Entry>, filter: Filter, value: String, edit_value: String, } #[derive(Serialize, Deserialize)] struct Entry { description: String, completed: bool, editing: bool, } pub enum Msg { Add, Edit(usize), Update(String), UpdateEdit(String), Remove(usize), SetFilter(Filter), ToggleAll, ToggleEdit(usize), Toggle(usize), ClearCompleted, Nope, } impl Component for Model { type Message = Msg; type Properties = (); fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self { let storage = StorageService::new(Area::Local).expect("storage was disabled by the user"); let entries = { if let Json(Ok(restored_model)) = storage.restore(KEY) { restored_model } else { Vec::new() } }; let state = State { entries, filter: Filter::All, value: "".into(), edit_value: "".into(), }; Model { link, storage, state, } } fn update(&mut self, msg: Self::Message) -> ShouldRender { match msg { Msg::Add => { let entry = Entry { description: self.state.value.clone(), completed: false, editing: false, }; self.state.entries.push(entry); self.state.value = "".to_string(); } Msg::Edit(idx) => { let edit_value = self.state.edit_value.clone(); self.state.complete_edit(idx, edit_value); self.state.edit_value = "".to_string(); } Msg::Update(val) => { println!("Input: {}", val); self.state.value = val; } Msg::UpdateEdit(val) => { println!("Input: {}", val); self.state.edit_value = val; } Msg::Remove(idx) => { self.state.remove(idx); } Msg::SetFilter(filter) => { self.state.filter = filter; } Msg::ToggleEdit(idx) => { self.state.edit_value = self.state.entries[idx].description.clone(); self.state.toggle_edit(idx); } Msg::ToggleAll => { let status = !self.state.is_all_completed(); self.state.toggle_all(status); } Msg::Toggle(idx) => { self.state.toggle(idx); } Msg::ClearCompleted => { self.state.clear_completed(); } Msg::Nope => {} } self.storage.store(KEY, Json(&self.state.entries)); true } fn change(&mut self, _: Self::Properties) -> ShouldRender { false } fn view(&self) -> Html { html! { <div class="todomvc-wrapper"> <section class="todoapp"> <header class="header"> <h1>{ "todos" }</h1> { self.view_input() } </header> <section class="main"> <input type="checkbox" class="toggle-all" checked=self.state.is_all_completed() onclick=self.link.callback(|_| Msg::ToggleAll) /> <ul class="todo-list"> { for self.state.entries.iter().filter(|e| self.state.filter.fit(e)).enumerate().map(|e| self.view_entry(e)) } </ul> </section> <footer class="footer"> <span class="todo-count"> <strong>{ self.state.total() }</strong> { " item(s) left" } </span> <ul class="filters"> { for Filter::iter().map(|flt| self.view_filter(flt)) } </ul> <button class="clear-completed" onclick=self.link.callback(|_| Msg::ClearCompleted)> { format!("Clear completed ({})", self.state.total_completed()) } </button> </footer> </section> <footer class="info"> <p>{ "Double-click to edit a todo" }</p> <p>{ "Written by " }<a href="https://github.com/DenisKolodin/" target="_blank">{ "Denis Kolodin" }</a></p> <p>{ "Part of " }<a href="http://todomvc.com/" target="_blank">{ "TodoMVC" }</a></p> </footer> </div> } } } impl Model { fn view_filter(&self, filter: Filter) -> Html { let flt = filter.clone(); html! { <li> <a class=if self.state.filter == flt { "selected" } else { "not-selected" } href=&flt onclick=self.link.callback(move |_| Msg::SetFilter(flt.clone()))> { filter } </a> </li> } } fn view_input(&self) -> Html { html! { // You can use standard Rust comments. One line: // <li></li> <input class="new-todo" placeholder="What needs to be done?" value=&self.state.value oninput=self.link.callback(|e: InputData| Msg::Update(e.value)) onkeypress=self.link.callback(|e: KeyboardEvent| { if e.key() == "Enter" { Msg::Add } else { Msg::Nope } }) /> /* Or multiline: <ul> <li></li> </ul> */ } } fn view_entry(&self, (idx, entry): (usize, &Entry)) -> Html { let mut class = "todo".to_string(); if entry.editing { class.push_str(" editing"); } if entry.completed { class.push_str(" completed"); } html! { <li class=class> <div class="view"> <input type="checkbox" class="toggle" checked=entry.completed onclick=self.link.callback(move |_| Msg::Toggle(idx)) /> <label ondoubleclick=self.link.callback(move |_| Msg::ToggleEdit(idx))>{ &entry.description }</label> <button class="destroy" onclick=self.link.callback(move |_| Msg::Remove(idx)) /> </div> { self.view_entry_edit_input((idx, &entry)) } </li> } } fn view_entry_edit_input(&self, (idx, entry): (usize, &Entry)) -> Html { if entry.editing { html! { <input class="edit" type="text" value=&entry.description oninput=self.link.callback(|e: InputData| Msg::UpdateEdit(e.value)) onblur=self.link.callback(move |_| Msg::Edit(idx)) onkeypress=self.link.callback(move |e: KeyboardEvent| { if e.key() == "Enter" { Msg::Edit(idx) } else { Msg::Nope } }) /> } } else { html! { <input type="hidden" /> } } } } #[derive(EnumIter, ToString, Clone, PartialEq, Serialize, Deserialize)] pub enum Filter { All, Active, Completed, } impl<'a> Into<Href> for &'a Filter { fn into(self) -> Href { match *self { Filter::All => "#/".into(), Filter::Active => "#/active".into(), Filter::Completed => "#/completed".into(), } } } impl Filter { fn
(&self, entry: &Entry) -> bool { match *self { Filter::All => true, Filter::Active => !entry.completed, Filter::Completed => entry.completed, } } } impl State { fn total(&self) -> usize { self.entries.len() } fn total_completed(&self) -> usize { self.entries .iter() .filter(|e| Filter::Completed.fit(e)) .count() } fn is_all_completed(&self) -> bool { let mut filtered_iter = self .entries .iter() .filter(|e| self.filter.fit(e)) .peekable(); if filtered_iter.peek().is_none() { return false; } filtered_iter.all(|e| e.completed) } fn toggle_all(&mut self, value: bool) { for entry in self.entries.iter_mut() { if self.filter.fit(entry) { entry.completed = value; } } } fn clear_completed(&mut self) { let entries = self .entries .drain(..) .filter(|e| Filter::Active.fit(e)) .collect(); self.entries = entries; } fn toggle(&mut self, idx: usize) { let filter = self.filter.clone(); let mut entries = self .entries .iter_mut() .filter(|e| filter.fit(e)) .collect::<Vec<_>>(); let entry = entries.get_mut(idx).unwrap(); entry.completed = !entry.completed; } fn toggle_edit(&mut self, idx: usize) { let filter = self.filter.clone(); let mut entries = self .entries .iter_mut() .filter(|e| filter.fit(e)) .collect::<Vec<_>>(); let entry = entries.get_mut(idx).unwrap(); entry.editing = !entry.editing; } fn complete_edit(&mut self, idx: usize, val: String) { let filter = self.filter.clone(); let mut entries = self .entries .iter_mut() .filter(|e| filter.fit(e)) .collect::<Vec<_>>(); let entry = entries.get_mut(idx).unwrap(); entry.description = val; entry.editing = !entry.editing; } fn remove(&mut self, idx: usize) { let idx = { let filter = self.filter.clone(); let entries = self .entries .iter() .enumerate() .filter(|&(_, e)| filter.fit(e)) .collect::<Vec<_>>(); let &(idx, _) = entries.get(idx).unwrap(); idx }; self.entries.remove(idx); } }
fit
wrapper.py
from OpenNE.src.libnrl import graph from OpenNE.src.libnrl import grarep from OpenNE.src.libnrl import line from OpenNE.src.libnrl import node2vec from OpenNE.src.libnrl.gcn import gcnAPI from itertools import product import networkx as nx import numpy as np import tensorflow as tf def nx_to_openne_graph(nxgraph, stringify_nodes=True): dg = nx.to_directed(nxgraph).copy() if stringify_nodes: nx.relabel_nodes(dg, {n:str(n) for n in dg.nodes}, copy=False) nx.set_edge_attributes(dg, 1.0, 'weight') g = graph.Graph() g.G = dg g.encode_node() return g class OpenNEEmbeddingBase: def __init__(self, thisgraph, parameters): self.graph = nx_to_openne_graph(thisgraph) self.embeddings = None self.parameters = parameters def run(self): raise NotImplementedError('') def update_parameters(self, new_parameters): self.parameters = new_parameters self.embeddings = None def get_embeddings(self): if not self.embeddings: self.run() return self.embeddings def get_vectors(self): return self.get_embeddings().vectors @staticmethod def valid_parameter_combinations(parameterSpace): """ returns all possible combinations, if some are not valid / useful, this method needs to be overwritten """ all_combinations = product(*parameterSpace.values()) return [{k:v for k,v in zip(parameterSpace.keys(), combn)} for combn in all_combinations] class Node2VecEmbedding(OpenNEEmbeddingBase): """ {'dim': 2, 'num_paths': 80, 'p': 1, 'path_length': 10, 'q': 1} """ def run(self): self.embeddings = node2vec.Node2vec(self.graph, retrainable=True, **self.parameters) def
(self, new_graph, num_paths=80, epochs=5): g = nx_to_openne_graph(new_graph) self.embeddings.retrain(g, num_paths=num_paths, epochs=epochs) class GraRepEmbedding(OpenNEEmbeddingBase): def run(self): self.embeddings = grarep.GraRep(self.graph, **self.parameters) @staticmethod def valid_parameter_combinations(parameterSpace): """ returns all possible combinations, if some are not valid / useful, this method needs to be overwritten """ all_combinations = product(*parameterSpace.values()) all_combinations = [{k:v for k,v in zip(parameterSpace.keys(), combn)} for combn in all_combinations] return [x for x in all_combinations if x["dim"] % x["Kstep"] == 0] class LINEEmbedding(OpenNEEmbeddingBase): def run(self): tf.reset_default_graph() self.embeddings = line.LINE(self.graph, **self.parameters) from scipy.sparse.linalg.eigen.arpack import eigsh as largest_eigsh class SpectralClusteringEmbedding(OpenNEEmbeddingBase): def __init__(self, thisgraph, parameters): self.graph = thisgraph self.embeddings = None self.parameters = parameters nx.relabel_nodes(self.graph, {n:str(n) for n in self.graph.nodes}, copy=False) def run(self): L = nx.normalized_laplacian_matrix(self.graph) evalues, evectors = a,b = largest_eigsh(L, k=self.parameters['dim']) self.embeddings = {str(n):v for n,v in zip(self.graph.nodes, evectors)} def get_vectors(self): return self.get_embeddings() def _RandNE(graph, dim, q, beta): d = dim A = nx.to_scipy_sparse_matrix(graph) R = np.random.normal(loc=0, scale=1/d, size=(A.shape[0], d)) U0, _ = np.linalg.qr(R) Ulist = [U0] for i in range(q): Ulist.append(A.dot(Ulist[-1])) Ulist = np.array(Ulist) betas = (beta**np.arange(0, q+1)) U = np.array([scalar*m for scalar,m in zip(betas, Ulist)]).sum(axis=0) return U class RandNEEmbedding(OpenNEEmbeddingBase): def __init__(self, thisgraph, parameters): self.graph = thisgraph self.embeddings = None self.parameters = parameters def run(self): U = _RandNE(self.graph, **self.parameters) self.embeddings = {str(n):v for n,v in zip(self.graph.nodes, U)} def get_vectors(self): return self.get_embeddings()
retrain
makeQueryString.ts
const query = Object.keys(params) .filter((k) => !!params[k]) .map((k) => `${encodeURIComponent(k)}=${encodeURIComponent(params[k])}`) .join('&') return query } export { makeQueryString }
function makeQueryString(params) {
message_test.go
// Copyright © 2021 Kaleido, Inc. // // SPDX-License-Identifier: Apache-2.0 // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package broadcast import ( "bytes" "context" "fmt" "io" "io/ioutil" "testing" "github.com/hyperledger-labs/firefly/internal/syncasync" "github.com/hyperledger-labs/firefly/mocks/blockchainmocks" "github.com/hyperledger-labs/firefly/mocks/databasemocks" "github.com/hyperledger-labs/firefly/mocks/dataexchangemocks" "github.com/hyperledger-labs/firefly/mocks/datamocks" "github.com/hyperledger-labs/firefly/mocks/publicstoragemocks" "github.com/hyperledger-labs/firefly/mocks/syncasyncmocks" "github.com/hyperledger-labs/firefly/pkg/fftypes" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) func TestBroadcastMessageOk(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdm := bm.data.(*datamocks.Manager) mbi := bm.blockchain.(*blockchainmocks.Plugin) ctx := context.Background() rag := mdi.On("RunAsGroup", ctx, mock.Anything) rag.RunFn = func(a mock.Arguments) { var fn = a[1].(func(context.Context) error) rag.ReturnArguments = mock.Arguments{fn(a[0].(context.Context))} } mbi.On("VerifyIdentitySyntax", ctx, "0x12345").Return("0x12345", nil) mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(fftypes.DataRefs{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32()}, }, []*fftypes.DataAndBlob{}, nil) mdi.On("InsertMessageLocal", ctx, mock.Anything).Return(nil) msg, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ Author: "0x12345", }, }, InlineData: fftypes.InlineData{ {Value: fftypes.Byteable(`{"hello": "world"}`)}, }, }, false) assert.NoError(t, err) assert.NotNil(t, msg.Data[0].ID) assert.NotNil(t, msg.Data[0].Hash) assert.Equal(t, "ns1", msg.Header.Namespace) mdi.AssertExpectations(t) mdm.AssertExpectations(t) } func TestBroadcastMessageWaitConfirmOk(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdm := bm.data.(*datamocks.Manager) mbi := bm.blockchain.(*blockchainmocks.Plugin) msa := bm.syncasync.(*syncasyncmocks.Bridge) ctx := context.Background() rag := mdi.On("RunAsGroup", ctx, mock.Anything) rag.RunFn = func(a mock.Arguments) { var fn = a[1].(func(context.Context) error) rag.ReturnArguments = mock.Arguments{fn(a[0].(context.Context))} } mbi.On("VerifyIdentitySyntax", ctx, "0x12345").Return("0x12345", nil) mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(fftypes.DataRefs{ {ID: fftypes.NewUUID(), Hash: fftypes.NewRandB32()}, }, []*fftypes.DataAndBlob{}, nil) requestID := fftypes.NewUUID() replyMsg := &fftypes.Message{ Header: fftypes.MessageHeader{ Namespace: "ns1", ID: fftypes.NewUUID(), }, } msa.On("SendConfirm", ctx, "ns1", mock.Anything). Run(func(args mock.Arguments) { send := args[2].(syncasync.RequestSender) send(requestID) }). Return(replyMsg, nil) mdi.On("InsertMessageLocal", ctx, mock.MatchedBy(func(msg *fftypes.Message) bool { return msg.Header.ID == requestID })).Return(nil) msg, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ Author: "0x12345", }, }, InlineData: fftypes.InlineData{ {Value: fftypes.Byteable(`{"hello": "world"}`)}, }, }, true) assert.NoError(t, err) assert.Equal(t, replyMsg, msg) assert.Equal(t, "ns1", msg.Header.Namespace) mdi.AssertExpectations(t) mdm.AssertExpectations(t) } func TestBroadcastMessageWithBlobsOk(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdm := bm.data.(*datamocks.Manager) mbi := bm.blockchain.(*blockchainmocks.Plugin) mdx := bm.exchange.(*dataexchangemocks.Plugin) mps := bm.publicstorage.(*publicstoragemocks.Plugin) blobHash := fftypes.NewRandB32() dataID := fftypes.NewUUID() ctx := context.Background() rag := mdi.On("RunAsGroup", ctx, mock.Anything) rag.RunFn = func(a mock.Arguments) { var fn = a[1].(func(context.Context) error) rag.ReturnArguments = mock.Arguments{fn(a[0].(context.Context))} } mbi.On("VerifyIdentitySyntax", ctx, "0x12345").Return("0x12345", nil) mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(fftypes.DataRefs{ {ID: dataID, Hash: fftypes.NewRandB32()}, }, []*fftypes.DataAndBlob{ { Data: &fftypes.Data{ ID: dataID, Blob: &fftypes.BlobRef{ Hash: blobHash, }, }, Blob: &fftypes.Blob{ Hash: blobHash, PayloadRef: "blob/1", }, }, }, nil) mdx.On("DownloadBLOB", ctx, "blob/1").Return(ioutil.NopCloser(bytes.NewReader([]byte(`some data`))), nil) mps.On("PublishData", ctx, mock.MatchedBy(func(reader io.ReadCloser) bool { b, err := ioutil.ReadAll(reader) assert.NoError(t, err) assert.Equal(t, "some data", string(b)) return true })).Return("payload-ref", nil) mdi.On("UpdateData", ctx, mock.Anything, mock.Anything).Return(nil) mdi.On("InsertMessageLocal", ctx, mock.Anything).Return(nil) msg, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ Message: fftypes.Message{ Header: fftypes.MessageHeader{ Author: "0x12345", }, }, InlineData: fftypes.InlineData{ {Blob: &fftypes.BlobRef{ Hash: blobHash, }}, }, }, false) assert.NoError(t, err) assert.NotNil(t, msg.Data[0].ID) assert.NotNil(t, msg.Data[0].Hash) assert.Equal(t, "ns1", msg.Header.Namespace) mdi.AssertExpectations(t) mdm.AssertExpectations(t) } func TestBroadcastMessageBadInput(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdm := bm.data.(*datamocks.Manager) mbi := bm.blockchain.(*blockchainmocks.Plugin)
rag := mdi.On("RunAsGroup", ctx, mock.Anything) rag.RunFn = func(a mock.Arguments) { var fn = a[1].(func(context.Context) error) rag.ReturnArguments = mock.Arguments{fn(a[0].(context.Context))} } mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(nil, nil, fmt.Errorf("pop")) _, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ InlineData: fftypes.InlineData{ {Value: fftypes.Byteable(`{"hello": "world"}`)}, }, }, false) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) mdm.AssertExpectations(t) } func TestPublishBlobsSendMessageFail(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdx := bm.exchange.(*dataexchangemocks.Plugin) mps := bm.publicstorage.(*publicstoragemocks.Plugin) blobHash := fftypes.NewRandB32() dataID := fftypes.NewUUID() ctx := context.Background() mdx.On("DownloadBLOB", ctx, "blob/1").Return(ioutil.NopCloser(bytes.NewReader([]byte(`some data`))), nil) mps.On("PublishData", ctx, mock.MatchedBy(func(reader io.ReadCloser) bool { b, err := ioutil.ReadAll(reader) assert.NoError(t, err) assert.Equal(t, "some data", string(b)) return true })).Return("payload-ref", nil) mdi.On("UpdateData", ctx, mock.Anything, mock.Anything).Return(nil) mdi.On("InsertMessageLocal", ctx, mock.Anything).Return(fmt.Errorf("pop")) _, err := bm.publishBlobsAndSend(ctx, &fftypes.Message{}, []*fftypes.DataAndBlob{ { Data: &fftypes.Data{ ID: dataID, Blob: &fftypes.BlobRef{ Hash: blobHash, }, }, Blob: &fftypes.Blob{ Hash: blobHash, PayloadRef: "blob/1", }, }, }, false) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) } func TestPublishBlobsUpdateDataFail(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdx := bm.exchange.(*dataexchangemocks.Plugin) mps := bm.publicstorage.(*publicstoragemocks.Plugin) blobHash := fftypes.NewRandB32() dataID := fftypes.NewUUID() ctx := context.Background() mdx.On("DownloadBLOB", ctx, "blob/1").Return(ioutil.NopCloser(bytes.NewReader([]byte(`some data`))), nil) mps.On("PublishData", ctx, mock.MatchedBy(func(reader io.ReadCloser) bool { b, err := ioutil.ReadAll(reader) assert.NoError(t, err) assert.Equal(t, "some data", string(b)) return true })).Return("payload-ref", nil) mdi.On("UpdateData", ctx, mock.Anything, mock.Anything).Return(fmt.Errorf("pop")) _, err := bm.publishBlobsAndSend(ctx, &fftypes.Message{}, []*fftypes.DataAndBlob{ { Data: &fftypes.Data{ ID: dataID, Blob: &fftypes.BlobRef{ Hash: blobHash, }, }, Blob: &fftypes.Blob{ Hash: blobHash, PayloadRef: "blob/1", }, }, }, false) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) } func TestPublishBlobsPublishFail(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdx := bm.exchange.(*dataexchangemocks.Plugin) mps := bm.publicstorage.(*publicstoragemocks.Plugin) blobHash := fftypes.NewRandB32() dataID := fftypes.NewUUID() ctx := context.Background() mdx.On("DownloadBLOB", ctx, "blob/1").Return(ioutil.NopCloser(bytes.NewReader([]byte(`some data`))), nil) mps.On("PublishData", ctx, mock.MatchedBy(func(reader io.ReadCloser) bool { b, err := ioutil.ReadAll(reader) assert.NoError(t, err) assert.Equal(t, "some data", string(b)) return true })).Return("", fmt.Errorf("pop")) _, err := bm.publishBlobsAndSend(ctx, &fftypes.Message{}, []*fftypes.DataAndBlob{ { Data: &fftypes.Data{ ID: dataID, Blob: &fftypes.BlobRef{ Hash: blobHash, }, }, Blob: &fftypes.Blob{ Hash: blobHash, PayloadRef: "blob/1", }, }, }, false) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) } func TestPublishBlobsDownloadFail(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdx := bm.exchange.(*dataexchangemocks.Plugin) blobHash := fftypes.NewRandB32() dataID := fftypes.NewUUID() ctx := context.Background() mdx.On("DownloadBLOB", ctx, "blob/1").Return(nil, fmt.Errorf("pop")) _, err := bm.publishBlobsAndSend(ctx, &fftypes.Message{}, []*fftypes.DataAndBlob{ { Data: &fftypes.Data{ ID: dataID, Blob: &fftypes.BlobRef{ Hash: blobHash, }, }, Blob: &fftypes.Blob{ Hash: blobHash, PayloadRef: "blob/1", }, }, }, false) assert.Regexp(t, "FF10240", err) mdi.AssertExpectations(t) }
ctx := context.Background() mbi.On("VerifyIdentitySyntax", ctx, mock.Anything).Return("0x12345", nil)
__init__.py
import subprocess class NotificationError(Exception): pass class BaseNotification: def set_typed_variable(self, value, specified_type): if isinstance(value, specified_type): return value else: raise NotificationError( 'can only set ' f'{specified_type.__name__} ' f'(not "{value.__class__.__name__}")' ) # Main def notify(self): raise NotImplementedError() class OSSpecificNotification(BaseNotification): ''' OSSpecificNotification: OS ごとの通知 ''' def __init__(self): import platform self.system = platform.system() # macOS 用の通知 def darwin_notify(self): raise NotImplementedError() # Linux 用の通知 def linux_notify(self): raise NotImplementedError() # Windows 用の通知 def windows_notify(self): raise NotImplementedError() # 通知の実行 def notify(self): if self.system == 'Darwin': self.darwin_notify() elif self.system == 'Linux': self.linux_notify() elif self.system == 'Windows': self.windows_notify() else: NotificationError(f'{self.system} is supported system') class MessageNotification(BaseNotification): ''' MessageNotification: メッセージを打ち込める通知 引数: message(str): 本文 ''' def __init__(self, message): self._message = None self.set_message(message) # message のプロパティ用 def get_message(self): return self._message def set_message(self, message): self._message = self.set_typed_variable(message, str) message = property(get_message, set_message) class WebhookNotification(MessageNotification): ''' WebhookNotification: Webhook による通知 引数: message(str): 本文 url(str): Webhook の URL ''' def __init__(self, message, url): super().__init__(message) self._url = None self.set_url(url) # url のプロパティ用 def get_url(self): return self._url def set_url(self, url): self._url = self.set_typed_variable(url, str) url = property(get_url, set_url) class TokenN
tion(MessageNotification): ''' TokenNotification: Token による通知 引数: message(str): 本文 token(str): トークン ''' def __init__(self, message, token): super().__init__(message) self._token = None self.set_token(token) # token のプロパティ用 def get_token(self): return self._token def set_token(self, token): self._token = self.set_typed_variable(token, str) token = property(get_token, set_token) class BeepNotification(OSSpecificNotification): ''' BeepNotification: ビープ音による通知 引数: times(int): ビープ音の回数 ''' def __init__(self, times): super().__init__() self._times = None self.set_times(times) # times のプロパティ用 def get_times(self): return self._times def set_times(self, times): self._times = self.set_typed_variable(times, int) times = property(get_times, set_times) # 通知の実行 def darwin_notify(self): cmd = ['osascript', '-e', f'beep {self._times}'] subprocess.run(cmd) def linux_notify(self): import time for _ in range(self._times): cmd = ['xkbbell'] time.sleep(0.5) subprocess.run(cmd) class CenterNotification(MessageNotification): ''' CenterNotification: 通知センターによる通知 引数: message(str): 本文 title(str): タイトル subtitle(str): サブタイトル sound(bool): 音の有無 ''' def __init__(self, message, title=None, subtitle=None, sound=True): super().__init__(message) self._title = None self._subtitle = None self._sound = None if title: self.set_title(title) if subtitle: self.set_subtitle(subtitle) if sound: self.set_sound(sound) # title のプロパティ用 def get_title(self): return self._title def set_title(self, title): self._title = self.set_typed_variable(title, str) # タイトルとサブタイトルの両方がないといけないため、 # 片方だけ設定された場合、もう一方を空白にする if not self._subtitle: self._subtitle = ' ' title = property(get_title, set_title) # subtitle のプロパティ用 def get_subtitle(self): return self._subtitle def set_subtitle(self, subtitle): self._subtitle = self.set_typed_variable(subtitle, str) # タイトルとサブタイトルの両方がないといけないため、 # 片方だけ設定された場合、もう一方を空白にする if not self._title: self._title = ' ' subtitle = property(get_subtitle, set_subtitle) # sound のプロパティ用 def get_sound(self): return self._sound def set_sound(self, sound): self._sound = self.set_typed_variable(sound, bool) sound = property(get_sound, set_sound) # 通知の実行 def notify(self): _message = f'display notification \"{self._message}\"' _title = \ f'with title \"{self._title}\" subtitle \"{self._subtitle}\"' \ if self._title and self._subtitle else '' _sound = 'sound name \"\"' if self._sound else '' cmd = ['osascript', '-e', f'{_message} {_title} {_sound}'] subprocess.run(cmd) class SlackNotification(WebhookNotification): ''' SlackNotification: Slack による通知 引数(WebhookNotification): message(str): 本文 url(str): Incoming Webhook の URL ''' # 通知の実行 def notify(self): import json import requests data = {'text': self._message} requests.post(self._url, data=json.dumps(data)) class DiscordNotification(WebhookNotification): ''' DiscordNotification: Discord による通知 引数(WebhookNotification): message(str): 本文 url(str): Discord の Webhook の URL ''' # 通知の実行 def notify(self): import json import requests data = {'content': self._message} requests.post( self._url, headers={'Content-Type': 'application/json'}, data=json.dumps(data) ) class LineNotification(TokenNotification): ''' LineNotification: Line による通知 引数: message(str): 本文 token(str): LINE Notify のトークン ''' def __init__(self, message, token): super().__init__(message, token) self.URL = 'https://notify-api.line.me/api/notify' # 通知の実行 def notify(self): import requests headers = {'Authorization': f'Bearer {self._token}'} params = {'message': self._message} requests.post( self.URL, headers=headers, params=params )
otifica
iter.rs
use super::{ page::{self, ScheduledIo}, Shard, }; use std::slice; pub(in crate::net::driver::reactor) struct UniqueIter<'a> { pub(super) shards: slice::IterMut<'a, Shard>, pub(super) pages: slice::Iter<'a, page::Shared>, pub(super) slots: Option<page::Iter<'a>>, } impl<'a> Iterator for UniqueIter<'a> { type Item = &'a ScheduledIo; fn next(&mut self) -> Option<Self::Item> { loop { if let Some(item) = self.slots.as_mut().and_then(|slots| slots.next()) { return Some(item); } if let Some(page) = self.pages.next() { self.slots = page.iter(); } if let Some(shard) = self.shards.next() { self.pages = shard.iter(); } else { return None; } } } } pub(in crate::net::driver::reactor) struct ShardIter<'a> { pub(super) pages: slice::IterMut<'a, page::Shared>, pub(super) slots: Option<page::Iter<'a>>, } impl<'a> Iterator for ShardIter<'a> { type Item = &'a ScheduledIo; fn next(&mut self) -> Option<Self::Item>
}
{ loop { if let Some(item) = self.slots.as_mut().and_then(|slots| slots.next()) { return Some(item); } if let Some(page) = self.pages.next() { self.slots = page.iter(); } else { return None; } } }
patchsurveyquestion.go
package platformclientv2 import ( "github.com/leekchan/timeutil" "encoding/json" "strconv" "strings" ) // Patchsurveyquestion type Patchsurveyquestion struct { // VarType - Type of survey question. VarType *string `json:"type,omitempty"` // Label - Label of question. Label *string `json:"label,omitempty"` // CustomerProperty - The customer property that the answer maps to. CustomerProperty *string `json:"customerProperty,omitempty"` // Choices - Choices available to user. Choices *[]string `json:"choices,omitempty"` // IsMandatory - Whether answering this question is mandatory. IsMandatory *bool `json:"isMandatory,omitempty"` } func (o *Patchsurveyquestion) MarshalJSON() ([]byte, error) { // Redundant initialization to avoid unused import errors for models with no Time values _ = timeutil.Timedelta{} type Alias Patchsurveyquestion return json.Marshal(&struct { VarType *string `json:"type,omitempty"` Label *string `json:"label,omitempty"` CustomerProperty *string `json:"customerProperty,omitempty"` Choices *[]string `json:"choices,omitempty"` IsMandatory *bool `json:"isMandatory,omitempty"` *Alias }{ VarType: o.VarType, Label: o.Label, CustomerProperty: o.CustomerProperty, Choices: o.Choices, IsMandatory: o.IsMandatory, Alias: (*Alias)(o), }) } func (o *Patchsurveyquestion) UnmarshalJSON(b []byte) error { var PatchsurveyquestionMap map[string]interface{} err := json.Unmarshal(b, &PatchsurveyquestionMap) if err != nil { return err } if VarType, ok := PatchsurveyquestionMap["type"].(string); ok { o.VarType = &VarType } if Label, ok := PatchsurveyquestionMap["label"].(string); ok { o.Label = &Label } if CustomerProperty, ok := PatchsurveyquestionMap["customerProperty"].(string); ok { o.CustomerProperty = &CustomerProperty } if Choices, ok := PatchsurveyquestionMap["choices"].([]interface{}); ok
if IsMandatory, ok := PatchsurveyquestionMap["isMandatory"].(bool); ok { o.IsMandatory = &IsMandatory } return nil } // String returns a JSON representation of the model func (o *Patchsurveyquestion) String() string { j, _ := json.Marshal(o) str, _ := strconv.Unquote(strings.Replace(strconv.Quote(string(j)), `\\u`, `\u`, -1)) return str }
{ ChoicesString, _ := json.Marshal(Choices) json.Unmarshal(ChoicesString, &o.Choices) }
sweetalert.js
$(function () { $('.js-sweetalert button').on('click', function () { var type = $(this).data('type'); if (type === 'basic') { showBasicMessage(); } else if (type === 'with-title') { showWithTitleMessage(); } else if (type === 'success') { showSuccessMessage(); } else if (type === 'confirm') { showConfirmMessage(); } else if (type === 'html-message') { showHtmlMessage(); } else if (type === 'autoclose-timer') { showAutoCloseTimerMessage(); } else if (type === 'we-set-buttons') { showWeSet3Buttons(); } else if (type === 'AJAX-requests') { showAJAXrequests(); } else if (type === 'DOM-content') { showDOMContent(); } }); }); //These codes takes from http://t4t5.github.io/sweetalert/ function showBasicMessage() { swal("Hello world!"); } function showWithTitleMessage() { swal("Here's a message!", "It's pretty, isn't it?"); } function showSuccessMessage() { swal("Good job!", "You clicked the button!", "success"); } function showConfirmMessage() { swal({ title: "Are you sure?", text: "Once deleted, you will not be able to recover this imaginary file!", icon: "warning", buttons: true, dangerMode: true, }) .then((willDelete) => { if (willDelete) { swal("Poof! Your imaginary file has been deleted!", { icon: "success", }); } else { swal("Your imaginary file is safe!"); } }); } function showHtmlMessage() { swal({ title: "HTML <small>Title</small>!", text: "A custom <span style=\"color: #CC0000\">html<span> message.", html: true }); } function showAutoCloseTimerMessage() { swal({ title: "Auto close alert!", text: "I will close in 2 seconds.", timer: 2000, showConfirmButton: false }); } function
() { swal("A wild Pikachu appeared! What do you want to do?", { buttons: { cancel: "Run away!", catch: { text: "Throw Pokéball!", value: "catch", }, defeat: true, }, }) .then((value) => { switch (value) { case "defeat": swal("Pikachu fainted! You gained 500 XP!"); break; case "catch": swal("Gotcha!", "Pikachu was caught!", "success"); break; default: swal("Got away safely!"); } }); } function showAJAXrequests() { swal({ text: 'Search for a movie. e.g. "La La Land".', content: "input", button: { text: "Search!", closeModal: false, }, }) .then(name => { if (!name) throw null; return fetch(`https://itunes.apple.com/search?term=${name}&entity=movie`); }) .then(results => { return results.json(); }) .then(json => { const movie = json.results[0]; if (!movie) { return swal("No movie was found!"); } const name = movie.trackName; const imageURL = movie.artworkUrl100; swal({ title: "Top result:", text: name, icon: imageURL, }); }) .catch(err => { if (err) { swal("Oh noes!", "The AJAX request failed!", "error"); } else { swal.stopLoading(); swal.close(); } }); } function showDOMContent() { swal("Write something here:", { content: "input", }) .then((value) => { swal(`You typed: ${value}`); }); }
showWeSet3Buttons
read.py
def
(fn): verts=[] faces=[] with open(fn,"r") as f: t=f.readlines() for line in t: line=line.replace("\n","") line=line.split(" ") while "" in line: line.remove("") print([line]) if line[0]=="v": vert=[] for x in line[1:]: vert.append(float(x)) verts.append(vert) if line[0]=="f": face=[] for x in line[1:]: face.append(int(x)) faces.append(face) return verts, faces if __name__=="__main__": r=main("testcube.obj") print(r)
read
notification_test.go
/* Copyright 2016 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package comment import ( "reflect" "testing" "github.com/google/go-github/github" ) func TestParseNotification(t *testing.T) { tests := []struct { notif *Notification comment string }{ { notif: nil, comment: "I have nothing to do with a notification", }, { notif: nil, comment: " [NOTIF] Line can't start with space", }, { notif: nil, comment: "[NOTIF SOMETHING] Notif name can't have space", }, { notif: &Notification{Name: "NOTIF"}, comment: "[NOTIF]", }, { notif: nil,
notif: &Notification{Name: "NOTIF", Arguments: "Valid notification"}, comment: "[NOTIF] Valid notification", }, { notif: &Notification{Name: "NOTIF", Arguments: "Multiple Lines"}, comment: "[NOTIF] Multiple Lines \nAnd something else...", }, { notif: &Notification{Name: "NOTIF", Arguments: "Notif name is upper-cased"}, comment: "[notif] Notif name is upper-cased", }, { notif: &Notification{Name: "NOTIF", Arguments: "Arguments is trimmed"}, comment: "[notif] Arguments is trimmed ", }, } for _, test := range tests { actualNotif := ParseNotification(&github.IssueComment{Body: &test.comment}) if !reflect.DeepEqual(actualNotif, test.notif) { t.Error(actualNotif, "doesn't match expected notif:", test.notif) } } } func TestStringNotification(t *testing.T) { tests := []struct { notif *Notification str string }{ { notif: &Notification{Name: "NOTIF"}, str: "[NOTIF]", }, { notif: &Notification{Name: "NOTIF", Arguments: "Argument"}, str: "[NOTIF] Argument", }, { notif: &Notification{Name: "NOTIF", Arguments: "Argument", Context: "Context"}, str: "[NOTIF] Argument\n\nContext", }, { notif: &Notification{Name: "notif", Arguments: " Argument ", Context: "Context"}, str: "[NOTIF] Argument\n\nContext", }, } for _, test := range tests { actualString := test.notif.String() if actualString != test.str { t.Error(actualString, "doesn't match expected string:", test.str) } } }
comment: "Notif must be at the very beginning:\n[NOTIF]\nAnd something else...", }, {
compare_plots.py
import argparse import os import matplotlib.pyplot as plt import numpy as np import seaborn as sns from matplotlib.ticker import FuncFormatter from replay.aggregate_plots import lightcolors, darkcolors, Y_LIM_SHAPED_REWARD, Y_LIM_SPARSE_REWARD, millions from srl_zoo.utils import printGreen, printRed # Init seaborn sns.set() # Style for the title fontstyle = {'fontname': 'DejaVu Sans', 'fontsize': 16} def comparePlots(path, plots, y_limits, title="Learning Curve", timesteps=False, truncate_x=-1, no_display=False): """ :param path: (str) path to the folder where the plots are stored :param plots: ([str]) List of saved plots as npz file :param y_limits: ([float]) y-limits for the plot :param title: (str) plot title :param timesteps: (bool) Plot timesteps instead of episodes :param truncate_x: (int) Truncate the experiments after n ticks on the x-axis :param no_display: (bool) Set to true, the plot won't be displayed (useful when only saving plot) """ y_list = [] x_list = [] for plot in plots: saved_plot = np.load('{}/{}'.format(path, plot)) x_list.append(saved_plot['x']) y_list.append(saved_plot['y']) lengths = list(map(len, x_list)) min_x, max_x = np.min(lengths), np.max(lengths) print("Min x: {}".format(min_x)) print("Max x: {}".format(max_x)) if truncate_x > 0: min_x = min(truncate_x, min_x) print("Truncating the x-axis at {}".format(min_x)) x = np.array(x_list[0][:min_x]) printGreen("{} Experiments".format(len(y_list))) # print("Min, Max rewards:", np.min(y), np.max(y)) fig = plt.figure(title) for i in range(len(y_list)): label = plots[i].split('.npz')[0] y = y_list[i][:, :min_x] print('{}: {} experiments'.format(label, len(y))) # Compute mean for different seeds m = np.mean(y, axis=0) # Compute standard error s = np.squeeze(np.asarray(np.std(y, axis=0))) n = y.shape[0] plt.fill_between(x, m - s / np.sqrt(n), m + s / np.sqrt(n), color=lightcolors[i % len(lightcolors)], alpha=0.5) plt.plot(x, m, color=darkcolors[i % len(darkcolors)], label=label, linewidth=2) if timesteps: formatter = FuncFormatter(millions) plt.xlabel('Number of Timesteps') fig.axes[0].xaxis.set_major_formatter(formatter) else: plt.xlabel('Number of Episodes') plt.ylabel('Rewards') plt.title(title, **fontstyle) plt.ylim(y_limits) plt.legend(framealpha=0.8, frameon=True, labelspacing=0.01, loc='lower right', fontsize=16) if not no_display: plt.show() if __name__ == '__main__': parser = argparse.ArgumentParser(description="Plot trained agent") parser.add_argument('-i', '--input-dir', help='folder with the plots as npz files', type=str, required=True) parser.add_argument('-t', '--title', help='Plot title', type=str, default='Learning Curve') parser.add_argument('--episode_window', type=int, default=40, help='Episode window for moving average plot (default: 40)') parser.add_argument('--shape-reward', action='store_true', default=False, help='Change the y_limit to correspond shaped reward bounds') parser.add_argument('--y-lim', nargs=2, type=float, default=[-1, -1], help="limits for the y axis") parser.add_argument('--truncate-x', type=int, default=-1, help="Truncate the experiments after n ticks on the x-axis (default: -1, no truncation)") parser.add_argument('--timesteps', action='store_true', default=False, help='Plot timesteps instead of episodes') parser.add_argument('--no-display', action='store_true', default=False, help='Do not display plot') args = parser.parse_args() y_limits = args.y_lim if y_limits[0] == y_limits[1]: if args.shape_reward: y_limits = Y_LIM_SHAPED_REWARD else: y_limits = Y_LIM_SPARSE_REWARD print("Using default limits:", y_limits) plots = [f for f in os.listdir(args.input_dir) if f.endswith('.npz')] plots.sort() if len(plots) == 0:
comparePlots(args.input_dir, plots, title=args.title, y_limits=y_limits, no_display=args.no_display, timesteps=args.timesteps, truncate_x=args.truncate_x)
printRed("No npz files found in {}".format(args.input_dir)) exit(-1)
script_agent_augmented.py
import itertools import types import numpy as np import torch import click import gym import time import yaml from robos2r.model import build_model from .agent import Agent from .script_agent import ScriptAgent, make_noised from .utils import Rate from PIL import Image from pathlib import Path from einops import rearrange from torchvision import transforms as T @click.command(help="script_agent env_name [options]") @click.argument("env_name", type=str) @click.option("-s", "--seed", default=0, help="seed") @click.option("-t", "--times-repeat", default=1, help="times to repeat the script") @click.option("-n", "--add-noise", is_flag=True, help="adding noise to actions or not") @click.option( "-sc", "--skill-collection/--no-skill-collection", is_flag=True, help="whether to show the skills collection", ) def main(env_name, seed, times_repeat, add_noise, skill_collection): print("Loading Augmentor model...") diffaug_model_path = "/home/rgarciap/Remote/models/diffs2r_new/resnet_adam_lr_1e-3_lraug0.01_bs_64_L8/" diffaug_model_path = Path(diffaug_model_path) diffaug_cfg_path = diffaug_model_path / "config.yml" with open(str(diffaug_cfg_path), "rb") as f: diffaug_cfg = yaml.load(f, Loader=yaml.FullLoader) model_cfg = dict( name="diffaug", reg_output_size=3, aug_pipeline=diffaug_cfg["aug_pipeline"], multi=diffaug_cfg["multi_pipeline"], num_layers=diffaug_cfg["num_layers"], gumbel=diffaug_cfg["gumbel"], backbone_name=diffaug_cfg["backbone_name"], ) diffaug_model = build_model(model_cfg) diffaug_ckp_path = diffaug_model_path / "best_checkpoint.pth" checkpoint = torch.load(str(diffaug_ckp_path), map_location="cpu") diffaug_model.load_state_dict(checkpoint["model"]) augmentor = diffaug_model.augmentor augmentor.to("cpu") augmentor.eval() print("Model loaded") env = gym.make(env_name) scene = env.unwrapped.scene scene.renders(True) if skill_collection: scene.skill_data_collection = True env.seed(seed) for _ in range(times_repeat): obs = env.reset() agent = ScriptAgent(env) import matplotlib.pyplot as plt done = False i = 0 rate = Rate(scene.dt) action = agent.get_action() if add_noise: make_noised(action) frames = [] j = 0 while not done and action is not None: obs, reward, done, info = env.step(action) im = T.ToTensor()(obs["rgb0"]).unsqueeze(0) mask = torch.tensor(obs["mask0"]).unsqueeze(0) im, mask = augmentor((im, mask)) im = rearrange(im.detach().detach().squeeze(0).numpy(), "c h w -> h w c") im = Image.fromarray((im * 255).astype(np.uint8)) im.save(f"0/output{j}.jpeg") j += 1 action = agent.get_action() if add_noise and action is not None: make_noised(action) if action is None: info["failure_message"] = "End of Script." if not info["success"]: click.secho( "Failure Seed {}: {}".format(seed, info["failure_message"]), fg="red" ) print("Success", info["success"]) if __name__ == "__main__": main()
deals.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: deals.proto package filecoin_deals_pb import ( context "context" fmt "fmt" proto "github.com/golang/protobuf/proto" grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package type DealConfig struct { Miner string `protobuf:"bytes,1,opt,name=miner,proto3" json:"miner,omitempty"` EpochPrice uint64 `protobuf:"varint,2,opt,name=epochPrice,proto3" json:"epochPrice,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *DealConfig) Reset() { *m = DealConfig{} } func (m *DealConfig) String() string { return proto.CompactTextString(m) } func (*DealConfig) ProtoMessage() {} func (*DealConfig) Descriptor() ([]byte, []int) { return fileDescriptor_71783c876a92172d, []int{0} } func (m *DealConfig) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DealConfig.Unmarshal(m, b) } func (m *DealConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DealConfig.Marshal(b, m, deterministic) } func (m *DealConfig) XXX_Merge(src proto.Message) { xxx_messageInfo_DealConfig.Merge(m, src) } func (m *DealConfig) XXX_Size() int { return xxx_messageInfo_DealConfig.Size(m) } func (m *DealConfig) XXX_DiscardUnknown() { xxx_messageInfo_DealConfig.DiscardUnknown(m) } var xxx_messageInfo_DealConfig proto.InternalMessageInfo func (m *DealConfig) GetMiner() string { if m != nil { return m.Miner } return "" } func (m *DealConfig) GetEpochPrice() uint64 { if m != nil { return m.EpochPrice } return 0 } type DealInfo struct { ProposalCid string `protobuf:"bytes,1,opt,name=proposalCid,proto3" json:"proposalCid,omitempty"` StateID uint64 `protobuf:"varint,2,opt,name=stateID,proto3" json:"stateID,omitempty"` StateName string `protobuf:"bytes,3,opt,name=stateName,proto3" json:"stateName,omitempty"` Miner string `protobuf:"bytes,4,opt,name=miner,proto3" json:"miner,omitempty"` PieceRef []byte `protobuf:"bytes,5,opt,name=pieceRef,proto3" json:"pieceRef,omitempty"` Size uint64 `protobuf:"varint,6,opt,name=size,proto3" json:"size,omitempty"` PricePerEpoch uint64 `protobuf:"varint,7,opt,name=pricePerEpoch,proto3" json:"pricePerEpoch,omitempty"` Duration uint64 `protobuf:"varint,8,opt,name=duration,proto3" json:"duration,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *DealInfo) Reset() { *m = DealInfo{} } func (m *DealInfo) String() string { return proto.CompactTextString(m) } func (*DealInfo) ProtoMessage() {} func (*DealInfo) Descriptor() ([]byte, []int) { return fileDescriptor_71783c876a92172d, []int{1} } func (m *DealInfo) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DealInfo.Unmarshal(m, b) } func (m *DealInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_DealInfo.Marshal(b, m, deterministic) } func (m *DealInfo) XXX_Merge(src proto.Message) { xxx_messageInfo_DealInfo.Merge(m, src) } func (m *DealInfo) XXX_Size() int { return xxx_messageInfo_DealInfo.Size(m) } func (m *DealInfo) XXX_DiscardUnknown() { xxx_messageInfo_DealInfo.DiscardUnknown(m) } var xxx_messageInfo_DealInfo proto.InternalMessageInfo func (m *DealInfo) GetProposalCid() string { if m != nil { return m.ProposalCid } return "" } func (m *DealInfo) GetStateID() uint64 { if m != nil { return m.StateID } return 0 } func (m *DealInfo) GetStateName() string { if m != nil { return m.StateName } return "" } func (m *DealInfo) GetMiner() string { if m != nil { return m.Miner } return "" } func (m *DealInfo) GetPieceRef() []byte { if m != nil { return m.PieceRef } return nil } func (m *DealInfo) GetSize() uint64 { if m != nil { return m.Size } return 0 } func (m *DealInfo) GetPricePerEpoch() uint64 { if m != nil { return m.PricePerEpoch } return 0 } func (m *DealInfo) GetDuration() uint64 { if m != nil { return m.Duration } return 0 } type StoreParams struct { Address string `protobuf:"bytes,1,opt,name=address,proto3" json:"address,omitempty"` DealConfigs []*DealConfig `protobuf:"bytes,2,rep,name=dealConfigs,proto3" json:"dealConfigs,omitempty"` Duration uint64 `protobuf:"varint,3,opt,name=duration,proto3" json:"duration,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *StoreParams) Reset() { *m = StoreParams{} } func (m *StoreParams) String() string { return proto.CompactTextString(m) } func (*StoreParams) ProtoMessage() {} func (*StoreParams) Descriptor() ([]byte, []int) { return fileDescriptor_71783c876a92172d, []int{2} } func (m *StoreParams) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StoreParams.Unmarshal(m, b) } func (m *StoreParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_StoreParams.Marshal(b, m, deterministic) } func (m *StoreParams) XXX_Merge(src proto.Message) { xxx_messageInfo_StoreParams.Merge(m, src) } func (m *StoreParams) XXX_Size() int { return xxx_messageInfo_StoreParams.Size(m) } func (m *StoreParams) XXX_DiscardUnknown() { xxx_messageInfo_StoreParams.DiscardUnknown(m) } var xxx_messageInfo_StoreParams proto.InternalMessageInfo func (m *StoreParams) GetAddress() string { if m != nil { return m.Address } return "" } func (m *StoreParams) GetDealConfigs() []*DealConfig { if m != nil { return m.DealConfigs } return nil } func (m *StoreParams) GetDuration() uint64 { if m != nil { return m.Duration } return 0 } type StoreRequest struct { // Types that are valid to be assigned to Payload: // *StoreRequest_StoreParams // *StoreRequest_Chunk Payload isStoreRequest_Payload `protobuf_oneof:"payload"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *StoreRequest) Reset() { *m = StoreRequest{} } func (m *StoreRequest) String() string { return proto.CompactTextString(m) } func (*StoreRequest) ProtoMessage() {} func (*StoreRequest) Descriptor() ([]byte, []int) { return fileDescriptor_71783c876a92172d, []int{3} } func (m *StoreRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StoreRequest.Unmarshal(m, b) } func (m *StoreRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_StoreRequest.Marshal(b, m, deterministic) } func (m *StoreRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_StoreRequest.Merge(m, src) } func (m *StoreRequest) XXX_Size() int { return xxx_messageInfo_StoreRequest.Size(m) } func (m *StoreRequest) XXX_DiscardUnknown() { xxx_messageInfo_StoreRequest.DiscardUnknown(m) } var xxx_messageInfo_StoreRequest proto.InternalMessageInfo type isStoreRequest_Payload interface { isStoreRequest_Payload() } type StoreRequest_StoreParams struct { StoreParams *StoreParams `protobuf:"bytes,1,opt,name=storeParams,proto3,oneof"` } type StoreRequest_Chunk struct { Chunk []byte `protobuf:"bytes,2,opt,name=chunk,proto3,oneof"` } func (*StoreRequest_StoreParams) isStoreRequest_Payload() {} func (*StoreRequest_Chunk) isStoreRequest_Payload() {} func (m *StoreRequest) GetPayload() isStoreRequest_Payload { if m != nil { return m.Payload } return nil } func (m *StoreRequest) GetStoreParams() *StoreParams { if x, ok := m.GetPayload().(*StoreRequest_StoreParams); ok { return x.StoreParams } return nil } func (m *StoreRequest) GetChunk() []byte { if x, ok := m.GetPayload().(*StoreRequest_Chunk); ok { return x.Chunk } return nil } // XXX_OneofWrappers is for the internal use of the proto package. func (*StoreRequest) XXX_OneofWrappers() []interface{} { return []interface{}{ (*StoreRequest_StoreParams)(nil), (*StoreRequest_Chunk)(nil), } } type StoreReply struct { Cids []string `protobuf:"bytes,1,rep,name=cids,proto3" json:"cids,omitempty"` FailedDeals []*DealConfig `protobuf:"bytes,2,rep,name=failedDeals,proto3" json:"failedDeals,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *StoreReply) Reset() { *m = StoreReply{} } func (m *StoreReply) String() string { return proto.CompactTextString(m) } func (*StoreReply) ProtoMessage() {} func (*StoreReply) Descriptor() ([]byte, []int) { return fileDescriptor_71783c876a92172d, []int{4} } func (m *StoreReply) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StoreReply.Unmarshal(m, b) } func (m *StoreReply) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_StoreReply.Marshal(b, m, deterministic) } func (m *StoreReply) XXX_Merge(src proto.Message) { xxx_messageInfo_StoreReply.Merge(m, src) } func (m *StoreReply) XXX_Size() int { return xxx_messageInfo_StoreReply.Size(m) } func (m *StoreReply) XXX_DiscardUnknown() { xxx_messageInfo_StoreReply.DiscardUnknown(m) } var xxx_messageInfo_StoreReply proto.InternalMessageInfo func (m *StoreReply) GetCids() []string { if m != nil { return m.Cids } return nil } func (m *StoreReply) GetFailedDeals() []*DealConfig { if m != nil { return m.FailedDeals } return nil } type WatchRequest struct { Proposals []string `protobuf:"bytes,1,rep,name=proposals,proto3" json:"proposals,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *WatchRequest) Reset() { *m = WatchRequest{} } func (m *WatchRequest) String() string { return proto.CompactTextString(m) } func (*WatchRequest) ProtoMessage() {} func (*WatchRequest) Descriptor() ([]byte, []int) { return fileDescriptor_71783c876a92172d, []int{5} } func (m *WatchRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WatchRequest.Unmarshal(m, b) } func (m *WatchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_WatchRequest.Marshal(b, m, deterministic) } func (m *WatchRequest) XXX_Merge(src proto.Message) { xxx_messageInfo_WatchRequest.Merge(m, src) } func (m *WatchRequest) XXX_Size() int { return xxx_messageInfo_WatchRequest.Size(m) } func (m *WatchRequest) XXX_DiscardUnknown() { xxx_messageInfo_WatchRequest.DiscardUnknown(m) } var xxx_messageInfo_WatchRequest proto.InternalMessageInfo func (m *WatchRequest) GetProposals() []string { if m != nil { return m.Proposals } return nil } type WatchReply struct { DealInfo *DealInfo `protobuf:"bytes,1,opt,name=dealInfo,proto3" json:"dealInfo,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *WatchReply) Reset() { *m = WatchReply{} } func (m *WatchReply) String() string { return proto.CompactTextString(m) } func (*WatchReply) ProtoMessage() {} func (*WatchReply) Descriptor() ([]byte, []int) { return fileDescriptor_71783c876a92172d, []int{6} } func (m *WatchReply) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WatchReply.Unmarshal(m, b) } func (m *WatchReply) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_WatchReply.Marshal(b, m, deterministic) } func (m *WatchReply) XXX_Merge(src proto.Message) { xxx_messageInfo_WatchReply.Merge(m, src) } func (m *WatchReply) XXX_Size() int { return xxx_messageInfo_WatchReply.Size(m) } func (m *WatchReply) XXX_DiscardUnknown() { xxx_messageInfo_WatchReply.DiscardUnknown(m) } var xxx_messageInfo_WatchReply proto.InternalMessageInfo func (m *WatchReply) GetDealInfo() *DealInfo { if m != nil { return m.DealInfo } return nil } func init() { proto.RegisterType((*DealConfig)(nil), "filecoin.deals.pb.DealConfig") proto.RegisterType((*DealInfo)(nil), "filecoin.deals.pb.DealInfo") proto.RegisterType((*StoreParams)(nil), "filecoin.deals.pb.StoreParams") proto.RegisterType((*StoreRequest)(nil), "filecoin.deals.pb.StoreRequest") proto.RegisterType((*StoreReply)(nil), "filecoin.deals.pb.StoreReply") proto.RegisterType((*WatchRequest)(nil), "filecoin.deals.pb.WatchRequest") proto.RegisterType((*WatchReply)(nil), "filecoin.deals.pb.WatchReply") } func init() { proto.RegisterFile("deals.proto", fileDescriptor_71783c876a92172d) } var fileDescriptor_71783c876a92172d = []byte{ // 493 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xd1, 0x8a, 0xd3, 0x40, 0x14, 0xed, 0x6c, 0xda, 0x6d, 0x7b, 0xd3, 0x7d, 0x70, 0x10, 0x09, 0x6b, 0x77, 0x2d, 0xc1, 0x87, 0x3c, 0x48, 0x90, 0x8a, 0xf8, 0x28, 0x66, 0xb7, 0xb2, 0x45, 0x90, 0x30, 0x2e, 0xf8, 0x3c, 0x9b, 0xdc, 0xd8, 0xc1, 0x34, 0x13, 0x33, 0x29, 0x58, 0x9f, 0xfd, 0x0f, 0xc1, 0xef, 0xf3, 0x23, 0x64, 0xa6, 0x93, 0x26, 0xeb, 0xb6, 0xe0, 0xdb, 0xdc, 0x93, 0x7b, 0xcf, 0x3d, 0xe7, 0xcc, 0x04, 0xdc, 0x14, 0x79, 0xae, 0xc2, 0xb2, 0x92, 0xb5, 0xa4, 0x8f, 0x32, 0x91, 0x63, 0x22, 0x45, 0x11, 0x5a, 0xf4, 0xce, 0x8f, 0x00, 0xae, 0x91, 0xe7, 0x57, 0xb2, 0xc8, 0xc4, 0x17, 0xfa, 0x18, 0x06, 0x6b, 0x51, 0x60, 0xe5, 0x91, 0x19, 0x09, 0xc6, 0x6c, 0x57, 0xd0, 0x4b, 0x00, 0x2c, 0x65, 0xb2, 0x8a, 0x2b, 0x91, 0xa0, 0x77, 0x32, 0x23, 0x41, 0x9f, 0x75, 0x10, 0xff, 0x0f, 0x81, 0x91, 0x26, 0x59, 0x16, 0x99, 0xa4, 0x33, 0x70, 0xcb, 0x4a, 0x96, 0x52, 0xf1, 0xfc, 0x4a, 0xa4, 0x96, 0xa8, 0x0b, 0x51, 0x0f, 0x86, 0xaa, 0xe6, 0x35, 0x2e, 0xaf, 0x2d, 0x57, 0x53, 0xd2, 0x29, 0x8c, 0xcd, 0xf1, 0x23, 0x5f, 0xa3, 0xe7, 0x98, 0xc9, 0x16, 0x68, 0xc5, 0xf5, 0xbb, 0xe2, 0xce, 0x61, 0x54, 0x0a, 0x4c, 0x90, 0x61, 0xe6, 0x0d, 0x66, 0x24, 0x98, 0xb0, 0x7d, 0x4d, 0x29, 0xf4, 0x95, 0xf8, 0x81, 0xde, 0xa9, 0x59, 0x63, 0xce, 0xf4, 0x39, 0x9c, 0x95, 0x5a, 0x75, 0x8c, 0xd5, 0x42, 0x5b, 0xf0, 0x86, 0xe6, 0xe3, 0x7d, 0x50, 0xb3, 0xa6, 0x9b, 0x8a, 0xd7, 0x42, 0x16, 0xde, 0xc8, 0x34, 0xec, 0x6b, 0xff, 0x27, 0x01, 0xf7, 0x53, 0x2d, 0x2b, 0x8c, 0x79, 0xc5, 0xd7, 0x4a, 0xfb, 0xe1, 0x69, 0x5a, 0xa1, 0x52, 0xd6, 0x6d, 0x53, 0xd2, 0xb7, 0xbb, 0xf8, 0x77, 0xe1, 0x2a, 0xef, 0x64, 0xe6, 0x04, 0xee, 0xfc, 0x22, 0x7c, 0x70, 0x0b, 0x61, 0x7b, 0x05, 0xac, 0x3b, 0x71, 0x4f, 0x86, 0xf3, 0x8f, 0x8c, 0x0d, 0x4c, 0x8c, 0x0a, 0x86, 0xdf, 0x36, 0xa8, 0x6a, 0x1a, 0x81, 0xab, 0x5a, 0x55, 0x46, 0x8a, 0x3b, 0xbf, 0x3c, 0xb0, 0xac, 0xa3, 0xfd, 0xa6, 0xc7, 0xba, 0x43, 0xf4, 0x09, 0x0c, 0x92, 0xd5, 0xa6, 0xf8, 0x6a, 0x2e, 0x66, 0x72, 0xd3, 0x63, 0xbb, 0x32, 0x1a, 0xc3, 0xb0, 0xe4, 0xdb, 0x5c, 0xf2, 0xd4, 0xe7, 0x00, 0x76, 0x6d, 0x99, 0x6f, 0x75, 0xc2, 0x89, 0x48, 0xf5, 0x36, 0x27, 0x18, 0x33, 0x73, 0xd6, 0xae, 0x33, 0x2e, 0x72, 0x4c, 0xb5, 0xab, 0xff, 0x75, 0xdd, 0x99, 0xf0, 0x5f, 0xc0, 0xe4, 0x33, 0xaf, 0x93, 0x55, 0xe3, 0x6c, 0x0a, 0xe3, 0xe6, 0xfd, 0x34, 0x9b, 0x5a, 0xc0, 0x5f, 0x00, 0xd8, 0x6e, 0x2d, 0xe8, 0x0d, 0x8c, 0x52, 0xfb, 0x14, 0x6d, 0x04, 0x4f, 0x8f, 0x6c, 0xd6, 0x2d, 0x6c, 0xdf, 0x3c, 0xff, 0x45, 0xc0, 0x79, 0x17, 0x2f, 0xe9, 0x07, 0x18, 0x18, 0x7f, 0xf4, 0xd9, 0xb1, 0xe8, 0xac, 0xac, 0xf3, 0x8b, 0xe3, 0x0d, 0x65, 0xbe, 0xf5, 0x7b, 0x01, 0xd1, 0x64, 0x46, 0xdb, 0x41, 0xb2, 0xae, 0xc7, 0x83, 0x64, 0xad, 0x2d, 0xbf, 0xf7, 0x92, 0x44, 0xaf, 0x61, 0x2a, 0x64, 0x58, 0xe3, 0xf7, 0x5a, 0xe4, 0xf8, 0xb0, 0x3d, 0x3a, 0x7b, 0x6f, 0x21, 0x93, 0x62, 0x4c, 0x7e, 0x9f, 0x38, 0xb7, 0xb7, 0x8b, 0xbb, 0x53, 0xf3, 0xef, 0xbf, 0xfa, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x93, 0xa5, 0x8d, 0x6a, 0x0a, 0x04, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. var _ context.Context var _ grpc.ClientConn // This is a compile-time assertion to ensure that this generated file // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 // APIClient is the client API for API service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type APIClient interface { Store(ctx context.Context, opts ...grpc.CallOption) (API_StoreClient, error) Watch(ctx context.Context, in *WatchRequest, opts ...grpc.CallOption) (API_WatchClient, error) } type aPIClient struct { cc *grpc.ClientConn } func NewAPIClient(cc *grpc.ClientConn) APIClient { return &aPIClient{cc} } func (c *aPIClient) Store(ctx context.Context, opts ...grpc.CallOption) (API_StoreClient, error) { stream, err := c.cc.NewStream(ctx, &_API_serviceDesc.Streams[0], "/filecoin.deals.pb.API/Store", opts...) if err != nil { return nil, err } x := &aPIStoreClient{stream} return x, nil } type API_StoreClient interface { Send(*StoreRequest) error CloseAndRecv() (*StoreReply, error) grpc.ClientStream } type aPIStoreClient struct { grpc.ClientStream } func (x *aPIStoreClient) Send(m *StoreRequest) error { return x.ClientStream.SendMsg(m) } func (x *aPIStoreClient) CloseAndRecv() (*StoreReply, error) { if err := x.ClientStream.CloseSend(); err != nil { return nil, err } m := new(StoreReply) if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err } return m, nil } func (c *aPIClient) Watch(ctx context.Context, in *WatchRequest, opts ...grpc.CallOption) (API_WatchClient, error) { stream, err := c.cc.NewStream(ctx, &_API_serviceDesc.Streams[1], "/filecoin.deals.pb.API/Watch", opts...) if err != nil { return nil, err } x := &aPIWatchClient{stream} if err := x.ClientStream.SendMsg(in); err != nil { return nil, err } if err := x.ClientStream.CloseSend(); err != nil { return nil, err } return x, nil } type API_WatchClient interface { Recv() (*WatchReply, error) grpc.ClientStream } type aPIWatchClient struct { grpc.ClientStream } func (x *aPIWatchClient) Recv() (*WatchReply, error) { m := new(WatchReply) if err := x.ClientStream.RecvMsg(m); err != nil { return nil, err } return m, nil } // APIServer is the server API for API service. type APIServer interface { Store(API_StoreServer) error Watch(*WatchRequest, API_WatchServer) error } // UnimplementedAPIServer can be embedded to have forward compatible implementations. type UnimplementedAPIServer struct { } func (*UnimplementedAPIServer) Store(srv API_StoreServer) error { return status.Errorf(codes.Unimplemented, "method Store not implemented") } func (*UnimplementedAPIServer) Watch(req *WatchRequest, srv API_WatchServer) error { return status.Errorf(codes.Unimplemented, "method Watch not implemented") } func RegisterAPIServer(s *grpc.Server, srv APIServer) { s.RegisterService(&_API_serviceDesc, srv) } func
(srv interface{}, stream grpc.ServerStream) error { return srv.(APIServer).Store(&aPIStoreServer{stream}) } type API_StoreServer interface { SendAndClose(*StoreReply) error Recv() (*StoreRequest, error) grpc.ServerStream } type aPIStoreServer struct { grpc.ServerStream } func (x *aPIStoreServer) SendAndClose(m *StoreReply) error { return x.ServerStream.SendMsg(m) } func (x *aPIStoreServer) Recv() (*StoreRequest, error) { m := new(StoreRequest) if err := x.ServerStream.RecvMsg(m); err != nil { return nil, err } return m, nil } func _API_Watch_Handler(srv interface{}, stream grpc.ServerStream) error { m := new(WatchRequest) if err := stream.RecvMsg(m); err != nil { return err } return srv.(APIServer).Watch(m, &aPIWatchServer{stream}) } type API_WatchServer interface { Send(*WatchReply) error grpc.ServerStream } type aPIWatchServer struct { grpc.ServerStream } func (x *aPIWatchServer) Send(m *WatchReply) error { return x.ServerStream.SendMsg(m) } var _API_serviceDesc = grpc.ServiceDesc{ ServiceName: "filecoin.deals.pb.API", HandlerType: (*APIServer)(nil), Methods: []grpc.MethodDesc{}, Streams: []grpc.StreamDesc{ { StreamName: "Store", Handler: _API_Store_Handler, ClientStreams: true, }, { StreamName: "Watch", Handler: _API_Watch_Handler, ServerStreams: true, }, }, Metadata: "deals.proto", }
_API_Store_Handler
bench_test.go
package vcfgo import ( "os" "testing" ) func benchmarkReader(lazy bool, b *testing.B) { for n := 0; n < b.N; n++ { f, err := os.Open("examples/test.query.vcf") if err != nil { panic(err) } rdr, err := NewReader(f, lazy) if err != nil { panic(err) } j := 0 for { v := rdr.Read() if v == nil { break } j++ } } } func
(b *testing.B) { benchmarkReader(true, b) } func BenchmarkEager(b *testing.B) { benchmarkReader(false, b) }
BenchmarkLazy
diagnostics.rs
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { anyhow::{format_err, Error}, diagnostics_data::LogsData, fidl_fuchsia_developer_remotecontrol::{ArchiveIteratorMarker, ArchiveIteratorProxy}, fidl_fuchsia_test_manager as ftest_manager, fuchsia_async as fasync, futures::Stream, futures::{channel::mpsc, stream::BoxStream, SinkExt, StreamExt}, pin_project::pin_project, serde_json, std::{ pin::Pin, task::{Context, Poll}, }, }; #[cfg(not(target_os = "fuchsia"))] use log::warn; #[cfg(target_os = "fuchsia")] use crate::diagnostics::fuchsia::BatchLogStream; #[derive(Debug)] pub enum LogStreamProtocol { BatchIterator, ArchiveIterator, } #[pin_project] pub struct LogStream { #[pin] stream: BoxStream<'static, Result<LogsData, Error>>, iterator_server_end: Option<ftest_manager::LogsIterator>, } impl LogStream { fn new<S>(stream: S, iterator: ftest_manager::LogsIterator) -> Self where S: Stream<Item = Result<LogsData, Error>> + Send + 'static,
/// Creates a new `LogStream` forcing the backing log iterator protocol to the given one or the /// platform default. In Fuchsia, defaults to BatchIterator. In host, defaults to /// ArchiveIterator. If the platform is host and BatchIterator is requested, the request is /// ignored since that one is not supported in host. pub fn create(force_log_protocol: Option<LogStreamProtocol>) -> Result<LogStream, fidl::Error> { get_log_stream(force_log_protocol) } /// Takes the server end of the backing log iterator protocol. pub fn take_iterator_server_end(&mut self) -> Option<ftest_manager::LogsIterator> { self.iterator_server_end.take() } } impl Stream for LogStream { type Item = Result<LogsData, Error>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.project(); this.stream.poll_next(cx) } } #[cfg(target_os = "fuchsia")] fn get_log_stream(force_log_protocol: Option<LogStreamProtocol>) -> Result<LogStream, fidl::Error> { match force_log_protocol { Some(LogStreamProtocol::BatchIterator) => { let (stream, iterator) = BatchLogStream::new()?; Ok(LogStream::new(stream, iterator)) } Some(LogStreamProtocol::ArchiveIterator) => { let (stream, iterator) = ArchiveLogStream::new()?; Ok(LogStream::new(stream, iterator)) } None => { let (stream, iterator) = BatchLogStream::new()?; Ok(LogStream::new(stream, iterator)) } } } #[cfg(not(target_os = "fuchsia"))] fn get_log_stream(force_log_protocol: Option<LogStreamProtocol>) -> Result<LogStream, fidl::Error> { match force_log_protocol { Some(LogStreamProtocol::BatchIterator) => { warn!("Batch iterator is not supported in host, ignoring force_log_protocol"); let (stream, iterator) = ArchiveLogStream::new()?; Ok(LogStream::new(stream, iterator)) } None | Some(LogStreamProtocol::ArchiveIterator) => { let (stream, iterator) = ArchiveLogStream::new()?; Ok(LogStream::new(stream, iterator)) } } } #[cfg(target_os = "fuchsia")] mod fuchsia { use { super::*, diagnostics_data::Logs, diagnostics_reader::Subscription, fidl_fuchsia_diagnostics::BatchIteratorMarker, }; #[pin_project] pub struct BatchLogStream { #[pin] subscription: Subscription<Logs>, } impl BatchLogStream { pub fn new() -> Result<(Self, ftest_manager::LogsIterator), fidl::Error> { fidl::endpoints::create_proxy::<BatchIteratorMarker>().map(|(proxy, server_end)| { let subscription = Subscription::new(proxy); (Self { subscription }, ftest_manager::LogsIterator::Batch(server_end)) }) } } impl Stream for BatchLogStream { type Item = Result<LogsData, Error>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.project(); match this.subscription.poll_next(cx) { Poll::Ready(Some(Err(e))) => Poll::Ready(Some(Err(e.into()))), Poll::Ready(Some(Ok(value))) => Poll::Ready(Some(Ok(value))), Poll::Ready(None) => Poll::Ready(None), Poll::Pending => Poll::Pending, } } } } #[pin_project] struct ArchiveLogStream { #[pin] receiver: mpsc::Receiver<Result<LogsData, Error>>, _drain_task: fasync::Task<()>, } impl ArchiveLogStream { fn new() -> Result<(Self, ftest_manager::LogsIterator), fidl::Error> { fidl::endpoints::create_proxy::<ArchiveIteratorMarker>().map(|(proxy, server_end)| { let (receiver, _drain_task) = Self::start_streaming_logs(proxy); (Self { _drain_task, receiver }, ftest_manager::LogsIterator::Archive(server_end)) }) } } impl ArchiveLogStream { fn start_streaming_logs( proxy: ArchiveIteratorProxy, ) -> (mpsc::Receiver<Result<LogsData, Error>>, fasync::Task<()>) { let (mut sender, receiver) = mpsc::channel(32); let task = fasync::Task::spawn(async move { loop { let result = match proxy.get_next().await { Err(e) => { let _ = sender.send(Err(format_err!("Error calling GetNext: {:?}", e))).await; break; } Ok(batch) => batch, }; let entries = match result { Err(e) => { let _ = sender.send(Err(format_err!("GetNext returned error: {:?}", e))).await; break; } Ok(entries) => entries, }; if entries.is_empty() { break; } for data_str in entries.into_iter().map(|e| e.data).filter_map(|data| data) { let _ = match serde_json::from_str(&data_str) { Ok(data) => sender.send(Ok(data)).await, Err(e) => sender.send(Err(format_err!("Malformed json: {:?}", e))).await, }; } } }); (receiver, task) } } impl Stream for ArchiveLogStream { type Item = Result<LogsData, Error>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.project(); this.receiver.poll_next(cx) } } #[cfg(test)] mod tests { use { super::*, diagnostics_data::{ DiagnosticsHierarchy, LogsData, LogsField, Property, Severity, Timestamp, }, fidl::endpoints::ServerEnd, fuchsia_async as fasync, futures::TryStreamExt, }; #[cfg(target_os = "fuchsia")] mod fuchsia { use { super::*, fidl_fuchsia_diagnostics::{ BatchIteratorMarker, BatchIteratorRequest, FormattedContent, ReaderError, }, fidl_fuchsia_mem as fmem, fuchsia_zircon as zx, futures::StreamExt, matches::assert_matches, }; struct BatchIteratorOpts { with_error: bool, } /// Spanws a dummy batch iterator for testing that return 3 logs: "1", "2", "3" all with /// the same severity async fn spawn_batch_iterator_server( server_end: ServerEnd<BatchIteratorMarker>, opts: BatchIteratorOpts, ) { let mut request_stream = server_end.into_stream().expect("got stream"); let mut values = vec![1i64, 2, 3].into_iter(); while let Some(BatchIteratorRequest::GetNext { responder }) = request_stream.try_next().await.expect("get next request") { match values.next() { None => { responder.send(&mut Ok(vec![])).expect("send empty response"); } Some(value) => { if opts.with_error { responder.send(&mut Err(ReaderError::Io)).expect("send error"); continue; } let content = get_json_data(value); let size = content.len() as u64; let vmo = zx::Vmo::create(size).expect("create vmo"); vmo.write(content.as_bytes(), 0).expect("write vmo"); let result = FormattedContent::Json(fmem::Buffer { vmo, size }); responder.send(&mut Ok(vec![result])).expect("send response"); } } } } #[fasync::run_singlethreaded(test)] async fn log_stream_returns_logs() { let mut log_stream = LogStream::create(None).expect("got log stream"); let server_end = match log_stream.take_iterator_server_end() { Some(ftest_manager::LogsIterator::Batch(server_end)) => server_end, _ => panic!("unexpected logs iterator server end"), }; fasync::Task::spawn(spawn_batch_iterator_server( server_end, BatchIteratorOpts { with_error: false }, )) .detach(); assert_eq!(log_stream.next().await.unwrap().expect("got ok result").msg(), Some("1")); assert_eq!(log_stream.next().await.unwrap().expect("got ok result").msg(), Some("2")); assert_eq!(log_stream.next().await.unwrap().expect("got ok result").msg(), Some("3")); assert_matches!(log_stream.next().await, None); } #[fasync::run_singlethreaded(test)] async fn log_stream_can_return_errors() { let mut log_stream = LogStream::create(None).expect("got log stream"); let server_end = match log_stream.take_iterator_server_end() { Some(ftest_manager::LogsIterator::Batch(server_end)) => server_end, _ => panic!("unexpected logs iterator server end"), }; fasync::Task::spawn(spawn_batch_iterator_server( server_end, BatchIteratorOpts { with_error: true }, )) .detach(); assert_matches!(log_stream.next().await, Some(Err(_))); } #[fasync::run_singlethreaded(test)] async fn get_log_stream_on_fuchsia() { let mut stream = LogStream::create(None).expect("get log stream ok"); assert_matches!( stream.take_iterator_server_end(), Some(ftest_manager::LogsIterator::Batch(_)) ); let mut stream = LogStream::create(Some(LogStreamProtocol::ArchiveIterator)) .expect("get log stream ok"); assert_matches!( stream.take_iterator_server_end(), Some(ftest_manager::LogsIterator::Archive(_)) ); let mut stream = LogStream::create(Some(LogStreamProtocol::BatchIterator)) .expect("get log stream ok"); assert_matches!( stream.take_iterator_server_end(), Some(ftest_manager::LogsIterator::Batch(_)) ); } } #[cfg(not(target_os = "fuchsia"))] mod host { use { super::*, fidl_fuchsia_developer_remotecontrol::{ ArchiveIteratorEntry, ArchiveIteratorError, ArchiveIteratorMarker, ArchiveIteratorRequest, }, matches::assert_matches, }; async fn spawn_archive_iterator_server( server_end: ServerEnd<ArchiveIteratorMarker>, with_error: bool, ) { let mut request_stream = server_end.into_stream().expect("got stream"); let mut values = vec![1, 2, 3].into_iter(); while let Some(ArchiveIteratorRequest::GetNext { responder }) = request_stream.try_next().await.expect("get next request") { match values.next() { None => { responder.send(&mut Ok(vec![])).expect("send empty response"); } Some(value) => { if with_error { responder .send(&mut Err(ArchiveIteratorError::DataReadFailed)) .expect("send error"); continue; } let json_data = get_json_data(value); let result = ArchiveIteratorEntry { data: Some(json_data), ..ArchiveIteratorEntry::EMPTY }; responder.send(&mut Ok(vec![result])).expect("send response"); } } } } #[fasync::run_singlethreaded(test)] async fn get_log_stream_always_returns_archive_in_host() { let mut stream = LogStream::create(None).expect("get log stream ok"); assert_matches!( stream.take_iterator_server_end(), Some(ftest_manager::LogsIterator::Archive(_)) ); let mut stream = LogStream::create(Some(LogStreamProtocol::ArchiveIterator)) .expect("get log stream ok"); assert_matches!( stream.take_iterator_server_end(), Some(ftest_manager::LogsIterator::Archive(_)) ); let mut stream = LogStream::create(Some(LogStreamProtocol::BatchIterator)) .expect("get log stream ok"); assert_matches!( stream.take_iterator_server_end(), Some(ftest_manager::LogsIterator::Archive(_)) ); } #[fasync::run_singlethreaded(test)] async fn archive_stream_returns_logs() { let mut log_stream = LogStream::create(None).expect("got log stream"); let server_end = match log_stream.take_iterator_server_end() { Some(ftest_manager::LogsIterator::Archive(server_end)) => server_end, _ => panic!("unexpected logs iterator server end"), }; fasync::Task::spawn(spawn_archive_iterator_server(server_end, false)).detach(); assert_eq!(log_stream.next().await.unwrap().expect("got ok result").msg(), Some("1")); assert_eq!(log_stream.next().await.unwrap().expect("got ok result").msg(), Some("2")); assert_eq!(log_stream.next().await.unwrap().expect("got ok result").msg(), Some("3")); assert_matches!(log_stream.next().await, None); } #[fasync::run_singlethreaded(test)] async fn archive_stream_can_return_errors() { let mut log_stream = LogStream::create(None).expect("got log stream"); let server_end = match log_stream.take_iterator_server_end() { Some(ftest_manager::LogsIterator::Archive(server_end)) => server_end, _ => panic!("unexpected logs iterator server end"), }; fasync::Task::spawn(spawn_archive_iterator_server(server_end, true)).detach(); assert_matches!(log_stream.next().await, Some(Err(_))); } } fn get_json_data(value: i64) -> String { let hierarchy = DiagnosticsHierarchy::new( "root", vec![Property::String(LogsField::Msg, format!("{}", value))], vec![], ); let data = LogsData::for_logs( String::from("test/moniker"), Some(hierarchy), Timestamp::from(0), String::from("fake-url"), Severity::Info, 1, vec![], ); serde_json::to_string(&data).expect("serialize to json") } }
{ Self { stream: stream.boxed(), iterator_server_end: Some(iterator) } }
consts.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use back::abi; use llvm; use llvm::{ConstFCmp, ConstICmp, SetLinkage, SetUnnamedAddr}; use llvm::{InternalLinkage, ValueRef, Bool, True}; use middle::{check_const, const_eval, def}; use middle::const_eval::{const_int_checked_neg, const_uint_checked_neg}; use middle::const_eval::{const_int_checked_add, const_uint_checked_add}; use middle::const_eval::{const_int_checked_sub, const_uint_checked_sub}; use middle::const_eval::{const_int_checked_mul, const_uint_checked_mul}; use middle::const_eval::{const_int_checked_div, const_uint_checked_div}; use middle::const_eval::{const_int_checked_rem, const_uint_checked_rem}; use middle::const_eval::{const_int_checked_shl, const_uint_checked_shl}; use middle::const_eval::{const_int_checked_shr, const_uint_checked_shr}; use trans::{adt, closure, debuginfo, expr, inline, machine}; use trans::base::{self, push_ctxt}; use trans::common::*; use trans::declare; use trans::monomorphize; use trans::type_::Type; use trans::type_of; use middle::cast::{CastTy,IntTy}; use middle::subst::Substs; use middle::ty::{self, Ty}; use util::ppaux::{Repr, ty_to_string}; use util::nodemap::NodeMap; use std::iter::repeat; use libc::c_uint; use syntax::{ast, ast_util}; use syntax::parse::token; use syntax::ptr::P; pub type FnArgMap<'a> = Option<&'a NodeMap<ValueRef>>; pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) -> ValueRef { let _icx = push_ctxt("trans_lit"); debug!("const_lit: {:?}", lit); match lit.node { ast::LitByte(b) => C_integral(Type::uint_from_ty(cx, ast::TyU8), b as u64, false), ast::LitChar(i) => C_integral(Type::char(cx), i as u64, false), ast::LitInt(i, ast::SignedIntLit(t, _)) => { C_integral(Type::int_from_ty(cx, t), i, true) } ast::LitInt(u, ast::UnsignedIntLit(t)) => { C_integral(Type::uint_from_ty(cx, t), u, false) } ast::LitInt(i, ast::UnsuffixedIntLit(_)) => { let lit_int_ty = ty::node_id_to_type(cx.tcx(), e.id); match lit_int_ty.sty { ty::TyInt(t) => { C_integral(Type::int_from_ty(cx, t), i as u64, true) } ty::TyUint(t) => { C_integral(Type::uint_from_ty(cx, t), i as u64, false) } _ => cx.sess().span_bug(lit.span, &format!("integer literal has type {} (expected int \ or usize)", ty_to_string(cx.tcx(), lit_int_ty))) } } ast::LitFloat(ref fs, t) => { C_floating(&fs, Type::float_from_ty(cx, t)) } ast::LitFloatUnsuffixed(ref fs) => { let lit_float_ty = ty::node_id_to_type(cx.tcx(), e.id); match lit_float_ty.sty { ty::TyFloat(t) => { C_floating(&fs, Type::float_from_ty(cx, t)) } _ => { cx.sess().span_bug(lit.span, "floating point literal doesn't have the right type"); } } } ast::LitBool(b) => C_bool(cx, b), ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()), ast::LitBinary(ref data) => { addr_of(cx, C_bytes(cx, &data[..]), "binary") } } } pub fn ptrcast(val: ValueRef, ty: Type) -> ValueRef { unsafe { llvm::LLVMConstPointerCast(val, ty.to_ref()) } } fn addr_of_mut(ccx: &CrateContext, cv: ValueRef, kind: &str) -> ValueRef { unsafe { // FIXME: this totally needs a better name generation scheme, perhaps a simple global // counter? Also most other uses of gensym in trans. let gsym = token::gensym("_"); let name = format!("{}{}", kind, gsym.usize()); let gv = declare::define_global(ccx, &name[..], val_ty(cv)).unwrap_or_else(||{ ccx.sess().bug(&format!("symbol `{}` is already defined", name)); }); llvm::LLVMSetInitializer(gv, cv); SetLinkage(gv, InternalLinkage); SetUnnamedAddr(gv, true); gv } } pub fn addr_of(ccx: &CrateContext, cv: ValueRef, kind: &str) -> ValueRef { match ccx.const_globals().borrow().get(&cv) { Some(&gv) => return gv, None => {} } let gv = addr_of_mut(ccx, cv, kind); unsafe { llvm::LLVMSetGlobalConstant(gv, True); } ccx.const_globals().borrow_mut().insert(cv, gv); gv } fn const_deref_ptr(cx: &CrateContext, v: ValueRef) -> ValueRef { let v = match cx.const_unsized().borrow().get(&v) { Some(&v) => v, None => v }; unsafe { llvm::LLVMGetInitializer(v) } } fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, v: ValueRef, ty: Ty<'tcx>) -> (ValueRef, Ty<'tcx>) { match ty::deref(ty, true) { Some(mt) => { if type_is_sized(cx.tcx(), mt.ty) { (const_deref_ptr(cx, v), mt.ty) } else { // Derefing a fat pointer does not change the representation, // just the type to the unsized contents. (v, mt.ty) } } None => { cx.sess().bug(&format!("unexpected dereferenceable type {}", ty_to_string(cx.tcx(), ty))) } } } fn const_fn_call<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, node: ExprOrMethodCall, def_id: ast::DefId, arg_vals: &[ValueRef], param_substs: &'tcx Substs<'tcx>) -> ValueRef { let fn_like = const_eval::lookup_const_fn_by_id(ccx.tcx(), def_id); let fn_like = fn_like.expect("lookup_const_fn_by_id failed in const_fn_call"); let args = &fn_like.decl().inputs; assert_eq!(args.len(), arg_vals.len()); let arg_ids = args.iter().map(|arg| arg.pat.id); let fn_args = arg_ids.zip(arg_vals.iter().cloned()).collect(); let substs = ccx.tcx().mk_substs(node_id_substs(ccx, node, param_substs)); match fn_like.body().expr { Some(ref expr) => { const_expr(ccx, &**expr, substs, Some(&fn_args)).0 } None => C_nil(ccx) } } pub fn get_const_expr<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: ast::DefId, ref_expr: &ast::Expr) -> &'tcx ast::Expr { let def_id = inline::maybe_instantiate_inline(ccx, def_id); if def_id.krate != ast::LOCAL_CRATE { ccx.sess().span_bug(ref_expr.span, "cross crate constant could not be inlined"); } match const_eval::lookup_const_by_id(ccx.tcx(), def_id, Some(ref_expr.id)) { Some(ref expr) => expr, None => { ccx.sess().span_bug(ref_expr.span, "constant item not found") } } } fn get_const_val(ccx: &CrateContext, def_id: ast::DefId, ref_expr: &ast::Expr) -> ValueRef { let expr = get_const_expr(ccx, def_id, ref_expr); let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty()); get_const_expr_as_global(ccx, expr, check_const::ConstQualif::empty(), empty_substs) } pub fn get_const_expr_as_global<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, expr: &ast::Expr, qualif: check_const::ConstQualif, param_substs: &'tcx Substs<'tcx>) -> ValueRef { debug!("get_const_expr_as_global: {:?}", expr.id); // Special-case constants to cache a common global for all uses. match expr.node { ast::ExprPath(..) => { let def = ccx.tcx().def_map.borrow().get(&expr.id).unwrap().full_def(); match def { def::DefConst(def_id) | def::DefAssociatedConst(def_id, _) => { if !ccx.tcx().adjustments.borrow().contains_key(&expr.id) { debug!("get_const_expr_as_global ({:?}): found const {:?}", expr.id, def_id); return get_const_val(ccx, def_id, expr); } } _ => {} } } _ => {} } let key = (expr.id, param_substs); match ccx.const_values().borrow().get(&key) { Some(&val) => return val, None => {} } let val = if qualif.intersects(check_const::ConstQualif::NON_STATIC_BORROWS) { // Avoid autorefs as they would create global instead of stack // references, even when only the latter are correct. let ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &ty::expr_ty(ccx.tcx(), expr)); const_expr_unadjusted(ccx, expr, ty, param_substs, None) } else { const_expr(ccx, expr, param_substs, None).0 }; // boolean SSA values are i1, but they have to be stored in i8 slots, // otherwise some LLVM optimization passes don't work as expected let val = unsafe { if llvm::LLVMTypeOf(val) == Type::i1(ccx).to_ref() { llvm::LLVMConstZExt(val, Type::i8(ccx).to_ref()) } else { val } }; let lvalue = addr_of(ccx, val, "const"); ccx.const_values().borrow_mut().insert(key, lvalue); lvalue } pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr, param_substs: &'tcx Substs<'tcx>, fn_args: FnArgMap) -> (ValueRef, Ty<'tcx>) { let ety = monomorphize::apply_param_substs(cx.tcx(), param_substs, &ty::expr_ty(cx.tcx(), e)); let llconst = const_expr_unadjusted(cx, e, ety, param_substs, fn_args); let mut llconst = llconst; let mut ety_adjusted = monomorphize::apply_param_substs(cx.tcx(), param_substs, &ty::expr_ty_adjusted(cx.tcx(), e)); let opt_adj = cx.tcx().adjustments.borrow().get(&e.id).cloned(); match opt_adj { Some(ty::AdjustReifyFnPointer) => { // FIXME(#19925) once fn item types are // zero-sized, we'll need to do something here } Some(ty::AdjustUnsafeFnPointer) => { // purely a type-level thing } Some(ty::AdjustDerefRef(adj)) => { let mut ty = ety; // Save the last autoderef in case we can avoid it. if adj.autoderefs > 0 { for _ in 0..adj.autoderefs-1 { let (dv, dt) = const_deref(cx, llconst, ty); llconst = dv; ty = dt; } } if adj.autoref.is_some() { if adj.autoderefs == 0 { // Don't copy data to do a deref+ref // (i.e., skip the last auto-deref). llconst = addr_of(cx, llconst, "autoref"); ty = ty::mk_imm_rptr(cx.tcx(), cx.tcx().mk_region(ty::ReStatic), ty); } } else { let (dv, dt) = const_deref(cx, llconst, ty); llconst = dv; // If we derefed a fat pointer then we will have an // open type here. So we need to update the type with // the one returned from const_deref. ety_adjusted = dt; } if let Some(target) = adj.unsize { let target = monomorphize::apply_param_substs(cx.tcx(), param_substs, &target); let pointee_ty = ty::deref(ty, true) .expect("consts: unsizing got non-pointer type").ty; let (base, old_info) = if !type_is_sized(cx.tcx(), pointee_ty) { // Normally, the source is a thin pointer and we are // adding extra info to make a fat pointer. The exception // is when we are upcasting an existing object fat pointer // to use a different vtable. In that case, we want to // load out the original data pointer so we can repackage // it. (const_get_elt(cx, llconst, &[abi::FAT_PTR_ADDR as u32]), Some(const_get_elt(cx, llconst, &[abi::FAT_PTR_EXTRA as u32]))) } else { (llconst, None) }; let unsized_ty = ty::deref(target, true) .expect("consts: unsizing got non-pointer target type").ty; let ptr_ty = type_of::in_memory_type_of(cx, unsized_ty).ptr_to(); let base = ptrcast(base, ptr_ty); let info = expr::unsized_info(cx, pointee_ty, unsized_ty, old_info, param_substs); if old_info.is_none() { let prev_const = cx.const_unsized().borrow_mut() .insert(base, llconst); assert!(prev_const.is_none() || prev_const == Some(llconst)); } assert_eq!(abi::FAT_PTR_ADDR, 0); assert_eq!(abi::FAT_PTR_EXTRA, 1); llconst = C_struct(cx, &[base, info], false); } } None => {} }; let llty = type_of::sizing_type_of(cx, ety_adjusted); let csize = machine::llsize_of_alloc(cx, val_ty(llconst)); let tsize = machine::llsize_of_alloc(cx, llty); if csize != tsize { cx.sess().abort_if_errors(); unsafe { // FIXME these values could use some context llvm::LLVMDumpValue(llconst); llvm::LLVMDumpValue(C_undef(llty)); } cx.sess().bug(&format!("const {} of type {} has size {} instead of {}", e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety_adjusted), csize, tsize)); } (llconst, ety_adjusted) } fn check_unary_expr_validity(cx: &CrateContext, e: &ast::Expr, t: Ty, te: ValueRef) { // The only kind of unary expression that we check for validity // here is `-expr`, to check if it "overflows" (e.g. `-i32::MIN`). if let ast::ExprUnary(ast::UnNeg, ref inner_e) = e.node { // An unfortunate special case: we parse e.g. -128 as a // negation of the literal 128, which means if we're expecting // a i8 (or if it was already suffixed, e.g. `-128_i8`), then // 128 will have already overflowed to -128, and so then the // constant evaluator thinks we're trying to negate -128. // // Catch this up front by looking for ExprLit directly, // and just accepting it. if let ast::ExprLit(_) = inner_e.node
let result = match t.sty { ty::TyInt(int_type) => { let input = match const_to_opt_int(te) { Some(v) => v, None => return, }; const_int_checked_neg( input, e, Some(const_eval::IntTy::from(cx.tcx(), int_type))) } ty::TyUint(uint_type) => { let input = match const_to_opt_uint(te) { Some(v) => v, None => return, }; const_uint_checked_neg( input, e, Some(const_eval::UintTy::from(cx.tcx(), uint_type))) } _ => return, }; // We do not actually care about a successful result. if let Err(err) = result { cx.tcx().sess.span_err(e.span, &err.description()); } } } fn check_binary_expr_validity(cx: &CrateContext, e: &ast::Expr, t: Ty, te1: ValueRef, te2: ValueRef) { let b = if let ast::ExprBinary(b, _, _) = e.node { b } else { return }; let result = match t.sty { ty::TyInt(int_type) => { let (lhs, rhs) = match (const_to_opt_int(te1), const_to_opt_int(te2)) { (Some(v1), Some(v2)) => (v1, v2), _ => return, }; let opt_ety = Some(const_eval::IntTy::from(cx.tcx(), int_type)); match b.node { ast::BiAdd => const_int_checked_add(lhs, rhs, e, opt_ety), ast::BiSub => const_int_checked_sub(lhs, rhs, e, opt_ety), ast::BiMul => const_int_checked_mul(lhs, rhs, e, opt_ety), ast::BiDiv => const_int_checked_div(lhs, rhs, e, opt_ety), ast::BiRem => const_int_checked_rem(lhs, rhs, e, opt_ety), ast::BiShl => const_int_checked_shl(lhs, rhs, e, opt_ety), ast::BiShr => const_int_checked_shr(lhs, rhs, e, opt_ety), _ => return, } } ty::TyUint(uint_type) => { let (lhs, rhs) = match (const_to_opt_uint(te1), const_to_opt_uint(te2)) { (Some(v1), Some(v2)) => (v1, v2), _ => return, }; let opt_ety = Some(const_eval::UintTy::from(cx.tcx(), uint_type)); match b.node { ast::BiAdd => const_uint_checked_add(lhs, rhs, e, opt_ety), ast::BiSub => const_uint_checked_sub(lhs, rhs, e, opt_ety), ast::BiMul => const_uint_checked_mul(lhs, rhs, e, opt_ety), ast::BiDiv => const_uint_checked_div(lhs, rhs, e, opt_ety), ast::BiRem => const_uint_checked_rem(lhs, rhs, e, opt_ety), ast::BiShl => const_uint_checked_shl(lhs, rhs, e, opt_ety), ast::BiShr => const_uint_checked_shr(lhs, rhs, e, opt_ety), _ => return, } } _ => return, }; // We do not actually care about a successful result. if let Err(err) = result { cx.tcx().sess.span_err(e.span, &err.description()); } } fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr, ety: Ty<'tcx>, param_substs: &'tcx Substs<'tcx>, fn_args: FnArgMap) -> ValueRef { debug!("const_expr_unadjusted(e={}, ety={}, param_substs={})", e.repr(cx.tcx()), ety.repr(cx.tcx()), param_substs.repr(cx.tcx())); let map_list = |exprs: &[P<ast::Expr>]| -> Vec<ValueRef> { exprs.iter() .map(|e| const_expr(cx, &**e, param_substs, fn_args).0) .collect() }; unsafe { let _icx = push_ctxt("const_expr"); match e.node { ast::ExprLit(ref lit) => { const_lit(cx, e, &**lit) } ast::ExprBinary(b, ref e1, ref e2) => { /* Neither type is bottom, and we expect them to be unified * already, so the following is safe. */ let (te1, ty) = const_expr(cx, &**e1, param_substs, fn_args); debug!("const_expr_unadjusted: te1={}, ty={}", cx.tn().val_to_string(te1), ty.repr(cx.tcx())); let is_simd = ty::type_is_simd(cx.tcx(), ty); let intype = if is_simd { ty::simd_type(cx.tcx(), ty) } else { ty }; let is_float = ty::type_is_fp(intype); let signed = ty::type_is_signed(intype); let (te2, _) = const_expr(cx, &**e2, param_substs, fn_args); check_binary_expr_validity(cx, e, ty, te1, te2); match b.node { ast::BiAdd => { if is_float { llvm::LLVMConstFAdd(te1, te2) } else { llvm::LLVMConstAdd(te1, te2) } } ast::BiSub => { if is_float { llvm::LLVMConstFSub(te1, te2) } else { llvm::LLVMConstSub(te1, te2) } } ast::BiMul => { if is_float { llvm::LLVMConstFMul(te1, te2) } else { llvm::LLVMConstMul(te1, te2) } } ast::BiDiv => { if is_float { llvm::LLVMConstFDiv(te1, te2) } else if signed { llvm::LLVMConstSDiv(te1, te2) } else { llvm::LLVMConstUDiv(te1, te2) } } ast::BiRem => { if is_float { llvm::LLVMConstFRem(te1, te2) } else if signed { llvm::LLVMConstSRem(te1, te2) } else { llvm::LLVMConstURem(te1, te2) } } ast::BiAnd => llvm::LLVMConstAnd(te1, te2), ast::BiOr => llvm::LLVMConstOr(te1, te2), ast::BiBitXor => llvm::LLVMConstXor(te1, te2), ast::BiBitAnd => llvm::LLVMConstAnd(te1, te2), ast::BiBitOr => llvm::LLVMConstOr(te1, te2), ast::BiShl => { let te2 = base::cast_shift_const_rhs(b.node, te1, te2); llvm::LLVMConstShl(te1, te2) } ast::BiShr => { let te2 = base::cast_shift_const_rhs(b.node, te1, te2); if signed { llvm::LLVMConstAShr(te1, te2) } else { llvm::LLVMConstLShr(te1, te2) } } ast::BiEq | ast::BiNe | ast::BiLt | ast::BiLe | ast::BiGt | ast::BiGe => { if is_float { let cmp = base::bin_op_to_fcmp_predicate(cx, b.node); ConstFCmp(cmp, te1, te2) } else { let cmp = base::bin_op_to_icmp_predicate(cx, b.node, signed); let bool_val = ConstICmp(cmp, te1, te2); if is_simd { // LLVM outputs an `< size x i1 >`, so we need to perform // a sign extension to get the correctly sized type. llvm::LLVMConstIntCast(bool_val, val_ty(te1).to_ref(), True) } else { bool_val } } } } }, ast::ExprUnary(u, ref inner_e) => { let (te, ty) = const_expr(cx, &**inner_e, param_substs, fn_args); check_unary_expr_validity(cx, e, ty, te); let is_float = ty::type_is_fp(ty); match u { ast::UnUniq | ast::UnDeref => { const_deref(cx, te, ty).0 } ast::UnNot => llvm::LLVMConstNot(te), ast::UnNeg => { if is_float { llvm::LLVMConstFNeg(te) } else { llvm::LLVMConstNeg(te) } } } } ast::ExprField(ref base, field) => { let (bv, bt) = const_expr(cx, &**base, param_substs, fn_args); let brepr = adt::represent_type(cx, bt); expr::with_field_tys(cx.tcx(), bt, None, |discr, field_tys| { let ix = ty::field_idx_strict(cx.tcx(), field.node.name, field_tys); adt::const_get_field(cx, &*brepr, bv, discr, ix) }) } ast::ExprTupField(ref base, idx) => { let (bv, bt) = const_expr(cx, &**base, param_substs, fn_args); let brepr = adt::represent_type(cx, bt); expr::with_field_tys(cx.tcx(), bt, None, |discr, _| { adt::const_get_field(cx, &*brepr, bv, discr, idx.node) }) } ast::ExprIndex(ref base, ref index) => { let (bv, bt) = const_expr(cx, &**base, param_substs, fn_args); let iv = match const_eval::eval_const_expr_partial(cx.tcx(), &**index, None) { Ok(const_eval::const_int(i)) => i as u64, Ok(const_eval::const_uint(u)) => u, _ => cx.sess().span_bug(index.span, "index is not an integer-constant expression") }; let (arr, len) = match bt.sty { ty::TyArray(_, u) => (bv, C_uint(cx, u)), ty::TySlice(_) | ty::TyStr => { let e1 = const_get_elt(cx, bv, &[0]); (const_deref_ptr(cx, e1), const_get_elt(cx, bv, &[1])) } ty::TyRef(_, mt) => match mt.ty.sty { ty::TyArray(_, u) => { (const_deref_ptr(cx, bv), C_uint(cx, u)) }, _ => cx.sess().span_bug(base.span, &format!("index-expr base must be a vector \ or string type, found {}", ty_to_string(cx.tcx(), bt))) }, _ => cx.sess().span_bug(base.span, &format!("index-expr base must be a vector \ or string type, found {}", ty_to_string(cx.tcx(), bt))) }; let len = llvm::LLVMConstIntGetZExtValue(len) as u64; let len = match bt.sty { ty::TyBox(ty) | ty::TyRef(_, ty::mt{ty, ..}) => match ty.sty { ty::TyStr => { assert!(len > 0); len - 1 } _ => len }, _ => len }; if iv >= len { // FIXME #3170: report this earlier on in the const-eval // pass. Reporting here is a bit late. cx.sess().span_err(e.span, "const index-expr is out of bounds"); C_undef(type_of::type_of(cx, bt).element_type()) } else { const_get_elt(cx, arr, &[iv as c_uint]) } } ast::ExprCast(ref base, _) => { let t_cast = ety; let llty = type_of::type_of(cx, t_cast); let (v, t_expr) = const_expr(cx, &**base, param_substs, fn_args); debug!("trans_const_cast({} as {})", t_expr.repr(cx.tcx()), t_cast.repr(cx.tcx())); if expr::cast_is_noop(cx.tcx(), base, t_expr, t_cast) { return v; } if type_is_fat_ptr(cx.tcx(), t_expr) { // Fat pointer casts. let t_cast_inner = ty::deref(t_cast, true).expect("cast to non-pointer").ty; let ptr_ty = type_of::in_memory_type_of(cx, t_cast_inner).ptr_to(); let addr = ptrcast(const_get_elt(cx, v, &[abi::FAT_PTR_ADDR as u32]), ptr_ty); if type_is_fat_ptr(cx.tcx(), t_cast) { let info = const_get_elt(cx, v, &[abi::FAT_PTR_EXTRA as u32]); return C_struct(cx, &[addr, info], false) } else { return addr; } } match (CastTy::from_ty(cx.tcx(), t_expr).expect("bad input type for cast"), CastTy::from_ty(cx.tcx(), t_cast).expect("bad output type for cast")) { (CastTy::Int(IntTy::CEnum), CastTy::Int(_)) => { let repr = adt::represent_type(cx, t_expr); let discr = adt::const_get_discrim(cx, &*repr, v); let iv = C_integral(cx.int_type(), discr, false); let s = adt::is_discr_signed(&*repr) as Bool; llvm::LLVMConstIntCast(iv, llty.to_ref(), s) } (CastTy::Int(_), CastTy::Int(_)) => { let s = ty::type_is_signed(t_expr) as Bool; llvm::LLVMConstIntCast(v, llty.to_ref(), s) } (CastTy::Int(_), CastTy::Float) => { if ty::type_is_signed(t_expr) { llvm::LLVMConstSIToFP(v, llty.to_ref()) } else { llvm::LLVMConstUIToFP(v, llty.to_ref()) } } (CastTy::Float, CastTy::Float) => { llvm::LLVMConstFPCast(v, llty.to_ref()) } (CastTy::Float, CastTy::Int(IntTy::I)) => { llvm::LLVMConstFPToSI(v, llty.to_ref()) } (CastTy::Float, CastTy::Int(_)) => { llvm::LLVMConstFPToUI(v, llty.to_ref()) } (CastTy::Ptr(_), CastTy::Ptr(_)) | (CastTy::FnPtr, CastTy::Ptr(_)) | (CastTy::RPtr(_), CastTy::Ptr(_)) => { ptrcast(v, llty) } (CastTy::FnPtr, CastTy::FnPtr) => ptrcast(v, llty), // isn't this a coercion? (CastTy::Int(_), CastTy::Ptr(_)) => { llvm::LLVMConstIntToPtr(v, llty.to_ref()) } (CastTy::Ptr(_), CastTy::Int(_)) | (CastTy::FnPtr, CastTy::Int(_)) => { llvm::LLVMConstPtrToInt(v, llty.to_ref()) } _ => { cx.sess().impossible_case(e.span, "bad combination of types for cast") } } } ast::ExprAddrOf(ast::MutImmutable, ref sub) => { // If this is the address of some static, then we need to return // the actual address of the static itself (short circuit the rest // of const eval). let mut cur = sub; loop { match cur.node { ast::ExprParen(ref sub) => cur = sub, ast::ExprBlock(ref blk) => { if let Some(ref sub) = blk.expr { cur = sub; } else { break; } } _ => break, } } let opt_def = cx.tcx().def_map.borrow().get(&cur.id).map(|d| d.full_def()); if let Some(def::DefStatic(def_id, _)) = opt_def { get_static_val(cx, def_id, ety) } else { // If this isn't the address of a static, then keep going through // normal constant evaluation. let (v, _) = const_expr(cx, &**sub, param_substs, fn_args); addr_of(cx, v, "ref") } } ast::ExprAddrOf(ast::MutMutable, ref sub) => { let (v, _) = const_expr(cx, &**sub, param_substs, fn_args); addr_of_mut(cx, v, "ref_mut_slice") } ast::ExprTup(ref es) => { let repr = adt::represent_type(cx, ety); let vals = map_list(&es[..]); adt::trans_const(cx, &*repr, 0, &vals[..]) } ast::ExprStruct(_, ref fs, ref base_opt) => { let repr = adt::represent_type(cx, ety); let base_val = match *base_opt { Some(ref base) => Some(const_expr(cx, &**base, param_substs, fn_args)), None => None }; expr::with_field_tys(cx.tcx(), ety, Some(e.id), |discr, field_tys| { let cs = field_tys.iter().enumerate() .map(|(ix, &field_ty)| { match fs.iter().find(|f| field_ty.name == f.ident.node.name) { Some(ref f) => const_expr(cx, &*f.expr, param_substs, fn_args).0, None => { match base_val { Some((bv, _)) => { adt::const_get_field(cx, &*repr, bv, discr, ix) } None => { cx.sess().span_bug(e.span, "missing struct field") } } } } }).collect::<Vec<_>>(); if ty::type_is_simd(cx.tcx(), ety) { C_vector(&cs[..]) } else { adt::trans_const(cx, &*repr, discr, &cs[..]) } }) } ast::ExprVec(ref es) => { let unit_ty = ty::sequence_element_type(cx.tcx(), ety); let llunitty = type_of::type_of(cx, unit_ty); let vs = es.iter().map(|e| const_expr(cx, &**e, param_substs, fn_args).0) .collect::<Vec<_>>(); // If the vector contains enums, an LLVM array won't work. if vs.iter().any(|vi| val_ty(*vi) != llunitty) { C_struct(cx, &vs[..], false) } else { C_array(llunitty, &vs[..]) } } ast::ExprRepeat(ref elem, ref count) => { let unit_ty = ty::sequence_element_type(cx.tcx(), ety); let llunitty = type_of::type_of(cx, unit_ty); let n = ty::eval_repeat_count(cx.tcx(), count); let unit_val = const_expr(cx, &**elem, param_substs, fn_args).0; let vs: Vec<_> = repeat(unit_val).take(n).collect(); if val_ty(unit_val) != llunitty { C_struct(cx, &vs[..], false) } else { C_array(llunitty, &vs[..]) } } ast::ExprPath(..) => { let def = cx.tcx().def_map.borrow().get(&e.id).unwrap().full_def(); match def { def::DefLocal(id) => { if let Some(val) = fn_args.and_then(|args| args.get(&id).cloned()) { val } else { cx.sess().span_bug(e.span, "const fn argument not found") } } def::DefFn(..) | def::DefMethod(..) => { expr::trans_def_fn_unadjusted(cx, e, def, param_substs).val } def::DefConst(def_id) | def::DefAssociatedConst(def_id, _) => { const_deref_ptr(cx, get_const_val(cx, def_id, e)) } def::DefVariant(enum_did, variant_did, _) => { let vinfo = ty::enum_variant_with_id(cx.tcx(), enum_did, variant_did); if !vinfo.args.is_empty() { // N-ary variant. expr::trans_def_fn_unadjusted(cx, e, def, param_substs).val } else { // Nullary variant. let repr = adt::represent_type(cx, ety); adt::trans_const(cx, &*repr, vinfo.disr_val, &[]) } } def::DefStruct(_) => { if let ty::TyBareFn(..) = ety.sty { // Tuple struct. expr::trans_def_fn_unadjusted(cx, e, def, param_substs).val } else { // Unit struct. C_null(type_of::type_of(cx, ety)) } } _ => { cx.sess().span_bug(e.span, "expected a const, fn, struct, \ or variant def") } } } ast::ExprCall(ref callee, ref args) => { let mut callee = &**callee; loop { callee = match callee.node { ast::ExprParen(ref inner) => &**inner, ast::ExprBlock(ref block) => match block.expr { Some(ref tail) => &**tail, None => break }, _ => break }; } let def = cx.tcx().def_map.borrow()[&callee.id].full_def(); let arg_vals = map_list(args); match def { def::DefFn(did, _) | def::DefMethod(did, _) => { const_fn_call(cx, ExprId(callee.id), did, &arg_vals, param_substs) } def::DefStruct(_) => { if ty::type_is_simd(cx.tcx(), ety) { C_vector(&arg_vals[..]) } else { let repr = adt::represent_type(cx, ety); adt::trans_const(cx, &*repr, 0, &arg_vals[..]) } } def::DefVariant(enum_did, variant_did, _) => { let repr = adt::represent_type(cx, ety); let vinfo = ty::enum_variant_with_id(cx.tcx(), enum_did, variant_did); adt::trans_const(cx, &*repr, vinfo.disr_val, &arg_vals[..]) } _ => cx.sess().span_bug(e.span, "expected a struct, variant, or const fn def") } } ast::ExprMethodCall(_, _, ref args) => { let arg_vals = map_list(args); let method_call = ty::MethodCall::expr(e.id); let method_did = match cx.tcx().method_map.borrow()[&method_call].origin { ty::MethodStatic(did) => did, _ => cx.sess().span_bug(e.span, "expected a const method def") }; const_fn_call(cx, MethodCallKey(method_call), method_did, &arg_vals, param_substs) } ast::ExprParen(ref e) => const_expr(cx, &**e, param_substs, fn_args).0, ast::ExprBlock(ref block) => { match block.expr { Some(ref expr) => const_expr(cx, &**expr, param_substs, fn_args).0, None => C_nil(cx) } } ast::ExprClosure(_, ref decl, ref body) => { closure::trans_closure_expr(closure::Dest::Ignore(cx), decl, body, e.id, param_substs); C_null(type_of::type_of(cx, ety)) } _ => cx.sess().span_bug(e.span, "bad constant expression type in consts::const_expr") } } } pub fn trans_static(ccx: &CrateContext, m: ast::Mutability, id: ast::NodeId) -> ValueRef { unsafe { let _icx = push_ctxt("trans_static"); let g = base::get_item_val(ccx, id); // At this point, get_item_val has already translated the // constant's initializer to determine its LLVM type. let v = ccx.static_values().borrow().get(&id).unwrap().clone(); // boolean SSA values are i1, but they have to be stored in i8 slots, // otherwise some LLVM optimization passes don't work as expected let v = if llvm::LLVMTypeOf(v) == Type::i1(ccx).to_ref() { llvm::LLVMConstZExt(v, Type::i8(ccx).to_ref()) } else { v }; llvm::LLVMSetInitializer(g, v); // As an optimization, all shared statics which do not have interior // mutability are placed into read-only memory. if m != ast::MutMutable { let node_ty = ty::node_id_to_type(ccx.tcx(), id); let tcontents = ty::type_contents(ccx.tcx(), node_ty); if !tcontents.interior_unsafe() { llvm::LLVMSetGlobalConstant(g, True); } } debuginfo::create_global_var_metadata(ccx, id, g); g } } fn get_static_val<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, did: ast::DefId, ty: Ty<'tcx>) -> ValueRef { if ast_util::is_local(did) { return base::get_item_val(ccx, did.node) } base::trans_external_path(ccx, did, ty) }
{ return; }
cylinders.rs
// Copyright (c) 2017 The Noise-rs Developers. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT // or http://opensource.org/licenses/MIT>, at your option. All files in the // project carrying such notice may not be copied, modified, or distributed // except according to those terms. use math::{Point2, Point3, Point4}; use noise_fns::NoiseFn;
/// Noise function that outputs concentric cylinders. /// /// This noise function outputs concentric cylinders centered on the origin. The /// cylinders are oriented along the z axis similar to the concentric rings of /// a tree. Each cylinder extends infinitely along the z axis. #[derive(Clone, Copy, Debug)] pub struct Cylinders { /// Frequency of the concentric objects. pub frequency: f64, } impl Cylinders { pub fn new() -> Cylinders { Cylinders { frequency: DEFAULT_CYLINDERS_FREQUENCY } } pub fn set_frequency(self, frequency: f64) -> Cylinders { Cylinders { frequency: frequency } } } impl Default for Cylinders { fn default() -> Self { Self::new() } } impl NoiseFn<Point2<f64>> for Cylinders { fn get(&self, point: Point2<f64>) -> f64 { calculate_cylinders(&point, self.frequency) } } impl NoiseFn<Point3<f64>> for Cylinders { fn get(&self, point: Point3<f64>) -> f64 { calculate_cylinders(&point, self.frequency) } } impl NoiseFn<Point4<f64>> for Cylinders { fn get(&self, point: Point4<f64>) -> f64 { calculate_cylinders(&point, self.frequency) } } fn calculate_cylinders(point: &[f64], frequency: f64) -> f64 { // Scale the inputs by the frequency. let x = point[0] * frequency; let y = point[1] * frequency; // Calculate the distance of the point from the origin. let dist_from_center = (x.powi(2) + y.powi(2)).sqrt(); let dist_from_smaller_sphere = dist_from_center - dist_from_center.floor(); let dist_from_larger_sphere = 1.0 - dist_from_smaller_sphere; let nearest_dist = dist_from_smaller_sphere.min(dist_from_larger_sphere); // Shift the result to be in the -1.0 to +1.0 range. 1.0 - (nearest_dist * 4.0) }
/// Default cylinders frequency pub const DEFAULT_CYLINDERS_FREQUENCY: f64 = 1.0;
agence.service.ts
import { HttpClient, HttpHeaders } from "@angular/common/http"; import { Injectable } from "@angular/core"; @Injectable({ providedIn: "root", }) export class AgenceService {
public httpOptions: any; constructor(private http: HttpClient) {} getAllAgences() { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.get("http://localhost:8000/agence", this.httpOptions); } getAllCities() { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.get("http://127.0.0.1:8000/cities", this.httpOptions); } getAgenceById(id) { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.get( "http://localhost:8000/agence/" + id, this.httpOptions ); } posterAgence(agence: any) { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.post( "http://localhost:8000/agence/store", agence, this.httpOptions ); } updateAgence(agence: any, id) { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.put( "http://localhost:8000/agence/update/" + id, agence, this.httpOptions ); } removeAgence(id) { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.delete( "http://localhost:8000/agence/delete/" + id, this.httpOptions ); } getAllUsersAgences() { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.get("http://127.0.0.1:8000/userAgence", this.httpOptions); } posterUserAgence(user: any, agence: any) { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.get( "http://localhost:8000/agenceuser/" + user + "/" + agence, this.httpOptions ); } updateUserAgence(user: any, agence: any, id) { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.put( "http://localhost:8000/agenceuser/update/" + id + "/" + user + "/" + agence, this.httpOptions ); } removeUserAgence(id) { this.httpOptions = { headers: new HttpHeaders({ "Content-Type": "application/json", Authorization: "Token " + localStorage.getItem("token"), }), }; return this.http.delete( "http://localhost:8000/agenceuser/delete/" + id, this.httpOptions ); } }
NTriplesParseError.rs
// This file is part of olympus-xmp. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/raphaelcohn/olympus-xmp/master/COPYRIGHT. No part of olympus-xmp, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2022 The developers of olympus-xmp. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/raphaelcohn/olympus-xmp/master/COPYRIGHT. /// Parse error. #[derive(Debug, Clone, Eq, PartialEq)] pub enum NTriplesParseError { #[allow(missing_docs)] NTripleParse(NTripleParseError), #[allow(missing_docs)] OutOfMemory(TryReserveError), } impl Display for NTriplesParseError { #[inline(always)] fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {
} impl error::Error for NTriplesParseError { #[inline(always)] fn source(&self) -> Option<&(dyn error::Error + 'static)> { use NTriplesParseError::*; match self { NTripleParse(cause) => Some(cause), OutOfMemory(cause) => Some(cause), } } }
Debug::fmt(self, formatter) }
_document.js
import { ServerStyleSheet } from 'styled-components' import Document, { Html, Head, Main, NextScript } from 'next/document' export default class MyDocument extends Document { static async getInitialProps(ctx) { const sheet = new ServerStyleSheet() const originalRenderPage = ctx.renderPage try { ctx.renderPage = () => originalRenderPage({ enhanceApp: (App) => (props) => sheet.collectStyles(<App {...props} />), }) const initialProps = await Document.getInitialProps(ctx) return { ...initialProps, styles: ( <> {initialProps.styles}
</> ), } } finally { sheet.seal() } } render() { return ( <Html> <Head /> <body className="stork-multitheme"> <Main /> <NextScript /> </body> </Html> ) } }
{sheet.getStyleElement()}
msa_reply_affected_entities.rs
/* * CrowdStrike API Specification * * Use this API specification as a reference for the API endpoints you can use to interact with your Falcon environment. These endpoints support authentication via OAuth2 and interact with detections and network containment. For detailed usage guides and more information about API endpoints that don't yet support OAuth2, see our [documentation inside the Falcon console](https://falcon.crowdstrike.com/support/documentation). To use the APIs described below, combine the base URL with the path shown for each API endpoint. For commercial cloud customers, your base URL is `https://api.crowdstrike.com`. Each API endpoint requires authorization via an OAuth2 token. Your first API request should retrieve an OAuth2 token using the `oauth2/token` endpoint, such as `https://api.crowdstrike.com/oauth2/token`. For subsequent requests, include the OAuth2 token in an HTTP authorization header. Tokens expire after 30 minutes, after which you should make a new token request to continue making API requests. * * The version of the OpenAPI document: rolling * * Generated by: https://openapi-generator.tech */ #[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] pub struct
{ #[serde(rename = "errors")] pub errors: Vec<crate::models::MsaApiError>, #[serde(rename = "meta")] pub meta: Box<crate::models::MsaMetaInfo>, #[serde(rename = "resources")] pub resources: Vec<crate::models::MsaAffectedEntity>, } impl MsaReplyAffectedEntities { pub fn new(errors: Vec<crate::models::MsaApiError>, meta: crate::models::MsaMetaInfo, resources: Vec<crate::models::MsaAffectedEntity>) -> MsaReplyAffectedEntities { MsaReplyAffectedEntities { errors, meta: Box::new(meta), resources } } }
MsaReplyAffectedEntities
ralstoniainsidiosa.py
""" This file offers the methods to automatically retrieve the graph Ralstonia insidiosa. The graph is automatically retrieved from the STRING repository. References --------------------- Please cite the following if you use the data: ```bib @article{szklarczyk2019string, title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets}, author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others}, journal={Nucleic acids research}, volume={47}, number={D1}, pages={D607--D613}, year={2019}, publisher={Oxford University Press} } ``` """ from typing import Dict from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph from ...ensmallen import Graph # pylint: disable=import-error def
( directed: bool = False, preprocess: bool = True, load_nodes: bool = True, verbose: int = 2, cache: bool = True, cache_path: str = "graphs/string", version: str = "links.v11.5", **additional_graph_kwargs: Dict ) -> Graph: """Return new instance of the Ralstonia insidiosa graph. The graph is automatically retrieved from the STRING repository. Parameters ------------------- directed: bool = False Wether to load the graph as directed or undirected. By default false. preprocess: bool = True Whether to preprocess the graph to be loaded in optimal time and memory. load_nodes: bool = True, Whether to load the nodes vocabulary or treat the nodes simply as a numeric range. verbose: int = 2, Wether to show loading bars during the retrieval and building of the graph. cache: bool = True Whether to use cache, i.e. download files only once and preprocess them only once. cache_path: str = "graphs" Where to store the downloaded graphs. version: str = "links.v11.5" The version of the graph to retrieve. The available versions are: - homology.v11.5 - physical.links.v11.5 - links.v11.5 additional_graph_kwargs: Dict Additional graph kwargs. Returns ----------------------- Instace of Ralstonia insidiosa graph. References --------------------- Please cite the following if you use the data: ```bib @article{szklarczyk2019string, title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets}, author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others}, journal={Nucleic acids research}, volume={47}, number={D1}, pages={D607--D613}, year={2019}, publisher={Oxford University Press} } ``` """ return AutomaticallyRetrievedGraph( graph_name="RalstoniaInsidiosa", repository="string", version=version, directed=directed, preprocess=preprocess, load_nodes=load_nodes, verbose=verbose, cache=cache, cache_path=cache_path, additional_graph_kwargs=additional_graph_kwargs )()
RalstoniaInsidiosa
statistics.py
#!/usr/bin/env python import dateutils def quartiles(values): """ Returns the (rough) quintlines of a series of values. This is not intended to be statistically correct - it's not a quick 'n' dirty measure. """ return [i * max(values) / 4 for i in range(5)] def longest_streak(dates):
def current_streak(dates): """ Given a list of datetime.date objects, return today's date (if present) and all/any preceding consecutive dates. """ streak = [] current_date = dateutils.today() while current_date in dates: streak.append(current_date) current_date = dateutils.previous_day(current_date) return sorted(streak)
""" Given a list of datetime.date objects, return the longest sublist of consecutive dates. If there are multiple longest sublists of the same length, then the first such sublist is returned. """ if not dates: return [] dates = sorted(dates) streaks = [] current_streak = [dates[0]] # For each date, check to see whether it extends the current streak for idx in range(1, len(dates)): date = dates[idx] if dateutils.previous_day(date) == current_streak[-1]: current_streak.append(date) else: streaks.append(current_streak) current_streak = [date] # When we've gone through all the dates, save the last streak streaks.append(current_streak) return max(streaks, key=len)
test_pathlib.py
import collections.abc import io import os import sys import errno import pathlib import pickle import socket import stat import tempfile import unittest from unittest import mock from test import support from test.support import TESTFN, FakePath try: import grp, pwd except ImportError: grp = pwd = None class _BaseFlavourTest(object): def _check_parse_parts(self, arg, expected): f = self.flavour.parse_parts sep = self.flavour.sep altsep = self.flavour.altsep actual = f([x.replace('/', sep) for x in arg]) self.assertEqual(actual, expected) if altsep: actual = f([x.replace('/', altsep) for x in arg]) self.assertEqual(actual, expected) def test_parse_parts_common(self): check = self._check_parse_parts sep = self.flavour.sep # Unanchored parts. check([], ('', '', [])) check(['a'], ('', '', ['a'])) check(['a/'], ('', '', ['a'])) check(['a', 'b'], ('', '', ['a', 'b'])) # Expansion. check(['a/b'], ('', '', ['a', 'b'])) check(['a/b/'], ('', '', ['a', 'b'])) check(['a', 'b/c', 'd'], ('', '', ['a', 'b', 'c', 'd'])) # Collapsing and stripping excess slashes. check(['a', 'b//c', 'd'], ('', '', ['a', 'b', 'c', 'd'])) check(['a', 'b/c/', 'd'], ('', '', ['a', 'b', 'c', 'd'])) # Eliminating standalone dots. check(['.'], ('', '', [])) check(['.', '.', 'b'], ('', '', ['b'])) check(['a', '.', 'b'], ('', '', ['a', 'b'])) check(['a', '.', '.'], ('', '', ['a'])) # The first part is anchored. check(['/a/b'], ('', sep, [sep, 'a', 'b'])) check(['/a', 'b'], ('', sep, [sep, 'a', 'b'])) check(['/a/', 'b'], ('', sep, [sep, 'a', 'b'])) # Ignoring parts before an anchored part. check(['a', '/b', 'c'], ('', sep, [sep, 'b', 'c'])) check(['a', '/b', '/c'], ('', sep, [sep, 'c'])) class PosixFlavourTest(_BaseFlavourTest, unittest.TestCase): flavour = pathlib._posix_flavour def test_parse_parts(self): check = self._check_parse_parts # Collapsing of excess leading slashes, except for the double-slash # special case. check(['//a', 'b'], ('', '//', ['//', 'a', 'b'])) check(['///a', 'b'], ('', '/', ['/', 'a', 'b'])) check(['////a', 'b'], ('', '/', ['/', 'a', 'b'])) # Paths which look like NT paths aren't treated specially. check(['c:a'], ('', '', ['c:a'])) check(['c:\\a'], ('', '', ['c:\\a'])) check(['\\a'], ('', '', ['\\a'])) def test_splitroot(self): f = self.flavour.splitroot self.assertEqual(f(''), ('', '', '')) self.assertEqual(f('a'), ('', '', 'a')) self.assertEqual(f('a/b'), ('', '', 'a/b')) self.assertEqual(f('a/b/'), ('', '', 'a/b/')) self.assertEqual(f('/a'), ('', '/', 'a')) self.assertEqual(f('/a/b'), ('', '/', 'a/b')) self.assertEqual(f('/a/b/'), ('', '/', 'a/b/')) # The root is collapsed when there are redundant slashes # except when there are exactly two leading slashes, which # is a special case in POSIX. self.assertEqual(f('//a'), ('', '//', 'a')) self.assertEqual(f('///a'), ('', '/', 'a')) self.assertEqual(f('///a/b'), ('', '/', 'a/b')) # Paths which look like NT paths aren't treated specially. self.assertEqual(f('c:/a/b'), ('', '', 'c:/a/b')) self.assertEqual(f('\\/a/b'), ('', '', '\\/a/b')) self.assertEqual(f('\\a\\b'), ('', '', '\\a\\b')) class NTFlavourTest(_BaseFlavourTest, unittest.TestCase): flavour = pathlib._windows_flavour def test_parse_parts(self): check = self._check_parse_parts # First part is anchored. check(['c:'], ('c:', '', ['c:'])) check(['c:/'], ('c:', '\\', ['c:\\'])) check(['/'], ('', '\\', ['\\'])) check(['c:a'], ('c:', '', ['c:', 'a'])) check(['c:/a'], ('c:', '\\', ['c:\\', 'a'])) check(['/a'], ('', '\\', ['\\', 'a'])) # UNC paths. check(['//a/b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\'])) check(['//a/b/'], ('\\\\a\\b', '\\', ['\\\\a\\b\\'])) check(['//a/b/c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c'])) # Second part is anchored, so that the first part is ignored. check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c'])) check(['a', 'Z:/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c'])) # UNC paths. check(['a', '//b/c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd'])) # Collapsing and stripping excess slashes. check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd'])) # UNC paths. check(['a', '//b/c//', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd'])) # Extended paths. check(['//?/c:/'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\'])) check(['//?/c:/a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a'])) check(['//?/c:/a', '/b'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b'])) # Extended UNC paths (format is "\\?\UNC\server\share"). check(['//?/UNC/b/c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\'])) check(['//?/UNC/b/c/d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd'])) # Second part has a root but not drive. check(['a', '/b', 'c'], ('', '\\', ['\\', 'b', 'c'])) check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c'])) check(['//?/Z:/a', '/b', 'c'], ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c'])) def test_splitroot(self): f = self.flavour.splitroot self.assertEqual(f(''), ('', '', '')) self.assertEqual(f('a'), ('', '', 'a')) self.assertEqual(f('a\\b'), ('', '', 'a\\b')) self.assertEqual(f('\\a'), ('', '\\', 'a')) self.assertEqual(f('\\a\\b'), ('', '\\', 'a\\b')) self.assertEqual(f('c:a\\b'), ('c:', '', 'a\\b')) self.assertEqual(f('c:\\a\\b'), ('c:', '\\', 'a\\b')) # Redundant slashes in the root are collapsed. self.assertEqual(f('\\\\a'), ('', '\\', 'a')) self.assertEqual(f('\\\\\\a/b'), ('', '\\', 'a/b')) self.assertEqual(f('c:\\\\a'), ('c:', '\\', 'a')) self.assertEqual(f('c:\\\\\\a/b'), ('c:', '\\', 'a/b')) # Valid UNC paths. self.assertEqual(f('\\\\a\\b'), ('\\\\a\\b', '\\', '')) self.assertEqual(f('\\\\a\\b\\'), ('\\\\a\\b', '\\', '')) self.assertEqual(f('\\\\a\\b\\c\\d'), ('\\\\a\\b', '\\', 'c\\d')) # These are non-UNC paths (according to ntpath.py and test_ntpath). # However, command.com says such paths are invalid, so it's # difficult to know what the right semantics are. self.assertEqual(f('\\\\\\a\\b'), ('', '\\', 'a\\b')) self.assertEqual(f('\\\\a'), ('', '\\', 'a')) # # Tests for the pure classes. # class _BasePurePathTest(object): # Keys are canonical paths, values are list of tuples of arguments # supposed to produce equal paths. equivalences = { 'a/b': [ ('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'), ('a/b/',), ('a//b',), ('a//b//',), # Empty components get removed. ('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''), ], '/b/c/d': [ ('a', '/b/c', 'd'), ('a', '///b//c', 'd/'), ('/a', '/b/c', 'd'), # Empty components get removed. ('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'), ], } def setUp(self): p = self.cls('a') self.flavour = p._flavour self.sep = self.flavour.sep self.altsep = self.flavour.altsep def test_constructor_common(self): P = self.cls p = P('a') self.assertIsInstance(p, P) P('a', 'b', 'c') P('/a', 'b', 'c') P('a/b/c') P('/a/b/c') P(FakePath("a/b/c")) self.assertEqual(P(P('a')), P('a')) self.assertEqual(P(P('a'), 'b'), P('a/b')) self.assertEqual(P(P('a'), P('b')), P('a/b')) self.assertEqual(P(P('a'), P('b'), P('c')), P(FakePath("a/b/c"))) def _check_str_subclass(self, *args): # Issue #21127: it should be possible to construct a PurePath object # from a str subclass instance, and it then gets converted to # a pure str object. class StrSubclass(str): pass P = self.cls p = P(*(StrSubclass(x) for x in args)) self.assertEqual(p, P(*args)) for part in p.parts: self.assertIs(type(part), str) def test_str_subclass_common(self): self._check_str_subclass('') self._check_str_subclass('.') self._check_str_subclass('a') self._check_str_subclass('a/b.txt') self._check_str_subclass('/a/b.txt') def test_join_common(self): P = self.cls p = P('a/b') pp = p.joinpath('c') self.assertEqual(pp, P('a/b/c')) self.assertIs(type(pp), type(p)) pp = p.joinpath('c', 'd') self.assertEqual(pp, P('a/b/c/d')) pp = p.joinpath(P('c')) self.assertEqual(pp, P('a/b/c')) pp = p.joinpath('/c') self.assertEqual(pp, P('/c')) def test_div_common(self): # Basically the same as joinpath(). P = self.cls p = P('a/b') pp = p / 'c' self.assertEqual(pp, P('a/b/c')) self.assertIs(type(pp), type(p)) pp = p / 'c/d' self.assertEqual(pp, P('a/b/c/d')) pp = p / 'c' / 'd' self.assertEqual(pp, P('a/b/c/d')) pp = 'c' / p / 'd' self.assertEqual(pp, P('c/a/b/d')) pp = p / P('c') self.assertEqual(pp, P('a/b/c')) pp = p/ '/c' self.assertEqual(pp, P('/c')) def _check_str(self, expected, args): p = self.cls(*args) self.assertEqual(str(p), expected.replace('/', self.sep)) def test_str_common(self): # Canonicalized paths roundtrip. for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): self._check_str(pathstr, (pathstr,)) # Special case for the empty path. self._check_str('.', ('',)) # Other tests for str() are in test_equivalences(). def test_as_posix_common(self): P = self.cls for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): self.assertEqual(P(pathstr).as_posix(), pathstr) # Other tests for as_posix() are in test_equivalences(). def test_as_bytes_common(self): sep = os.fsencode(self.sep) P = self.cls self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b') def test_as_uri_common(self): P = self.cls with self.assertRaises(ValueError): P('a').as_uri() with self.assertRaises(ValueError): P().as_uri() def test_repr_common(self): for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): p = self.cls(pathstr) clsname = p.__class__.__name__ r = repr(p) # The repr() is in the form ClassName("forward-slashes path"). self.assertTrue(r.startswith(clsname + '('), r) self.assertTrue(r.endswith(')'), r) inner = r[len(clsname) + 1 : -1] self.assertEqual(eval(inner), p.as_posix()) # The repr() roundtrips. q = eval(r, pathlib.__dict__) self.assertIs(q.__class__, p.__class__) self.assertEqual(q, p) self.assertEqual(repr(q), r) def test_eq_common(self): P = self.cls self.assertEqual(P('a/b'), P('a/b')) self.assertEqual(P('a/b'), P('a', 'b')) self.assertNotEqual(P('a/b'), P('a')) self.assertNotEqual(P('a/b'), P('/a/b')) self.assertNotEqual(P('a/b'), P()) self.assertNotEqual(P('/a/b'), P('/')) self.assertNotEqual(P(), P('/')) self.assertNotEqual(P(), "") self.assertNotEqual(P(), {}) self.assertNotEqual(P(), int) def test_match_common(self): P = self.cls self.assertRaises(ValueError, P('a').match, '') self.assertRaises(ValueError, P('a').match, '.') # Simple relative pattern. self.assertTrue(P('b.py').match('b.py')) self.assertTrue(P('a/b.py').match('b.py')) self.assertTrue(P('/a/b.py').match('b.py')) self.assertFalse(P('a.py').match('b.py')) self.assertFalse(P('b/py').match('b.py')) self.assertFalse(P('/a.py').match('b.py')) self.assertFalse(P('b.py/c').match('b.py')) # Wilcard relative pattern. self.assertTrue(P('b.py').match('*.py')) self.assertTrue(P('a/b.py').match('*.py')) self.assertTrue(P('/a/b.py').match('*.py')) self.assertFalse(P('b.pyc').match('*.py')) self.assertFalse(P('b./py').match('*.py')) self.assertFalse(P('b.py/c').match('*.py')) # Multi-part relative pattern. self.assertTrue(P('ab/c.py').match('a*/*.py')) self.assertTrue(P('/d/ab/c.py').match('a*/*.py')) self.assertFalse(P('a.py').match('a*/*.py')) self.assertFalse(P('/dab/c.py').match('a*/*.py')) self.assertFalse(P('ab/c.py/d').match('a*/*.py')) # Absolute pattern. self.assertTrue(P('/b.py').match('/*.py')) self.assertFalse(P('b.py').match('/*.py')) self.assertFalse(P('a/b.py').match('/*.py')) self.assertFalse(P('/a/b.py').match('/*.py')) # Multi-part absolute pattern. self.assertTrue(P('/a/b.py').match('/a/*.py')) self.assertFalse(P('/ab.py').match('/a/*.py')) self.assertFalse(P('/a/b/c.py').match('/a/*.py')) # Multi-part glob-style pattern. self.assertFalse(P('/a/b/c.py').match('/**/*.py')) self.assertTrue(P('/a/b/c.py').match('/a/**/*.py')) def test_ordering_common(self): # Ordering is tuple-alike. def assertLess(a, b): self.assertLess(a, b) self.assertGreater(b, a) P = self.cls a = P('a') b = P('a/b') c = P('abc') d = P('b') assertLess(a, b) assertLess(a, c) assertLess(a, d) assertLess(b, c) assertLess(c, d) P = self.cls a = P('/a') b = P('/a/b') c = P('/abc') d = P('/b') assertLess(a, b) assertLess(a, c) assertLess(a, d) assertLess(b, c) assertLess(c, d) with self.assertRaises(TypeError): P() < {} def test_parts_common(self): # `parts` returns a tuple. sep = self.sep P = self.cls p = P('a/b') parts = p.parts self.assertEqual(parts, ('a', 'b')) # The object gets reused. self.assertIs(parts, p.parts) # When the path is absolute, the anchor is a separate part. p = P('/a/b') parts = p.parts self.assertEqual(parts, (sep, 'a', 'b')) def test_fspath_common(self): P = self.cls p = P('a/b') self._check_str(p.__fspath__(), ('a/b',)) self._check_str(os.fspath(p), ('a/b',)) def test_equivalences(self): for k, tuples in self.equivalences.items(): canon = k.replace('/', self.sep) posix = k.replace(self.sep, '/') if canon != posix: tuples = tuples + [ tuple(part.replace('/', self.sep) for part in t) for t in tuples ] tuples.append((posix, )) pcanon = self.cls(canon) for t in tuples: p = self.cls(*t) self.assertEqual(p, pcanon, "failed with args {}".format(t)) self.assertEqual(hash(p), hash(pcanon)) self.assertEqual(str(p), canon) self.assertEqual(p.as_posix(), posix) def test_parent_common(self): # Relative P = self.cls p = P('a/b/c') self.assertEqual(p.parent, P('a/b')) self.assertEqual(p.parent.parent, P('a')) self.assertEqual(p.parent.parent.parent, P()) self.assertEqual(p.parent.parent.parent.parent, P()) # Anchored p = P('/a/b/c') self.assertEqual(p.parent, P('/a/b')) self.assertEqual(p.parent.parent, P('/a')) self.assertEqual(p.parent.parent.parent, P('/')) self.assertEqual(p.parent.parent.parent.parent, P('/')) def test_parents_common(self): # Relative P = self.cls p = P('a/b/c') par = p.parents self.assertEqual(len(par), 3) self.assertEqual(par[0], P('a/b')) self.assertEqual(par[1], P('a')) self.assertEqual(par[2], P('.')) self.assertEqual(list(par), [P('a/b'), P('a'), P('.')]) with self.assertRaises(IndexError): par[-1] with self.assertRaises(IndexError): par[3] with self.assertRaises(TypeError): par[0] = p # Anchored p = P('/a/b/c') par = p.parents self.assertEqual(len(par), 3) self.assertEqual(par[0], P('/a/b')) self.assertEqual(par[1], P('/a')) self.assertEqual(par[2], P('/')) self.assertEqual(list(par), [P('/a/b'), P('/a'), P('/')]) with self.assertRaises(IndexError): par[3] def test_drive_common(self): P = self.cls self.assertEqual(P('a/b').drive, '') self.assertEqual(P('/a/b').drive, '') self.assertEqual(P('').drive, '') def test_root_common(self): P = self.cls sep = self.sep self.assertEqual(P('').root, '') self.assertEqual(P('a/b').root, '') self.assertEqual(P('/').root, sep) self.assertEqual(P('/a/b').root, sep) def test_anchor_common(self): P = self.cls sep = self.sep self.assertEqual(P('').anchor, '') self.assertEqual(P('a/b').anchor, '') self.assertEqual(P('/').anchor, sep) self.assertEqual(P('/a/b').anchor, sep) def test_name_common(self): P = self.cls self.assertEqual(P('').name, '') self.assertEqual(P('.').name, '') self.assertEqual(P('/').name, '') self.assertEqual(P('a/b').name, 'b') self.assertEqual(P('/a/b').name, 'b') self.assertEqual(P('/a/b/.').name, 'b') self.assertEqual(P('a/b.py').name, 'b.py') self.assertEqual(P('/a/b.py').name, 'b.py') def test_suffix_common(self): P = self.cls self.assertEqual(P('').suffix, '') self.assertEqual(P('.').suffix, '') self.assertEqual(P('..').suffix, '') self.assertEqual(P('/').suffix, '') self.assertEqual(P('a/b').suffix, '') self.assertEqual(P('/a/b').suffix, '') self.assertEqual(P('/a/b/.').suffix, '') self.assertEqual(P('a/b.py').suffix, '.py') self.assertEqual(P('/a/b.py').suffix, '.py') self.assertEqual(P('a/.hgrc').suffix, '') self.assertEqual(P('/a/.hgrc').suffix, '') self.assertEqual(P('a/.hg.rc').suffix, '.rc') self.assertEqual(P('/a/.hg.rc').suffix, '.rc') self.assertEqual(P('a/b.tar.gz').suffix, '.gz') self.assertEqual(P('/a/b.tar.gz').suffix, '.gz') self.assertEqual(P('a/Some name. Ending with a dot.').suffix, '') self.assertEqual(P('/a/Some name. Ending with a dot.').suffix, '') def test_suffixes_common(self): P = self.cls self.assertEqual(P('').suffixes, []) self.assertEqual(P('.').suffixes, []) self.assertEqual(P('/').suffixes, []) self.assertEqual(P('a/b').suffixes, []) self.assertEqual(P('/a/b').suffixes, []) self.assertEqual(P('/a/b/.').suffixes, []) self.assertEqual(P('a/b.py').suffixes, ['.py']) self.assertEqual(P('/a/b.py').suffixes, ['.py']) self.assertEqual(P('a/.hgrc').suffixes, []) self.assertEqual(P('/a/.hgrc').suffixes, []) self.assertEqual(P('a/.hg.rc').suffixes, ['.rc']) self.assertEqual(P('/a/.hg.rc').suffixes, ['.rc']) self.assertEqual(P('a/b.tar.gz').suffixes, ['.tar', '.gz']) self.assertEqual(P('/a/b.tar.gz').suffixes, ['.tar', '.gz']) self.assertEqual(P('a/Some name. Ending with a dot.').suffixes, []) self.assertEqual(P('/a/Some name. Ending with a dot.').suffixes, []) def test_stem_common(self): P = self.cls self.assertEqual(P('').stem, '') self.assertEqual(P('.').stem, '') self.assertEqual(P('..').stem, '..') self.assertEqual(P('/').stem, '') self.assertEqual(P('a/b').stem, 'b') self.assertEqual(P('a/b.py').stem, 'b') self.assertEqual(P('a/.hgrc').stem, '.hgrc') self.assertEqual(P('a/.hg.rc').stem, '.hg') self.assertEqual(P('a/b.tar.gz').stem, 'b.tar') self.assertEqual(P('a/Some name. Ending with a dot.').stem, 'Some name. Ending with a dot.') def test_with_name_common(self): P = self.cls self.assertEqual(P('a/b').with_name('d.xml'), P('a/d.xml')) self.assertEqual(P('/a/b').with_name('d.xml'), P('/a/d.xml')) self.assertEqual(P('a/b.py').with_name('d.xml'), P('a/d.xml')) self.assertEqual(P('/a/b.py').with_name('d.xml'), P('/a/d.xml')) self.assertEqual(P('a/Dot ending.').with_name('d.xml'), P('a/d.xml')) self.assertEqual(P('/a/Dot ending.').with_name('d.xml'), P('/a/d.xml')) self.assertRaises(ValueError, P('').with_name, 'd.xml') self.assertRaises(ValueError, P('.').with_name, 'd.xml') self.assertRaises(ValueError, P('/').with_name, 'd.xml') self.assertRaises(ValueError, P('a/b').with_name, '') self.assertRaises(ValueError, P('a/b').with_name, '/c') self.assertRaises(ValueError, P('a/b').with_name, 'c/') self.assertRaises(ValueError, P('a/b').with_name, 'c/d') def test_with_suffix_common(self): P = self.cls self.assertEqual(P('a/b').with_suffix('.gz'), P('a/b.gz')) self.assertEqual(P('/a/b').with_suffix('.gz'), P('/a/b.gz')) self.assertEqual(P('a/b.py').with_suffix('.gz'), P('a/b.gz')) self.assertEqual(P('/a/b.py').with_suffix('.gz'), P('/a/b.gz')) # Stripping suffix. self.assertEqual(P('a/b.py').with_suffix(''), P('a/b')) self.assertEqual(P('/a/b').with_suffix(''), P('/a/b')) # Path doesn't have a "filename" component. self.assertRaises(ValueError, P('').with_suffix, '.gz') self.assertRaises(ValueError, P('.').with_suffix, '.gz') self.assertRaises(ValueError, P('/').with_suffix, '.gz') # Invalid suffix. self.assertRaises(ValueError, P('a/b').with_suffix, 'gz') self.assertRaises(ValueError, P('a/b').with_suffix, '/') self.assertRaises(ValueError, P('a/b').with_suffix, '.') self.assertRaises(ValueError, P('a/b').with_suffix, '/.gz') self.assertRaises(ValueError, P('a/b').with_suffix, 'c/d') self.assertRaises(ValueError, P('a/b').with_suffix, '.c/.d') self.assertRaises(ValueError, P('a/b').with_suffix, './.d') self.assertRaises(ValueError, P('a/b').with_suffix, '.d/.') self.assertRaises(ValueError, P('a/b').with_suffix, (self.flavour.sep, 'd')) def test_relative_to_common(self): P = self.cls p = P('a/b') self.assertRaises(TypeError, p.relative_to) self.assertRaises(TypeError, p.relative_to, b'a') self.assertEqual(p.relative_to(P()), P('a/b')) self.assertEqual(p.relative_to(''), P('a/b')) self.assertEqual(p.relative_to(P('a')), P('b')) self.assertEqual(p.relative_to('a'), P('b')) self.assertEqual(p.relative_to('a/'), P('b')) self.assertEqual(p.relative_to(P('a/b')), P()) self.assertEqual(p.relative_to('a/b'), P()) # With several args. self.assertEqual(p.relative_to('a', 'b'), P()) # Unrelated paths. self.assertRaises(ValueError, p.relative_to, P('c')) self.assertRaises(ValueError, p.relative_to, P('a/b/c')) self.assertRaises(ValueError, p.relative_to, P('a/c')) self.assertRaises(ValueError, p.relative_to, P('/a')) p = P('/a/b') self.assertEqual(p.relative_to(P('/')), P('a/b')) self.assertEqual(p.relative_to('/'), P('a/b')) self.assertEqual(p.relative_to(P('/a')), P('b')) self.assertEqual(p.relative_to('/a'), P('b')) self.assertEqual(p.relative_to('/a/'), P('b')) self.assertEqual(p.relative_to(P('/a/b')), P()) self.assertEqual(p.relative_to('/a/b'), P()) # Unrelated paths. self.assertRaises(ValueError, p.relative_to, P('/c')) self.assertRaises(ValueError, p.relative_to, P('/a/b/c')) self.assertRaises(ValueError, p.relative_to, P('/a/c')) self.assertRaises(ValueError, p.relative_to, P()) self.assertRaises(ValueError, p.relative_to, '') self.assertRaises(ValueError, p.relative_to, P('a')) def test_pickling_common(self): P = self.cls p = P('/a/b') for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): dumped = pickle.dumps(p, proto) pp = pickle.loads(dumped) self.assertIs(pp.__class__, p.__class__) self.assertEqual(pp, p) self.assertEqual(hash(pp), hash(p)) self.assertEqual(str(pp), str(p)) class PurePosixPathTest(_BasePurePathTest, unittest.TestCase): cls = pathlib.PurePosixPath def test_root(self): P = self.cls self.assertEqual(P('/a/b').root, '/') self.assertEqual(P('///a/b').root, '/') # POSIX special case for two leading slashes. self.assertEqual(P('//a/b').root, '//') def test_eq(self): P = self.cls self.assertNotEqual(P('a/b'), P('A/b')) self.assertEqual(P('/a'), P('///a')) self.assertNotEqual(P('/a'), P('//a')) def test_as_uri(self): P = self.cls self.assertEqual(P('/').as_uri(), 'file:///') self.assertEqual(P('/a/b.c').as_uri(), 'file:///a/b.c') self.assertEqual(P('/a/b%#c').as_uri(), 'file:///a/b%25%23c') def test_as_uri_non_ascii(self): from urllib.parse import quote_from_bytes P = self.cls try: os.fsencode('\xe9') except UnicodeEncodeError: self.skipTest("\\xe9 cannot be encoded to the filesystem encoding") self.assertEqual(P('/a/b\xe9').as_uri(), 'file:///a/b' + quote_from_bytes(os.fsencode('\xe9'))) def test_match(self): P = self.cls self.assertFalse(P('A.py').match('a.PY')) def test_is_absolute(self): P = self.cls self.assertFalse(P().is_absolute()) self.assertFalse(P('a').is_absolute()) self.assertFalse(P('a/b/').is_absolute()) self.assertTrue(P('/').is_absolute()) self.assertTrue(P('/a').is_absolute()) self.assertTrue(P('/a/b/').is_absolute()) self.assertTrue(P('//a').is_absolute()) self.assertTrue(P('//a/b').is_absolute()) def test_is_reserved(self): P = self.cls self.assertIs(False, P('').is_reserved()) self.assertIs(False, P('/').is_reserved()) self.assertIs(False, P('/foo/bar').is_reserved()) self.assertIs(False, P('/dev/con/PRN/NUL').is_reserved()) def test_join(self): P = self.cls p = P('//a') pp = p.joinpath('b') self.assertEqual(pp, P('//a/b')) pp = P('/a').joinpath('//c') self.assertEqual(pp, P('//c')) pp = P('//a').joinpath('/c') self.assertEqual(pp, P('/c')) def test_div(self): # Basically the same as joinpath(). P = self.cls p = P('//a') pp = p / 'b' self.assertEqual(pp, P('//a/b')) pp = P('/a') / '//c' self.assertEqual(pp, P('//c')) pp = P('//a') / '/c' self.assertEqual(pp, P('/c')) class PureWindowsPathTest(_BasePurePathTest, unittest.TestCase): cls = pathlib.PureWindowsPath equivalences = _BasePurePathTest.equivalences.copy() equivalences.update({ 'c:a': [ ('c:', 'a'), ('c:', 'a/'), ('/', 'c:', 'a') ], 'c:/a': [ ('c:/', 'a'), ('c:', '/', 'a'), ('c:', '/a'), ('/z', 'c:/', 'a'), ('//x/y', 'c:/', 'a'), ], '//a/b/': [ ('//a/b',) ], '//a/b/c': [ ('//a/b', 'c'), ('//a/b/', 'c'), ], }) def test_str(self): p = self.cls('a/b/c') self.assertEqual(str(p), 'a\\b\\c') p = self.cls('c:/a/b/c') self.assertEqual(str(p), 'c:\\a\\b\\c') p = self.cls('//a/b') self.assertEqual(str(p), '\\\\a\\b\\') p = self.cls('//a/b/c') self.assertEqual(str(p), '\\\\a\\b\\c') p = self.cls('//a/b/c/d') self.assertEqual(str(p), '\\\\a\\b\\c\\d') def test_str_subclass(self): self._check_str_subclass('c:') self._check_str_subclass('c:a') self._check_str_subclass('c:a\\b.txt') self._check_str_subclass('c:\\') self._check_str_subclass('c:\\a') self._check_str_subclass('c:\\a\\b.txt') self._check_str_subclass('\\\\some\\share') self._check_str_subclass('\\\\some\\share\\a') self._check_str_subclass('\\\\some\\share\\a\\b.txt') def test_eq(self): P = self.cls self.assertEqual(P('c:a/b'), P('c:a/b')) self.assertEqual(P('c:a/b'), P('c:', 'a', 'b')) self.assertNotEqual(P('c:a/b'), P('d:a/b')) self.assertNotEqual(P('c:a/b'), P('c:/a/b')) self.assertNotEqual(P('/a/b'), P('c:/a/b')) # Case-insensitivity. self.assertEqual(P('a/B'), P('A/b')) self.assertEqual(P('C:a/B'), P('c:A/b')) self.assertEqual(P('//Some/SHARE/a/B'), P('//somE/share/A/b')) def test_as_uri(self): P = self.cls with self.assertRaises(ValueError): P('/a/b').as_uri() with self.assertRaises(ValueError): P('c:a/b').as_uri() self.assertEqual(P('c:/').as_uri(), 'file:///c:/') self.assertEqual(P('c:/a/b.c').as_uri(), 'file:///c:/a/b.c') self.assertEqual(P('c:/a/b%#c').as_uri(), 'file:///c:/a/b%25%23c') self.assertEqual(P('c:/a/b\xe9').as_uri(), 'file:///c:/a/b%C3%A9') self.assertEqual(P('//some/share/').as_uri(), 'file://some/share/') self.assertEqual(P('//some/share/a/b.c').as_uri(), 'file://some/share/a/b.c') self.assertEqual(P('//some/share/a/b%#c\xe9').as_uri(), 'file://some/share/a/b%25%23c%C3%A9') def test_match_common(self): P = self.cls # Absolute patterns. self.assertTrue(P('c:/b.py').match('/*.py')) self.assertTrue(P('c:/b.py').match('c:*.py')) self.assertTrue(P('c:/b.py').match('c:/*.py')) self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive self.assertFalse(P('b.py').match('/*.py')) self.assertFalse(P('b.py').match('c:*.py')) self.assertFalse(P('b.py').match('c:/*.py')) self.assertFalse(P('c:b.py').match('/*.py')) self.assertFalse(P('c:b.py').match('c:/*.py')) self.assertFalse(P('/b.py').match('c:*.py')) self.assertFalse(P('/b.py').match('c:/*.py')) # UNC patterns. self.assertTrue(P('//some/share/a.py').match('/*.py')) self.assertTrue(P('//some/share/a.py').match('//some/share/*.py')) self.assertFalse(P('//other/share/a.py').match('//some/share/*.py')) self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py')) # Case-insensitivity. self.assertTrue(P('B.py').match('b.PY')) self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY')) self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY')) def test_ordering_common(self): # Case-insensitivity. def assertOrderedEqual(a, b): self.assertLessEqual(a, b) self.assertGreaterEqual(b, a) P = self.cls p = P('c:A/b') q = P('C:a/B') assertOrderedEqual(p, q) self.assertFalse(p < q) self.assertFalse(p > q) p = P('//some/Share/A/b') q = P('//Some/SHARE/a/B') assertOrderedEqual(p, q) self.assertFalse(p < q) self.assertFalse(p > q) def test_parts(self): P = self.cls p = P('c:a/b') parts = p.parts self.assertEqual(parts, ('c:', 'a', 'b')) p = P('c:/a/b') parts = p.parts self.assertEqual(parts, ('c:\\', 'a', 'b')) p = P('//a/b/c/d') parts = p.parts self.assertEqual(parts, ('\\\\a\\b\\', 'c', 'd')) def test_parent(self): # Anchored P = self.cls p = P('z:a/b/c') self.assertEqual(p.parent, P('z:a/b')) self.assertEqual(p.parent.parent, P('z:a')) self.assertEqual(p.parent.parent.parent, P('z:')) self.assertEqual(p.parent.parent.parent.parent, P('z:')) p = P('z:/a/b/c') self.assertEqual(p.parent, P('z:/a/b')) self.assertEqual(p.parent.parent, P('z:/a')) self.assertEqual(p.parent.parent.parent, P('z:/')) self.assertEqual(p.parent.parent.parent.parent, P('z:/')) p = P('//a/b/c/d') self.assertEqual(p.parent, P('//a/b/c')) self.assertEqual(p.parent.parent, P('//a/b')) self.assertEqual(p.parent.parent.parent, P('//a/b')) def test_parents(self): # Anchored P = self.cls p = P('z:a/b/') par = p.parents self.assertEqual(len(par), 2) self.assertEqual(par[0], P('z:a')) self.assertEqual(par[1], P('z:')) self.assertEqual(list(par), [P('z:a'), P('z:')]) with self.assertRaises(IndexError): par[2] p = P('z:/a/b/') par = p.parents self.assertEqual(len(par), 2) self.assertEqual(par[0], P('z:/a')) self.assertEqual(par[1], P('z:/')) self.assertEqual(list(par), [P('z:/a'), P('z:/')]) with self.assertRaises(IndexError): par[2] p = P('//a/b/c/d') par = p.parents self.assertEqual(len(par), 2) self.assertEqual(par[0], P('//a/b/c')) self.assertEqual(par[1], P('//a/b')) self.assertEqual(list(par), [P('//a/b/c'), P('//a/b')]) with self.assertRaises(IndexError): par[2] def test_drive(self): P = self.cls self.assertEqual(P('c:').drive, 'c:') self.assertEqual(P('c:a/b').drive, 'c:') self.assertEqual(P('c:/').drive, 'c:') self.assertEqual(P('c:/a/b/').drive, 'c:') self.assertEqual(P('//a/b').drive, '\\\\a\\b') self.assertEqual(P('//a/b/').drive, '\\\\a\\b') self.assertEqual(P('//a/b/c/d').drive, '\\\\a\\b') def test_root(self): P = self.cls self.assertEqual(P('c:').root, '') self.assertEqual(P('c:a/b').root, '') self.assertEqual(P('c:/').root, '\\') self.assertEqual(P('c:/a/b/').root, '\\') self.assertEqual(P('//a/b').root, '\\') self.assertEqual(P('//a/b/').root, '\\') self.assertEqual(P('//a/b/c/d').root, '\\') def test_anchor(self): P = self.cls self.assertEqual(P('c:').anchor, 'c:') self.assertEqual(P('c:a/b').anchor, 'c:') self.assertEqual(P('c:/').anchor, 'c:\\') self.assertEqual(P('c:/a/b/').anchor, 'c:\\') self.assertEqual(P('//a/b').anchor, '\\\\a\\b\\') self.assertEqual(P('//a/b/').anchor, '\\\\a\\b\\') self.assertEqual(P('//a/b/c/d').anchor, '\\\\a\\b\\') def test_name(self): P = self.cls self.assertEqual(P('c:').name, '') self.assertEqual(P('c:/').name, '') self.assertEqual(P('c:a/b').name, 'b') self.assertEqual(P('c:/a/b').name, 'b') self.assertEqual(P('c:a/b.py').name, 'b.py') self.assertEqual(P('c:/a/b.py').name, 'b.py') self.assertEqual(P('//My.py/Share.php').name, '') self.assertEqual(P('//My.py/Share.php/a/b').name, 'b') def test_suffix(self): P = self.cls self.assertEqual(P('c:').suffix, '') self.assertEqual(P('c:/').suffix, '') self.assertEqual(P('c:a/b').suffix, '') self.assertEqual(P('c:/a/b').suffix, '') self.assertEqual(P('c:a/b.py').suffix, '.py') self.assertEqual(P('c:/a/b.py').suffix, '.py') self.assertEqual(P('c:a/.hgrc').suffix, '') self.assertEqual(P('c:/a/.hgrc').suffix, '') self.assertEqual(P('c:a/.hg.rc').suffix, '.rc') self.assertEqual(P('c:/a/.hg.rc').suffix, '.rc') self.assertEqual(P('c:a/b.tar.gz').suffix, '.gz') self.assertEqual(P('c:/a/b.tar.gz').suffix, '.gz') self.assertEqual(P('c:a/Some name. Ending with a dot.').suffix, '') self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffix, '') self.assertEqual(P('//My.py/Share.php').suffix, '') self.assertEqual(P('//My.py/Share.php/a/b').suffix, '') def test_suffixes(self): P = self.cls self.assertEqual(P('c:').suffixes, []) self.assertEqual(P('c:/').suffixes, []) self.assertEqual(P('c:a/b').suffixes, []) self.assertEqual(P('c:/a/b').suffixes, []) self.assertEqual(P('c:a/b.py').suffixes, ['.py']) self.assertEqual(P('c:/a/b.py').suffixes, ['.py']) self.assertEqual(P('c:a/.hgrc').suffixes, []) self.assertEqual(P('c:/a/.hgrc').suffixes, []) self.assertEqual(P('c:a/.hg.rc').suffixes, ['.rc']) self.assertEqual(P('c:/a/.hg.rc').suffixes, ['.rc']) self.assertEqual(P('c:a/b.tar.gz').suffixes, ['.tar', '.gz']) self.assertEqual(P('c:/a/b.tar.gz').suffixes, ['.tar', '.gz']) self.assertEqual(P('//My.py/Share.php').suffixes, []) self.assertEqual(P('//My.py/Share.php/a/b').suffixes, []) self.assertEqual(P('c:a/Some name. Ending with a dot.').suffixes, []) self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffixes, []) def test_stem(self): P = self.cls self.assertEqual(P('c:').stem, '') self.assertEqual(P('c:.').stem, '') self.assertEqual(P('c:..').stem, '..') self.assertEqual(P('c:/').stem, '') self.assertEqual(P('c:a/b').stem, 'b') self.assertEqual(P('c:a/b.py').stem, 'b') self.assertEqual(P('c:a/.hgrc').stem, '.hgrc') self.assertEqual(P('c:a/.hg.rc').stem, '.hg') self.assertEqual(P('c:a/b.tar.gz').stem, 'b.tar') self.assertEqual(P('c:a/Some name. Ending with a dot.').stem, 'Some name. Ending with a dot.') def test_with_name(self): P = self.cls self.assertEqual(P('c:a/b').with_name('d.xml'), P('c:a/d.xml')) self.assertEqual(P('c:/a/b').with_name('d.xml'), P('c:/a/d.xml')) self.assertEqual(P('c:a/Dot ending.').with_name('d.xml'), P('c:a/d.xml')) self.assertEqual(P('c:/a/Dot ending.').with_name('d.xml'), P('c:/a/d.xml')) self.assertRaises(ValueError, P('c:').with_name, 'd.xml') self.assertRaises(ValueError, P('c:/').with_name, 'd.xml') self.assertRaises(ValueError, P('//My/Share').with_name, 'd.xml') self.assertRaises(ValueError, P('c:a/b').with_name, 'd:') self.assertRaises(ValueError, P('c:a/b').with_name, 'd:e') self.assertRaises(ValueError, P('c:a/b').with_name, 'd:/e') self.assertRaises(ValueError, P('c:a/b').with_name, '//My/Share') def test_with_suffix(self): P = self.cls self.assertEqual(P('c:a/b').with_suffix('.gz'), P('c:a/b.gz')) self.assertEqual(P('c:/a/b').with_suffix('.gz'), P('c:/a/b.gz')) self.assertEqual(P('c:a/b.py').with_suffix('.gz'), P('c:a/b.gz')) self.assertEqual(P('c:/a/b.py').with_suffix('.gz'), P('c:/a/b.gz')) # Path doesn't have a "filename" component. self.assertRaises(ValueError, P('').with_suffix, '.gz') self.assertRaises(ValueError, P('.').with_suffix, '.gz') self.assertRaises(ValueError, P('/').with_suffix, '.gz') self.assertRaises(ValueError, P('//My/Share').with_suffix, '.gz') # Invalid suffix. self.assertRaises(ValueError, P('c:a/b').with_suffix, 'gz') self.assertRaises(ValueError, P('c:a/b').with_suffix, '/') self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\') self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:') self.assertRaises(ValueError, P('c:a/b').with_suffix, '/.gz') self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\.gz') self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:.gz') self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c/d') self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d') self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d') self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d') def test_relative_to(self): P = self.cls p = P('C:Foo/Bar') self.assertEqual(p.relative_to(P('c:')), P('Foo/Bar')) self.assertEqual(p.relative_to('c:'), P('Foo/Bar')) self.assertEqual(p.relative_to(P('c:foO')), P('Bar')) self.assertEqual(p.relative_to('c:foO'), P('Bar')) self.assertEqual(p.relative_to('c:foO/'), P('Bar')) self.assertEqual(p.relative_to(P('c:foO/baR')), P()) self.assertEqual(p.relative_to('c:foO/baR'), P()) # Unrelated paths. self.assertRaises(ValueError, p.relative_to, P()) self.assertRaises(ValueError, p.relative_to, '') self.assertRaises(ValueError, p.relative_to, P('d:')) self.assertRaises(ValueError, p.relative_to, P('/')) self.assertRaises(ValueError, p.relative_to, P('Foo')) self.assertRaises(ValueError, p.relative_to, P('/Foo')) self.assertRaises(ValueError, p.relative_to, P('C:/Foo')) self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz')) p = P('C:/Foo/Bar') self.assertEqual(p.relative_to(P('c:')), P('/Foo/Bar')) self.assertEqual(p.relative_to('c:'), P('/Foo/Bar')) self.assertEqual(str(p.relative_to(P('c:'))), '\\Foo\\Bar') self.assertEqual(str(p.relative_to('c:')), '\\Foo\\Bar') self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar')) self.assertEqual(p.relative_to('c:/'), P('Foo/Bar')) self.assertEqual(p.relative_to(P('c:/foO')), P('Bar')) self.assertEqual(p.relative_to('c:/foO'), P('Bar')) self.assertEqual(p.relative_to('c:/foO/'), P('Bar')) self.assertEqual(p.relative_to(P('c:/foO/baR')), P()) self.assertEqual(p.relative_to('c:/foO/baR'), P()) # Unrelated paths. self.assertRaises(ValueError, p.relative_to, P('C:/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz')) self.assertRaises(ValueError, p.relative_to, P('C:Foo')) self.assertRaises(ValueError, p.relative_to, P('d:')) self.assertRaises(ValueError, p.relative_to, P('d:/')) self.assertRaises(ValueError, p.relative_to, P('/')) self.assertRaises(ValueError, p.relative_to, P('/Foo')) self.assertRaises(ValueError, p.relative_to, P('//C/Foo')) # UNC paths. p = P('//Server/Share/Foo/Bar') self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar')) self.assertEqual(p.relative_to('//sErver/sHare'), P('Foo/Bar')) self.assertEqual(p.relative_to('//sErver/sHare/'), P('Foo/Bar')) self.assertEqual(p.relative_to(P('//sErver/sHare/Foo')), P('Bar')) self.assertEqual(p.relative_to('//sErver/sHare/Foo'), P('Bar')) self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar')) self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P()) self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P()) # Unrelated paths. self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo')) self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo')) self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo')) self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo')) def test_is_absolute(self): P = self.cls # Under NT, only paths with both a drive and a root are absolute. self.assertFalse(P().is_absolute()) self.assertFalse(P('a').is_absolute()) self.assertFalse(P('a/b/').is_absolute()) self.assertFalse(P('/').is_absolute()) self.assertFalse(P('/a').is_absolute()) self.assertFalse(P('/a/b/').is_absolute()) self.assertFalse(P('c:').is_absolute()) self.assertFalse(P('c:a').is_absolute()) self.assertFalse(P('c:a/b/').is_absolute()) self.assertTrue(P('c:/').is_absolute()) self.assertTrue(P('c:/a').is_absolute()) self.assertTrue(P('c:/a/b/').is_absolute()) # UNC paths are absolute by definition. self.assertTrue(P('//a/b').is_absolute()) self.assertTrue(P('//a/b/').is_absolute()) self.assertTrue(P('//a/b/c').is_absolute()) self.assertTrue(P('//a/b/c/d').is_absolute()) def test_join(self): P = self.cls p = P('C:/a/b') pp = p.joinpath('x/y') self.assertEqual(pp, P('C:/a/b/x/y')) pp = p.joinpath('/x/y') self.assertEqual(pp, P('C:/x/y')) # Joining with a different drive => the first path is ignored, even # if the second path is relative. pp = p.joinpath('D:x/y') self.assertEqual(pp, P('D:x/y')) pp = p.joinpath('D:/x/y') self.assertEqual(pp, P('D:/x/y')) pp = p.joinpath('//host/share/x/y') self.assertEqual(pp, P('//host/share/x/y')) # Joining with the same drive => the first path is appended to if # the second path is relative. pp = p.joinpath('c:x/y') self.assertEqual(pp, P('C:/a/b/x/y')) pp = p.joinpath('c:/x/y') self.assertEqual(pp, P('C:/x/y')) def test_div(self): # Basically the same as joinpath(). P = self.cls p = P('C:/a/b') self.assertEqual(p / 'x/y', P('C:/a/b/x/y')) self.assertEqual(p / 'x' / 'y', P('C:/a/b/x/y')) self.assertEqual(p / '/x/y', P('C:/x/y')) self.assertEqual(p / '/x' / 'y', P('C:/x/y')) # Joining with a different drive => the first path is ignored, even # if the second path is relative. self.assertEqual(p / 'D:x/y', P('D:x/y')) self.assertEqual(p / 'D:' / 'x/y', P('D:x/y')) self.assertEqual(p / 'D:/x/y', P('D:/x/y')) self.assertEqual(p / 'D:' / '/x/y', P('D:/x/y')) self.assertEqual(p / '//host/share/x/y', P('//host/share/x/y')) # Joining with the same drive => the first path is appended to if # the second path is relative. self.assertEqual(p / 'c:x/y', P('C:/a/b/x/y')) self.assertEqual(p / 'c:/x/y', P('C:/x/y')) def test_is_reserved(self): P = self.cls self.assertIs(False, P('').is_reserved()) self.assertIs(False, P('/').is_reserved()) self.assertIs(False, P('/foo/bar').is_reserved()) self.assertIs(True, P('con').is_reserved()) self.assertIs(True, P('NUL').is_reserved()) self.assertIs(True, P('NUL.txt').is_reserved()) self.assertIs(True, P('com1').is_reserved()) self.assertIs(True, P('com9.bar').is_reserved()) self.assertIs(False, P('bar.com9').is_reserved()) self.assertIs(True, P('lpt1').is_reserved()) self.assertIs(True, P('lpt9.bar').is_reserved()) self.assertIs(False, P('bar.lpt9').is_reserved()) # Only the last component matters. self.assertIs(False, P('c:/NUL/con/baz').is_reserved()) # UNC paths are never reserved. self.assertIs(False, P('//my/share/nul/con/aux').is_reserved()) class PurePathTest(_BasePurePathTest, unittest.TestCase): cls = pathlib.PurePath def test_concrete_class(self): p = self.cls('a') self.assertIs(type(p), pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath) def test_different_flavours_unequal(self): p = pathlib.PurePosixPath('a') q = pathlib.PureWindowsPath('a') self.assertNotEqual(p, q) def test_different_flavours_unordered(self): p = pathlib.PurePosixPath('a') q = pathlib.PureWindowsPath('a') with self.assertRaises(TypeError): p < q with self.assertRaises(TypeError): p <= q with self.assertRaises(TypeError): p > q with self.assertRaises(TypeError): p >= q # # Tests for the concrete classes. # # Make sure any symbolic links in the base test path are resolved. BASE = os.path.realpath(TESTFN) join = lambda *x: os.path.join(BASE, *x) rel_join = lambda *x: os.path.join(TESTFN, *x) only_nt = unittest.skipIf(os.name != 'nt', 'test requires a Windows-compatible system') only_posix = unittest.skipIf(os.name == 'nt', 'test requires a POSIX-compatible system') @only_posix class PosixPathAsPureTest(PurePosixPathTest): cls = pathlib.PosixPath @only_nt class WindowsPathAsPureTest(PureWindowsPathTest): cls = pathlib.WindowsPath def test_owner(self): P = self.cls with self.assertRaises(NotImplementedError): P('c:/').owner() def test_group(self): P = self.cls with self.assertRaises(NotImplementedError): P('c:/').group() class _BasePathTest(object): """Tests for the FS-accessing functionalities of the Path classes.""" # (BASE) # | # |-- brokenLink -> non-existing # |-- dirA # | `-- linkC -> ../dirB # |-- dirB # | |-- fileB # | `-- linkD -> ../dirB # |-- dirC # | |-- dirD # | | `-- fileD # | `-- fileC # |-- dirE # No permissions # |-- fileA # |-- linkA -> fileA # |-- linkB -> dirB # `-- brokenLinkLoop -> brokenLinkLoop # def setUp(self): def cleanup(): os.chmod(join('dirE'), 0o777) support.rmtree(BASE) self.addCleanup(cleanup) os.mkdir(BASE) os.mkdir(join('dirA')) os.mkdir(join('dirB')) os.mkdir(join('dirC')) os.mkdir(join('dirC', 'dirD')) os.mkdir(join('dirE')) with open(join('fileA'), 'wb') as f: f.write(b"this is file A\n") with open(join('dirB', 'fileB'), 'wb') as f: f.write(b"this is file B\n") with open(join('dirC', 'fileC'), 'wb') as f: f.write(b"this is file C\n") with open(join('dirC', 'dirD', 'fileD'), 'wb') as f: f.write(b"this is file D\n") os.chmod(join('dirE'), 0) if support.can_symlink(): # Relative symlinks. os.symlink('fileA', join('linkA')) os.symlink('non-existing', join('brokenLink')) self.dirlink('dirB', join('linkB')) self.dirlink(os.path.join('..', 'dirB'), join('dirA', 'linkC')) # This one goes upwards, creating a loop. self.dirlink(os.path.join('..', 'dirB'), join('dirB', 'linkD')) # Broken symlink (pointing to itself). os.symlink('brokenLinkLoop', join('brokenLinkLoop')) if os.name == 'nt': # Workaround for http://bugs.python.org/issue13772. def dirlink(self, src, dest): os.symlink(src, dest, target_is_directory=True) else: def dirlink(self, src, dest): os.symlink(src, dest) def assertSame(self, path_a, path_b): self.assertTrue(os.path.samefile(str(path_a), str(path_b)), "%r and %r don't point to the same file" % (path_a, path_b)) def assertFileNotFound(self, func, *args, **kwargs): with self.assertRaises(FileNotFoundError) as cm: func(*args, **kwargs) self.assertEqual(cm.exception.errno, errno.ENOENT) def _test_cwd(self, p): q = self.cls(os.getcwd()) self.assertEqual(p, q) self.assertEqual(str(p), str(q)) self.assertIs(type(p), type(q)) self.assertTrue(p.is_absolute()) def test_cwd(self): p = self.cls.cwd() self._test_cwd(p) def _test_home(self, p): q = self.cls(os.path.expanduser('~')) self.assertEqual(p, q) self.assertEqual(str(p), str(q)) self.assertIs(type(p), type(q)) self.assertTrue(p.is_absolute()) def test_home(self): p = self.cls.home() self._test_home(p) def test_samefile(self): fileA_path = os.path.join(BASE, 'fileA') fileB_path = os.path.join(BASE, 'dirB', 'fileB') p = self.cls(fileA_path) pp = self.cls(fileA_path) q = self.cls(fileB_path) self.assertTrue(p.samefile(fileA_path)) self.assertTrue(p.samefile(pp)) self.assertFalse(p.samefile(fileB_path)) self.assertFalse(p.samefile(q)) # Test the non-existent file case non_existent = os.path.join(BASE, 'foo') r = self.cls(non_existent) self.assertRaises(FileNotFoundError, p.samefile, r) self.assertRaises(FileNotFoundError, p.samefile, non_existent) self.assertRaises(FileNotFoundError, r.samefile, p) self.assertRaises(FileNotFoundError, r.samefile, non_existent) self.assertRaises(FileNotFoundError, r.samefile, r) self.assertRaises(FileNotFoundError, r.samefile, non_existent) def test_empty_path(self): # The empty path points to '.' p = self.cls('') self.assertEqual(p.stat(), os.stat('.')) def test_expanduser_common(self): P = self.cls p = P('~') self.assertEqual(p.expanduser(), P(os.path.expanduser('~'))) p = P('foo') self.assertEqual(p.expanduser(), p) p = P('/~') self.assertEqual(p.expanduser(), p) p = P('../~') self.assertEqual(p.expanduser(), p) p = P(P('').absolute().anchor) / '~' self.assertEqual(p.expanduser(), p) def test_exists(self): P = self.cls p = P(BASE) self.assertIs(True, p.exists()) self.assertIs(True, (p / 'dirA').exists()) self.assertIs(True, (p / 'fileA').exists()) self.assertIs(False, (p / 'fileA' / 'bah').exists()) if support.can_symlink(): self.assertIs(True, (p / 'linkA').exists()) self.assertIs(True, (p / 'linkB').exists()) self.assertIs(True, (p / 'linkB' / 'fileB').exists()) self.assertIs(False, (p / 'linkA' / 'bah').exists()) self.assertIs(False, (p / 'foo').exists()) self.assertIs(False, P('/xyzzy').exists()) self.assertIs(False, P(BASE + '\udfff').exists()) self.assertIs(False, P(BASE + '\x00').exists()) def test_open_common(self): p = self.cls(BASE) with (p / 'fileA').open('r') as f: self.assertIsInstance(f, io.TextIOBase) self.assertEqual(f.read(), "this is file A\n") with (p / 'fileA').open('rb') as f: self.assertIsInstance(f, io.BufferedIOBase) self.assertEqual(f.read().strip(), b"this is file A") with (p / 'fileA').open('rb', buffering=0) as f: self.assertIsInstance(f, io.RawIOBase) self.assertEqual(f.read().strip(), b"this is file A") def test_read_write_bytes(self): p = self.cls(BASE) (p / 'fileA').write_bytes(b'abcdefg') self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') # Check that trying to write str does not truncate the file. self.assertRaises(TypeError, (p / 'fileA').write_bytes, 'somestr') self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg') def test_read_write_text(self): p = self.cls(BASE) (p / 'fileA').write_text('äbcdefg', encoding='latin-1') self.assertEqual((p / 'fileA').read_text( encoding='utf-8', errors='ignore'), 'bcdefg') # Check that trying to write bytes does not truncate the file. self.assertRaises(TypeError, (p / 'fileA').write_text, b'somebytes') self.assertEqual((p / 'fileA').read_text(encoding='latin-1'), 'äbcdefg') def test_iterdir(self): P = self.cls p = P(BASE) it = p.iterdir() paths = set(it) expected = ['dirA', 'dirB', 'dirC', 'dirE', 'fileA'] if support.can_symlink(): expected += ['linkA', 'linkB', 'brokenLink', 'brokenLinkLoop'] self.assertEqual(paths, { P(BASE, q) for q in expected }) @support.skip_unless_symlink def test_iterdir_symlink(self): # __iter__ on a symlink to a directory. P = self.cls p = P(BASE, 'linkB') paths = set(p.iterdir()) expected = { P(BASE, 'linkB', q) for q in ['fileB', 'linkD'] } self.assertEqual(paths, expected) def test_iterdir_nodir(self): # __iter__ on something that is not a directory. p = self.cls(BASE, 'fileA') with self.assertRaises(OSError) as cm: next(p.iterdir()) # ENOENT or EINVAL under Windows, ENOTDIR otherwise # (see issue #12802). self.assertIn(cm.exception.errno, (errno.ENOTDIR, errno.ENOENT, errno.EINVAL)) def test_glob_common(self): def _check(glob, expected): self.assertEqual(set(glob), { P(BASE, q) for q in expected }) P = self.cls p = P(BASE) it = p.glob("fileA") self.assertIsInstance(it, collections.abc.Iterator) _check(it, ["fileA"]) _check(p.glob("fileB"), []) _check(p.glob("dir*/file*"), ["dirB/fileB", "dirC/fileC"]) if not support.can_symlink(): _check(p.glob("*A"), ['dirA', 'fileA']) else: _check(p.glob("*A"), ['dirA', 'fileA', 'linkA']) if not support.can_symlink(): _check(p.glob("*B/*"), ['dirB/fileB']) else: _check(p.glob("*B/*"), ['dirB/fileB', 'dirB/linkD', 'linkB/fileB', 'linkB/linkD']) if not support.can_symlink(): _check(p.glob("*/fileB"), ['dirB/fileB']) else: _check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB']) def test_rglob_common(self): def _check(glob, expected): self.assertEqual(set(glob), { P(BASE, q) for q in expected }) P = self.cls p = P(BASE) it = p.rglob("fileA") self.assertIsInstance(it, collections.abc.Iterator) _check(it, ["fileA"]) _check(p.rglob("fileB"), ["dirB/fileB"]) _check(p.rglob("*/fileA"), []) if not support.can_symlink(): _check(p.rglob("*/fileB"), ["dirB/fileB"]) else: _check(p.rglob("*/fileB"), ["dirB/fileB", "dirB/linkD/fileB", "linkB/fileB", "dirA/linkC/fileB"]) _check(p.rglob("file*"), ["fileA", "dirB/fileB", "dirC/fileC", "dirC/dirD/fileD"]) p = P(BASE, "dirC") _check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"]) _check(p.rglob("*/*"), ["dirC/dirD/fileD"]) @support.skip_unless_symlink def test_rglob_symlink_loop(self): # Don't get fooled by symlink loops (Issue #26012). P = self.cls p = P(BASE) given = set(p.rglob('*')) expect = {'brokenLink', 'dirA', 'dirA/linkC', 'dirB', 'dirB/fileB', 'dirB/linkD', 'dirC', 'dirC/dirD', 'dirC/dirD/fileD', 'dirC/fileC', 'dirE', 'fileA', 'linkA', 'linkB', 'brokenLinkLoop', } self.assertEqual(given, {p / x for x in expect}) def test_glob_dotdot(self): # ".." is not special in globs. P = self.cls p = P(BASE) self.assertEqual(set(p.glob("..")), { P(BASE, "..") }) self.assertEqual(set(p.glob("dirA/../file*")), { P(BASE, "dirA/../fileA") }) self.assertEqual(set(p.glob("../xyzzy")), set()) def _check_resolve(self, p, expected, strict=True): q = p.resolve(strict) self.assertEqual(q, expected) # This can be used to check both relative and absolute resolutions. _check_resolve_relative = _check_resolve_absolute = _check_resolve @support.skip_unless_symlink def test_resolve_common(self): P = self.cls p = P(BASE, 'foo') with self.assertRaises(OSError) as cm: p.resolve(strict=True) self.assertEqual(cm.exception.errno, errno.ENOENT) # Non-strict self.assertEqual(str(p.resolve(strict=False)), os.path.join(BASE, 'foo')) p = P(BASE, 'foo', 'in', 'spam') self.assertEqual(str(p.resolve(strict=False)), os.path.join(BASE, 'foo', 'in', 'spam')) p = P(BASE, '..', 'foo', 'in', 'spam') self.assertEqual(str(p.resolve(strict=False)), os.path.abspath(os.path.join('foo', 'in', 'spam'))) # These are all relative symlinks. p = P(BASE, 'dirB', 'fileB') self._check_resolve_relative(p, p) p = P(BASE, 'linkA') self._check_resolve_relative(p, P(BASE, 'fileA')) p = P(BASE, 'dirA', 'linkC', 'fileB') self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB')) p = P(BASE, 'dirB', 'linkD', 'fileB') self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB')) # Non-strict p = P(BASE, 'dirA', 'linkC', 'fileB', 'foo', 'in', 'spam') self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB', 'foo', 'in', 'spam'), False) p = P(BASE, 'dirA', 'linkC', '..', 'foo', 'in', 'spam') if os.name == 'nt': # In Windows, if linkY points to dirB, 'dirA\linkY\..' # resolves to 'dirA' without resolving linkY first. self._check_resolve_relative(p, P(BASE, 'dirA', 'foo', 'in', 'spam'), False) else: # In Posix, if linkY points to dirB, 'dirA/linkY/..' # resolves to 'dirB/..' first before resolving to parent of dirB. self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False) # Now create absolute symlinks. d = support._longpath(tempfile.mkdtemp(suffix='-dirD', dir=os.getcwd())) self.addCleanup(support.rmtree, d) os.symlink(os.path.join(d), join('dirA', 'linkX')) os.symlink(join('dirB'), os.path.join(d, 'linkY')) p = P(BASE, 'dirA', 'linkX', 'linkY', 'fileB') self._check_resolve_absolute(p, P(BASE, 'dirB', 'fileB')) # Non-strict p = P(BASE, 'dirA', 'linkX', 'linkY', 'foo', 'in', 'spam') self._check_resolve_relative(p, P(BASE, 'dirB', 'foo', 'in', 'spam'), False) p = P(BASE, 'dirA', 'linkX', 'linkY', '..', 'foo', 'in', 'spam') if os.name == 'nt': # In Windows, if linkY points to dirB, 'dirA\linkY\..' # resolves to 'dirA' without resolving linkY first. self._check_resolve_relative(p, P(d, 'foo', 'in', 'spam'), False) else: # In Posix, if linkY points to dirB, 'dirA/linkY/..' # resolves to 'dirB/..' first before resolving to parent of dirB. self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False) @support.skip_unless_symlink def test_resolve_dot(self): # See https://bitbucket.org/pitrou/pathlib/issue/9/pathresolve-fails-on-complex-symlinks p = self.cls(BASE) self.dirlink('.', join('0')) self.dirlink(os.path.join('0', '0'), join('1')) self.dirlink(os.path.join('1', '1'), join('2')) q = p / '2' self.assertEqual(q.resolve(strict=True), p) r = q / '3' / '4' self.assertRaises(FileNotFoundError, r.resolve, strict=True) # Non-strict self.assertEqual(r.resolve(strict=False), p / '3' / '4') def test_with(self): p = self.cls(BASE) it = p.iterdir() it2 = p.iterdir() next(it2) with p: pass # I/O operation on closed path. self.assertRaises(ValueError, next, it) self.assertRaises(ValueError, next, it2) self.assertRaises(ValueError, p.open) self.assertRaises(ValueError, p.resolve) self.assertRaises(ValueError, p.absolute) self.assertRaises(ValueError, p.__enter__) def test_chmod(self): p = self.cls(BASE) / 'fileA' mode = p.stat().st_mode # Clear writable bit. new_mode = mode & ~0o222 p.chmod(new_mode) self.assertEqual(p.stat().st_mode, new_mode) # Set writable bit. new_mode = mode | 0o222 p.chmod(new_mode) self.assertEqual(p.stat().st_mode, new_mode) # XXX also need a test for lchmod. def test_stat(self): p = self.cls(BASE) / 'fileA' st = p.stat() self.assertEqual(p.stat(), st) # Change file mode by flipping write bit. p.chmod(st.st_mode ^ 0o222) self.addCleanup(p.chmod, st.st_mode) self.assertNotEqual(p.stat(), st)
def test_lstat(self): p = self.cls(BASE)/ 'linkA' st = p.stat() self.assertNotEqual(st, p.lstat()) def test_lstat_nosymlink(self): p = self.cls(BASE) / 'fileA' st = p.stat() self.assertEqual(st, p.lstat()) @unittest.skipUnless(pwd, "the pwd module is needed for this test") def test_owner(self): p = self.cls(BASE) / 'fileA' uid = p.stat().st_uid try: name = pwd.getpwuid(uid).pw_name except KeyError: self.skipTest( "user %d doesn't have an entry in the system database" % uid) self.assertEqual(name, p.owner()) @unittest.skipUnless(grp, "the grp module is needed for this test") def test_group(self): p = self.cls(BASE) / 'fileA' gid = p.stat().st_gid try: name = grp.getgrgid(gid).gr_name except KeyError: self.skipTest( "group %d doesn't have an entry in the system database" % gid) self.assertEqual(name, p.group()) def test_unlink(self): p = self.cls(BASE) / 'fileA' p.unlink() self.assertFileNotFound(p.stat) self.assertFileNotFound(p.unlink) def test_unlink_missing_ok(self): p = self.cls(BASE) / 'fileAAA' self.assertFileNotFound(p.unlink) p.unlink(missing_ok=True) def test_rmdir(self): p = self.cls(BASE) / 'dirA' for q in p.iterdir(): q.unlink() p.rmdir() self.assertFileNotFound(p.stat) self.assertFileNotFound(p.unlink) def test_link_to(self): P = self.cls(BASE) p = P / 'fileA' size = p.stat().st_size # linking to another path. q = P / 'dirA' / 'fileAA' try: p.link_to(q) except PermissionError as e: self.skipTest('os.link(): %s' % e) self.assertEqual(q.stat().st_size, size) self.assertEqual(os.path.samefile(p, q), True) self.assertTrue(p.stat) # Linking to a str of a relative path. r = rel_join('fileAAA') q.link_to(r) self.assertEqual(os.stat(r).st_size, size) self.assertTrue(q.stat) def test_rename(self): P = self.cls(BASE) p = P / 'fileA' size = p.stat().st_size # Renaming to another path. q = P / 'dirA' / 'fileAA' p.rename(q) self.assertEqual(q.stat().st_size, size) self.assertFileNotFound(p.stat) # Renaming to a str of a relative path. r = rel_join('fileAAA') q.rename(r) self.assertEqual(os.stat(r).st_size, size) self.assertFileNotFound(q.stat) def test_replace(self): P = self.cls(BASE) p = P / 'fileA' size = p.stat().st_size # Replacing a non-existing path. q = P / 'dirA' / 'fileAA' p.replace(q) self.assertEqual(q.stat().st_size, size) self.assertFileNotFound(p.stat) # Replacing another (existing) path. r = rel_join('dirB', 'fileB') q.replace(r) self.assertEqual(os.stat(r).st_size, size) self.assertFileNotFound(q.stat) def test_touch_common(self): P = self.cls(BASE) p = P / 'newfileA' self.assertFalse(p.exists()) p.touch() self.assertTrue(p.exists()) st = p.stat() old_mtime = st.st_mtime old_mtime_ns = st.st_mtime_ns # Rewind the mtime sufficiently far in the past to work around # filesystem-specific timestamp granularity. os.utime(str(p), (old_mtime - 10, old_mtime - 10)) # The file mtime should be refreshed by calling touch() again. p.touch() st = p.stat() self.assertGreaterEqual(st.st_mtime_ns, old_mtime_ns) self.assertGreaterEqual(st.st_mtime, old_mtime) # Now with exist_ok=False. p = P / 'newfileB' self.assertFalse(p.exists()) p.touch(mode=0o700, exist_ok=False) self.assertTrue(p.exists()) self.assertRaises(OSError, p.touch, exist_ok=False) def test_touch_nochange(self): P = self.cls(BASE) p = P / 'fileA' p.touch() with p.open('rb') as f: self.assertEqual(f.read().strip(), b"this is file A") def test_mkdir(self): P = self.cls(BASE) p = P / 'newdirA' self.assertFalse(p.exists()) p.mkdir() self.assertTrue(p.exists()) self.assertTrue(p.is_dir()) with self.assertRaises(OSError) as cm: p.mkdir() self.assertEqual(cm.exception.errno, errno.EEXIST) def test_mkdir_parents(self): # Creating a chain of directories. p = self.cls(BASE, 'newdirB', 'newdirC') self.assertFalse(p.exists()) with self.assertRaises(OSError) as cm: p.mkdir() self.assertEqual(cm.exception.errno, errno.ENOENT) p.mkdir(parents=True) self.assertTrue(p.exists()) self.assertTrue(p.is_dir()) with self.assertRaises(OSError) as cm: p.mkdir(parents=True) self.assertEqual(cm.exception.errno, errno.EEXIST) # Test `mode` arg. mode = stat.S_IMODE(p.stat().st_mode) # Default mode. p = self.cls(BASE, 'newdirD', 'newdirE') p.mkdir(0o555, parents=True) self.assertTrue(p.exists()) self.assertTrue(p.is_dir()) if os.name != 'nt': # The directory's permissions follow the mode argument. self.assertEqual(stat.S_IMODE(p.stat().st_mode), 0o7555 & mode) # The parent's permissions follow the default process settings. self.assertEqual(stat.S_IMODE(p.parent.stat().st_mode), mode) def test_mkdir_exist_ok(self): p = self.cls(BASE, 'dirB') st_ctime_first = p.stat().st_ctime self.assertTrue(p.exists()) self.assertTrue(p.is_dir()) with self.assertRaises(FileExistsError) as cm: p.mkdir() self.assertEqual(cm.exception.errno, errno.EEXIST) p.mkdir(exist_ok=True) self.assertTrue(p.exists()) self.assertEqual(p.stat().st_ctime, st_ctime_first) def test_mkdir_exist_ok_with_parent(self): p = self.cls(BASE, 'dirC') self.assertTrue(p.exists()) with self.assertRaises(FileExistsError) as cm: p.mkdir() self.assertEqual(cm.exception.errno, errno.EEXIST) p = p / 'newdirC' p.mkdir(parents=True) st_ctime_first = p.stat().st_ctime self.assertTrue(p.exists()) with self.assertRaises(FileExistsError) as cm: p.mkdir(parents=True) self.assertEqual(cm.exception.errno, errno.EEXIST) p.mkdir(parents=True, exist_ok=True) self.assertTrue(p.exists()) self.assertEqual(p.stat().st_ctime, st_ctime_first) def test_mkdir_exist_ok_root(self): # Issue #25803: A drive root could raise PermissionError on Windows. self.cls('/').resolve().mkdir(exist_ok=True) self.cls('/').resolve().mkdir(parents=True, exist_ok=True) @only_nt # XXX: not sure how to test this on POSIX. def test_mkdir_with_unknown_drive(self): for d in 'ZYXWVUTSRQPONMLKJIHGFEDCBA': p = self.cls(d + ':\\') if not p.is_dir(): break else: self.skipTest("cannot find a drive that doesn't exist") with self.assertRaises(OSError): (p / 'child' / 'path').mkdir(parents=True) def test_mkdir_with_child_file(self): p = self.cls(BASE, 'dirB', 'fileB') self.assertTrue(p.exists()) # An exception is raised when the last path component is an existing # regular file, regardless of whether exist_ok is true or not. with self.assertRaises(FileExistsError) as cm: p.mkdir(parents=True) self.assertEqual(cm.exception.errno, errno.EEXIST) with self.assertRaises(FileExistsError) as cm: p.mkdir(parents=True, exist_ok=True) self.assertEqual(cm.exception.errno, errno.EEXIST) def test_mkdir_no_parents_file(self): p = self.cls(BASE, 'fileA') self.assertTrue(p.exists()) # An exception is raised when the last path component is an existing # regular file, regardless of whether exist_ok is true or not. with self.assertRaises(FileExistsError) as cm: p.mkdir() self.assertEqual(cm.exception.errno, errno.EEXIST) with self.assertRaises(FileExistsError) as cm: p.mkdir(exist_ok=True) self.assertEqual(cm.exception.errno, errno.EEXIST) def test_mkdir_concurrent_parent_creation(self): for pattern_num in range(32): p = self.cls(BASE, 'dirCPC%d' % pattern_num) self.assertFalse(p.exists()) def my_mkdir(path, mode=0o777): path = str(path) # Emulate another process that would create the directory # just before we try to create it ourselves. We do it # in all possible pattern combinations, assuming that this # function is called at most 5 times (dirCPC/dir1/dir2, # dirCPC/dir1, dirCPC, dirCPC/dir1, dirCPC/dir1/dir2). if pattern.pop(): os.mkdir(path, mode) # From another process. concurrently_created.add(path) os.mkdir(path, mode) # Our real call. pattern = [bool(pattern_num & (1 << n)) for n in range(5)] concurrently_created = set() p12 = p / 'dir1' / 'dir2' try: with mock.patch("pathlib._normal_accessor.mkdir", my_mkdir): p12.mkdir(parents=True, exist_ok=False) except FileExistsError: self.assertIn(str(p12), concurrently_created) else: self.assertNotIn(str(p12), concurrently_created) self.assertTrue(p.exists()) @support.skip_unless_symlink def test_symlink_to(self): P = self.cls(BASE) target = P / 'fileA' # Symlinking a path target. link = P / 'dirA' / 'linkAA' link.symlink_to(target) self.assertEqual(link.stat(), target.stat()) self.assertNotEqual(link.lstat(), target.stat()) # Symlinking a str target. link = P / 'dirA' / 'linkAAA' link.symlink_to(str(target)) self.assertEqual(link.stat(), target.stat()) self.assertNotEqual(link.lstat(), target.stat()) self.assertFalse(link.is_dir()) # Symlinking to a directory. target = P / 'dirB' link = P / 'dirA' / 'linkAAAA' link.symlink_to(target, target_is_directory=True) self.assertEqual(link.stat(), target.stat()) self.assertNotEqual(link.lstat(), target.stat()) self.assertTrue(link.is_dir()) self.assertTrue(list(link.iterdir())) def test_is_dir(self): P = self.cls(BASE) self.assertTrue((P / 'dirA').is_dir()) self.assertFalse((P / 'fileA').is_dir()) self.assertFalse((P / 'non-existing').is_dir()) self.assertFalse((P / 'fileA' / 'bah').is_dir()) if support.can_symlink(): self.assertFalse((P / 'linkA').is_dir()) self.assertTrue((P / 'linkB').is_dir()) self.assertFalse((P/ 'brokenLink').is_dir(), False) self.assertIs((P / 'dirA\udfff').is_dir(), False) self.assertIs((P / 'dirA\x00').is_dir(), False) def test_is_file(self): P = self.cls(BASE) self.assertTrue((P / 'fileA').is_file()) self.assertFalse((P / 'dirA').is_file()) self.assertFalse((P / 'non-existing').is_file()) self.assertFalse((P / 'fileA' / 'bah').is_file()) if support.can_symlink(): self.assertTrue((P / 'linkA').is_file()) self.assertFalse((P / 'linkB').is_file()) self.assertFalse((P/ 'brokenLink').is_file()) self.assertIs((P / 'fileA\udfff').is_file(), False) self.assertIs((P / 'fileA\x00').is_file(), False) @only_posix def test_is_mount(self): P = self.cls(BASE) R = self.cls('/') # TODO: Work out Windows. self.assertFalse((P / 'fileA').is_mount()) self.assertFalse((P / 'dirA').is_mount()) self.assertFalse((P / 'non-existing').is_mount()) self.assertFalse((P / 'fileA' / 'bah').is_mount()) self.assertTrue(R.is_mount()) if support.can_symlink(): self.assertFalse((P / 'linkA').is_mount()) self.assertIs(self.cls('/\udfff').is_mount(), False) self.assertIs(self.cls('/\x00').is_mount(), False) def test_is_symlink(self): P = self.cls(BASE) self.assertFalse((P / 'fileA').is_symlink()) self.assertFalse((P / 'dirA').is_symlink()) self.assertFalse((P / 'non-existing').is_symlink()) self.assertFalse((P / 'fileA' / 'bah').is_symlink()) if support.can_symlink(): self.assertTrue((P / 'linkA').is_symlink()) self.assertTrue((P / 'linkB').is_symlink()) self.assertTrue((P/ 'brokenLink').is_symlink()) self.assertIs((P / 'fileA\udfff').is_file(), False) self.assertIs((P / 'fileA\x00').is_file(), False) if support.can_symlink(): self.assertIs((P / 'linkA\udfff').is_file(), False) self.assertIs((P / 'linkA\x00').is_file(), False) def test_is_fifo_false(self): P = self.cls(BASE) self.assertFalse((P / 'fileA').is_fifo()) self.assertFalse((P / 'dirA').is_fifo()) self.assertFalse((P / 'non-existing').is_fifo()) self.assertFalse((P / 'fileA' / 'bah').is_fifo()) self.assertIs((P / 'fileA\udfff').is_fifo(), False) self.assertIs((P / 'fileA\x00').is_fifo(), False) @unittest.skipUnless(hasattr(os, "mkfifo"), "os.mkfifo() required") def test_is_fifo_true(self): P = self.cls(BASE, 'myfifo') try: os.mkfifo(str(P)) except PermissionError as e: self.skipTest('os.mkfifo(): %s' % e) self.assertTrue(P.is_fifo()) self.assertFalse(P.is_socket()) self.assertFalse(P.is_file()) self.assertIs(self.cls(BASE, 'myfifo\udfff').is_fifo(), False) self.assertIs(self.cls(BASE, 'myfifo\x00').is_fifo(), False) def test_is_socket_false(self): P = self.cls(BASE) self.assertFalse((P / 'fileA').is_socket()) self.assertFalse((P / 'dirA').is_socket()) self.assertFalse((P / 'non-existing').is_socket()) self.assertFalse((P / 'fileA' / 'bah').is_socket()) self.assertIs((P / 'fileA\udfff').is_socket(), False) self.assertIs((P / 'fileA\x00').is_socket(), False) @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") def test_is_socket_true(self): P = self.cls(BASE, 'mysock') sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.addCleanup(sock.close) try: sock.bind(str(P)) except OSError as e: if (isinstance(e, PermissionError) or "AF_UNIX path too long" in str(e)): self.skipTest("cannot bind Unix socket: " + str(e)) self.assertTrue(P.is_socket()) self.assertFalse(P.is_fifo()) self.assertFalse(P.is_file()) self.assertIs(self.cls(BASE, 'mysock\udfff').is_socket(), False) self.assertIs(self.cls(BASE, 'mysock\x00').is_socket(), False) def test_is_block_device_false(self): P = self.cls(BASE) self.assertFalse((P / 'fileA').is_block_device()) self.assertFalse((P / 'dirA').is_block_device()) self.assertFalse((P / 'non-existing').is_block_device()) self.assertFalse((P / 'fileA' / 'bah').is_block_device()) self.assertIs((P / 'fileA\udfff').is_block_device(), False) self.assertIs((P / 'fileA\x00').is_block_device(), False) def test_is_char_device_false(self): P = self.cls(BASE) self.assertFalse((P / 'fileA').is_char_device()) self.assertFalse((P / 'dirA').is_char_device()) self.assertFalse((P / 'non-existing').is_char_device()) self.assertFalse((P / 'fileA' / 'bah').is_char_device()) self.assertIs((P / 'fileA\udfff').is_char_device(), False) self.assertIs((P / 'fileA\x00').is_char_device(), False) def test_is_char_device_true(self): # Under Unix, /dev/null should generally be a char device. P = self.cls('/dev/null') if not P.exists(): self.skipTest("/dev/null required") self.assertTrue(P.is_char_device()) self.assertFalse(P.is_block_device()) self.assertFalse(P.is_file()) self.assertIs(self.cls('/dev/null\udfff').is_char_device(), False) self.assertIs(self.cls('/dev/null\x00').is_char_device(), False) def test_pickling_common(self): p = self.cls(BASE, 'fileA') for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): dumped = pickle.dumps(p, proto) pp = pickle.loads(dumped) self.assertEqual(pp.stat(), p.stat()) def test_parts_interning(self): P = self.cls p = P('/usr/bin/foo') q = P('/usr/local/bin') # 'usr' self.assertIs(p.parts[1], q.parts[1]) # 'bin' self.assertIs(p.parts[2], q.parts[3]) def _check_complex_symlinks(self, link0_target): # Test solving a non-looping chain of symlinks (issue #19887). P = self.cls(BASE) self.dirlink(os.path.join('link0', 'link0'), join('link1')) self.dirlink(os.path.join('link1', 'link1'), join('link2')) self.dirlink(os.path.join('link2', 'link2'), join('link3')) self.dirlink(link0_target, join('link0')) # Resolve absolute paths. p = (P / 'link0').resolve() self.assertEqual(p, P) self.assertEqual(str(p), BASE) p = (P / 'link1').resolve() self.assertEqual(p, P) self.assertEqual(str(p), BASE) p = (P / 'link2').resolve() self.assertEqual(p, P) self.assertEqual(str(p), BASE) p = (P / 'link3').resolve() self.assertEqual(p, P) self.assertEqual(str(p), BASE) # Resolve relative paths. old_path = os.getcwd() os.chdir(BASE) try: p = self.cls('link0').resolve() self.assertEqual(p, P) self.assertEqual(str(p), BASE) p = self.cls('link1').resolve() self.assertEqual(p, P) self.assertEqual(str(p), BASE) p = self.cls('link2').resolve() self.assertEqual(p, P) self.assertEqual(str(p), BASE) p = self.cls('link3').resolve() self.assertEqual(p, P) self.assertEqual(str(p), BASE) finally: os.chdir(old_path) @support.skip_unless_symlink def test_complex_symlinks_absolute(self): self._check_complex_symlinks(BASE) @support.skip_unless_symlink def test_complex_symlinks_relative(self): self._check_complex_symlinks('.') @support.skip_unless_symlink def test_complex_symlinks_relative_dot_dot(self): self._check_complex_symlinks(os.path.join('dirA', '..')) class PathTest(_BasePathTest, unittest.TestCase): cls = pathlib.Path def test_concrete_class(self): p = self.cls('a') self.assertIs(type(p), pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath) def test_unsupported_flavour(self): if os.name == 'nt': self.assertRaises(NotImplementedError, pathlib.PosixPath) else: self.assertRaises(NotImplementedError, pathlib.WindowsPath) def test_glob_empty_pattern(self): p = self.cls() with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'): list(p.glob('')) @only_posix class PosixPathTest(_BasePathTest, unittest.TestCase): cls = pathlib.PosixPath def _check_symlink_loop(self, *args, strict=True): path = self.cls(*args) with self.assertRaises(RuntimeError): print(path.resolve(strict)) def test_open_mode(self): old_mask = os.umask(0) self.addCleanup(os.umask, old_mask) p = self.cls(BASE) with (p / 'new_file').open('wb'): pass st = os.stat(join('new_file')) self.assertEqual(stat.S_IMODE(st.st_mode), 0o666) os.umask(0o022) with (p / 'other_new_file').open('wb'): pass st = os.stat(join('other_new_file')) self.assertEqual(stat.S_IMODE(st.st_mode), 0o644) def test_touch_mode(self): old_mask = os.umask(0) self.addCleanup(os.umask, old_mask) p = self.cls(BASE) (p / 'new_file').touch() st = os.stat(join('new_file')) self.assertEqual(stat.S_IMODE(st.st_mode), 0o666) os.umask(0o022) (p / 'other_new_file').touch() st = os.stat(join('other_new_file')) self.assertEqual(stat.S_IMODE(st.st_mode), 0o644) (p / 'masked_new_file').touch(mode=0o750) st = os.stat(join('masked_new_file')) self.assertEqual(stat.S_IMODE(st.st_mode), 0o750) @support.skip_unless_symlink def test_resolve_loop(self): # Loops with relative symlinks. os.symlink('linkX/inside', join('linkX')) self._check_symlink_loop(BASE, 'linkX') os.symlink('linkY', join('linkY')) self._check_symlink_loop(BASE, 'linkY') os.symlink('linkZ/../linkZ', join('linkZ')) self._check_symlink_loop(BASE, 'linkZ') # Non-strict self._check_symlink_loop(BASE, 'linkZ', 'foo', strict=False) # Loops with absolute symlinks. os.symlink(join('linkU/inside'), join('linkU')) self._check_symlink_loop(BASE, 'linkU') os.symlink(join('linkV'), join('linkV')) self._check_symlink_loop(BASE, 'linkV') os.symlink(join('linkW/../linkW'), join('linkW')) self._check_symlink_loop(BASE, 'linkW') # Non-strict self._check_symlink_loop(BASE, 'linkW', 'foo', strict=False) def test_glob(self): P = self.cls p = P(BASE) given = set(p.glob("FILEa")) expect = set() if not support.fs_is_case_insensitive(BASE) else given self.assertEqual(given, expect) self.assertEqual(set(p.glob("FILEa*")), set()) def test_rglob(self): P = self.cls p = P(BASE, "dirC") given = set(p.rglob("FILEd")) expect = set() if not support.fs_is_case_insensitive(BASE) else given self.assertEqual(given, expect) self.assertEqual(set(p.rglob("FILEd*")), set()) @unittest.skipUnless(hasattr(pwd, 'getpwall'), 'pwd module does not expose getpwall()') def test_expanduser(self): P = self.cls support.import_module('pwd') import pwd pwdent = pwd.getpwuid(os.getuid()) username = pwdent.pw_name userhome = pwdent.pw_dir.rstrip('/') or '/' # Find arbitrary different user (if exists). for pwdent in pwd.getpwall(): othername = pwdent.pw_name otherhome = pwdent.pw_dir.rstrip('/') if othername != username and otherhome: break else: othername = username otherhome = userhome p1 = P('~/Documents') p2 = P('~' + username + '/Documents') p3 = P('~' + othername + '/Documents') p4 = P('../~' + username + '/Documents') p5 = P('/~' + username + '/Documents') p6 = P('') p7 = P('~fakeuser/Documents') with support.EnvironmentVarGuard() as env: env.pop('HOME', None) self.assertEqual(p1.expanduser(), P(userhome) / 'Documents') self.assertEqual(p2.expanduser(), P(userhome) / 'Documents') self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents') self.assertEqual(p4.expanduser(), p4) self.assertEqual(p5.expanduser(), p5) self.assertEqual(p6.expanduser(), p6) self.assertRaises(RuntimeError, p7.expanduser) env['HOME'] = '/tmp' self.assertEqual(p1.expanduser(), P('/tmp/Documents')) self.assertEqual(p2.expanduser(), P(userhome) / 'Documents') self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents') self.assertEqual(p4.expanduser(), p4) self.assertEqual(p5.expanduser(), p5) self.assertEqual(p6.expanduser(), p6) self.assertRaises(RuntimeError, p7.expanduser) @unittest.skipIf(sys.platform != "darwin", "Bad file descriptor in /dev/fd affects only macOS") def test_handling_bad_descriptor(self): try: file_descriptors = list(pathlib.Path('/dev/fd').rglob("*"))[3:] if not file_descriptors: self.skipTest("no file descriptors - issue was not reproduced") # Checking all file descriptors because there is no guarantee # which one will fail. for f in file_descriptors: f.exists() f.is_dir() f.is_file() f.is_symlink() f.is_block_device() f.is_char_device() f.is_fifo() f.is_socket() except OSError as e: if e.errno == errno.EBADF: self.fail("Bad file descriptor not handled.") raise @only_nt class WindowsPathTest(_BasePathTest, unittest.TestCase): cls = pathlib.WindowsPath def test_glob(self): P = self.cls p = P(BASE) self.assertEqual(set(p.glob("FILEa")), { P(BASE, "fileA") }) def test_rglob(self): P = self.cls p = P(BASE, "dirC") self.assertEqual(set(p.rglob("FILEd")), { P(BASE, "dirC/dirD/fileD") }) def test_expanduser(self): P = self.cls with support.EnvironmentVarGuard() as env: env.pop('HOME', None) env.pop('USERPROFILE', None) env.pop('HOMEPATH', None) env.pop('HOMEDRIVE', None) env['USERNAME'] = 'alice' # test that the path returns unchanged p1 = P('~/My Documents') p2 = P('~alice/My Documents') p3 = P('~bob/My Documents') p4 = P('/~/My Documents') p5 = P('d:~/My Documents') p6 = P('') self.assertRaises(RuntimeError, p1.expanduser) self.assertRaises(RuntimeError, p2.expanduser) self.assertRaises(RuntimeError, p3.expanduser) self.assertEqual(p4.expanduser(), p4) self.assertEqual(p5.expanduser(), p5) self.assertEqual(p6.expanduser(), p6) def check(): env.pop('USERNAME', None) self.assertEqual(p1.expanduser(), P('C:/Users/alice/My Documents')) self.assertRaises(KeyError, p2.expanduser) env['USERNAME'] = 'alice' self.assertEqual(p2.expanduser(), P('C:/Users/alice/My Documents')) self.assertEqual(p3.expanduser(), P('C:/Users/bob/My Documents')) self.assertEqual(p4.expanduser(), p4) self.assertEqual(p5.expanduser(), p5) self.assertEqual(p6.expanduser(), p6) # Test the first lookup key in the env vars. env['HOME'] = 'C:\\Users\\alice' check() # Test that HOMEPATH is available instead. env.pop('HOME', None) env['HOMEPATH'] = 'C:\\Users\\alice' check() env['HOMEDRIVE'] = 'C:\\' env['HOMEPATH'] = 'Users\\alice' check() env.pop('HOMEDRIVE', None) env.pop('HOMEPATH', None) env['USERPROFILE'] = 'C:\\Users\\alice' check() class CompatiblePathTest(unittest.TestCase): """ Test that a type can be made compatible with PurePath derivatives by implementing division operator overloads. """ class CompatPath: """ Minimum viable class to test PurePath compatibility. Simply uses the division operator to join a given string and the string value of another object with a forward slash. """ def __init__(self, string): self.string = string def __truediv__(self, other): return type(self)(f"{self.string}/{other}") def __rtruediv__(self, other): return type(self)(f"{other}/{self.string}") def test_truediv(self): result = pathlib.PurePath("test") / self.CompatPath("right") self.assertIsInstance(result, self.CompatPath) self.assertEqual(result.string, "test/right") with self.assertRaises(TypeError): # Verify improper operations still raise a TypeError pathlib.PurePath("test") / 10 def test_rtruediv(self): result = self.CompatPath("left") / pathlib.PurePath("test") self.assertIsInstance(result, self.CompatPath) self.assertEqual(result.string, "left/test") with self.assertRaises(TypeError): # Verify improper operations still raise a TypeError 10 / pathlib.PurePath("test") if __name__ == "__main__": unittest.main()
@support.skip_unless_symlink
tags.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import sys import six import llnl.util.tty as tty import llnl.util.tty.colify as colify import spack.repo import spack.store import spack.tag description = "Show package tags and associated packages" section = "basic" level = "long" def report_tags(category, tags): buffer = six.StringIO() isatty = sys.stdout.isatty() if isatty: num = len(tags) fmt = '{0} package tag'.format(category) buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt))) if tags: colify.colify(tags, output=buffer, tty=isatty, indent=4) else: buffer.write(" None\n") print(buffer.getvalue()) def setup_parser(subparser): subparser.epilog = ( "Tags from known packages will be used if no tags are provided on " "the command\nline. If tags are provided, packages with at least one " "will be reported.\n\nYou are not allowed to provide tags and use " "'--all' at the same time." ) subparser.add_argument( '-i', '--installed', action='store_true', default=False, help="show information for installed packages only" ) subparser.add_argument( '-a', '--all', action='store_true', default=False, help="show packages for all available tags" ) subparser.add_argument( 'tag', nargs='*', help="show packages with the specified tag" ) def tags(parser, args): # Disallow combining all option with (positional) tags to avoid confusion if args.all and args.tag: tty.die("Use the '--all' option OR provide tag(s) on the command line") # Provide a nice, simple message if database is empty if args.installed and not spack.environment.installed_specs(): tty.msg("No installed packages") return # unique list of available tags available_tags = sorted(spack.repo.path.tag_index.keys()) if not available_tags: tty.msg("No tagged packages") return show_packages = args.tag or args.all # Only report relevant, available tags if no packages are to be shown if not show_packages: if not args.installed: report_tags("available", available_tags) else: tag_pkgs = spack.tag.packages_with_tags(available_tags, True, True) tags = tag_pkgs.keys() if tag_pkgs else [] report_tags("installed", tags) return # Report packages associated with tags buffer = six.StringIO() isatty = sys.stdout.isatty() tags = args.tag if args.tag else available_tags tag_pkgs = spack.tag.packages_with_tags(tags, args.installed, False) missing = 'No installed packages' if args.installed else 'None' for tag in sorted(tag_pkgs): # TODO: Remove the sorting once we're sure noone has an old # TODO: tag cache since it can accumulate duplicates. packages = sorted(list(set(tag_pkgs[tag]))) if isatty: buffer.write("{0}:\n".format(tag))
if packages: colify.colify(packages, output=buffer, tty=isatty, indent=4) else: buffer.write(" {0}\n".format(missing)) buffer.write("\n") print(buffer.getvalue())
sql_parser.rs
// Copyright 2021 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use common_exception::Result; use common_meta_types::Compression; use common_meta_types::Credentials; use common_meta_types::FileFormat; use common_meta_types::Format; use common_meta_types::StageParams; use common_meta_types::UserIdentity; use common_meta_types::UserPrivilegeSet; use common_meta_types::UserPrivilegeType; use common_planners::Optimization; use databend_query::sql::statements::DfAlterUDF; use databend_query::sql::statements::DfAlterUser; use databend_query::sql::statements::DfAuthOption; use databend_query::sql::statements::DfCopy; use databend_query::sql::statements::DfCreateDatabase; use databend_query::sql::statements::DfCreateStage; use databend_query::sql::statements::DfCreateTable; use databend_query::sql::statements::DfCreateUDF; use databend_query::sql::statements::DfCreateUser; use databend_query::sql::statements::DfDescribeTable; use databend_query::sql::statements::DfDropDatabase; use databend_query::sql::statements::DfDropStage; use databend_query::sql::statements::DfDropTable; use databend_query::sql::statements::DfDropUDF; use databend_query::sql::statements::DfDropUser; use databend_query::sql::statements::DfGrantObject; use databend_query::sql::statements::DfGrantStatement; use databend_query::sql::statements::DfOptimizeTable; use databend_query::sql::statements::DfQueryStatement; use databend_query::sql::statements::DfRevokeStatement; use databend_query::sql::statements::DfShowCreateDatabase; use databend_query::sql::statements::DfShowCreateTable; use databend_query::sql::statements::DfShowDatabases; use databend_query::sql::statements::DfShowGrants; use databend_query::sql::statements::DfShowTables; use databend_query::sql::statements::DfTruncateTable; use databend_query::sql::statements::DfUseDatabase; use databend_query::sql::statements::DfUseTenant; use databend_query::sql::*; use sqlparser::ast::*; use sqlparser::dialect::GenericDialect; use sqlparser::parser::Parser; use sqlparser::parser::ParserError; use sqlparser::tokenizer::Tokenizer; fn expect_parse_ok(sql: &str, expected: DfStatement) -> Result<()>
fn expect_parse_err(sql: &str, expected: String) -> Result<()> { let result = DfParser::parse_sql(sql); assert!(result.is_err(), "'{}' SHOULD BE '{}'", sql, expected); assert_eq!( result.unwrap_err().message(), expected, "'{}' SHOULD BE '{}'", sql, expected ); Ok(()) } fn expect_parse_err_contains(sql: &str, expected: String) -> Result<()> { let result = DfParser::parse_sql(sql); assert!(result.is_err(), "'{}' SHOULD CONTAINS '{}'", sql, expected); assert!( result.unwrap_err().message().contains(&expected), "'{}' SHOULD CONTAINS '{}'", sql, expected ); Ok(()) } fn verified_query(sql: &str) -> Result<Box<DfQueryStatement>> { let mut parser = DfParser::new_with_dialect(sql, &GenericDialect {})?; let stmt = parser.parse_statement()?; if let DfStatement::Query(query) = stmt { return Ok(query); } Err(ParserError::ParserError("Expect query statement".to_string()).into()) } fn make_column_def(name: impl Into<String>, data_type: DataType) -> ColumnDef { ColumnDef { name: Ident { value: name.into(), quote_style: None, }, data_type, collation: None, options: vec![], } } fn parse_sql_to_expr(query_expr: &str) -> Expr { let dialect = GenericDialect {}; let mut tokenizer = Tokenizer::new(&dialect, query_expr); let tokens = tokenizer.tokenize().unwrap(); let mut parser = Parser::new(tokens, &dialect); parser.parse_expr().unwrap() } #[test] fn create_database() -> Result<()> { { let sql = "CREATE DATABASE db1"; let expected = DfStatement::CreateDatabase(DfCreateDatabase { if_not_exists: false, name: ObjectName(vec![Ident::new("db1")]), engine: "".to_string(), engine_options: HashMap::new(), options: HashMap::new(), }); expect_parse_ok(sql, expected)?; } { let sql = "CREATE DATABASE db1 engine = github"; let expected = DfStatement::CreateDatabase(DfCreateDatabase { if_not_exists: false, name: ObjectName(vec![Ident::new("db1")]), engine: "github".to_string(), engine_options: HashMap::new(), options: HashMap::new(), }); expect_parse_ok(sql, expected)?; } { let sql = "CREATE DATABASE IF NOT EXISTS db1"; let expected = DfStatement::CreateDatabase(DfCreateDatabase { if_not_exists: true, name: ObjectName(vec![Ident::new("db1")]), engine: "".to_string(), engine_options: HashMap::new(), options: HashMap::new(), }); expect_parse_ok(sql, expected)?; } Ok(()) } #[test] fn drop_database() -> Result<()> { { let sql = "DROP DATABASE db1"; let expected = DfStatement::DropDatabase(DfDropDatabase { if_exists: false, name: ObjectName(vec![Ident::new("db1")]), }); expect_parse_ok(sql, expected)?; } { let sql = "DROP DATABASE IF EXISTS db1"; let expected = DfStatement::DropDatabase(DfDropDatabase { if_exists: true, name: ObjectName(vec![Ident::new("db1")]), }); expect_parse_ok(sql, expected)?; } Ok(()) } #[test] fn create_table() -> Result<()> { // positive case let sql = "CREATE TABLE t(c1 int) ENGINE = Fuse location = '/data/33.csv' "; let expected = DfStatement::CreateTable(DfCreateTable { if_not_exists: false, name: ObjectName(vec![Ident::new("t")]), columns: vec![make_column_def("c1", DataType::Int(None))], engine: "Fuse".to_string(), options: maplit::hashmap! {"location".into() => "/data/33.csv".into()}, like: None, query: None, }); expect_parse_ok(sql, expected)?; // positive case: it is ok for parquet files not to have columns specified let sql = "CREATE TABLE t(c1 int, c2 bigint, c3 varchar(255) ) ENGINE = Fuse location = 'foo.parquet' comment = 'foo'"; let expected = DfStatement::CreateTable(DfCreateTable { if_not_exists: false, name: ObjectName(vec![Ident::new("t")]), columns: vec![ make_column_def("c1", DataType::Int(None)), make_column_def("c2", DataType::BigInt(None)), make_column_def("c3", DataType::Varchar(Some(255))), ], engine: "Fuse".to_string(), options: maplit::hashmap! { "location".into() => "foo.parquet".into(), "comment".into() => "foo".into(), }, like: None, query: None, }); expect_parse_ok(sql, expected)?; // create table like statement let sql = "CREATE TABLE db1.test1 LIKE db2.test2 ENGINE = Parquet location = 'batcave'"; let expected = DfStatement::CreateTable(DfCreateTable { if_not_exists: false, name: ObjectName(vec![Ident::new("db1"), Ident::new("test1")]), columns: vec![], engine: "Parquet".to_string(), options: maplit::hashmap! {"location".into() => "batcave".into()}, like: Some(ObjectName(vec![Ident::new("db2"), Ident::new("test2")])), query: None, }); expect_parse_ok(sql, expected)?; // create table as select statement let sql = "CREATE TABLE db1.test1(c1 int, c2 varchar(255)) ENGINE = Parquet location = 'batcave' AS SELECT * FROM t2"; let expected = DfStatement::CreateTable(DfCreateTable { if_not_exists: false, name: ObjectName(vec![Ident::new("db1"), Ident::new("test1")]), columns: vec![ make_column_def("c1", DataType::Int(None)), make_column_def("c2", DataType::Varchar(Some(255))), ], engine: "Parquet".to_string(), options: maplit::hashmap! {"location".into() => "batcave".into()}, like: None, query: Some(Box::new(DfQueryStatement { from: vec![TableWithJoins { relation: TableFactor::Table { name: ObjectName(vec![Ident::new("t2")]), alias: None, args: vec![], with_hints: vec![], }, joins: vec![], }], projection: vec![SelectItem::Wildcard], selection: None, group_by: vec![], having: None, order_by: vec![], limit: None, offset: None, })), }); expect_parse_ok(sql, expected)?; Ok(()) } #[test] fn drop_table() -> Result<()> { { let sql = "DROP TABLE t1"; let expected = DfStatement::DropTable(DfDropTable { if_exists: false, name: ObjectName(vec![Ident::new("t1")]), }); expect_parse_ok(sql, expected)?; } { let sql = "DROP TABLE IF EXISTS t1"; let expected = DfStatement::DropTable(DfDropTable { if_exists: true, name: ObjectName(vec![Ident::new("t1")]), }); expect_parse_ok(sql, expected)?; } Ok(()) } #[test] fn describe_table() -> Result<()> { { let sql = "DESCRIBE t1"; let expected = DfStatement::DescribeTable(DfDescribeTable { name: ObjectName(vec![Ident::new("t1")]), }); expect_parse_ok(sql, expected)?; } { let sql = "DESC t1"; let expected = DfStatement::DescribeTable(DfDescribeTable { name: ObjectName(vec![Ident::new("t1")]), }); expect_parse_ok(sql, expected)?; } Ok(()) } #[test] fn show_queries() -> Result<()> { use databend_query::sql::statements::DfShowSettings; use databend_query::sql::statements::DfShowTables; // positive case expect_parse_ok("SHOW TABLES", DfStatement::ShowTables(DfShowTables::All))?; expect_parse_ok("SHOW TABLES;", DfStatement::ShowTables(DfShowTables::All))?; expect_parse_ok("SHOW SETTINGS", DfStatement::ShowSettings(DfShowSettings))?; expect_parse_ok( "SHOW TABLES LIKE 'aaa'", DfStatement::ShowTables(DfShowTables::Like(Ident::with_quote('\'', "aaa"))), )?; expect_parse_ok( "SHOW TABLES --comments should not in sql case1", DfStatement::ShowTables(DfShowTables::All), )?; expect_parse_ok( "SHOW TABLES LIKE 'aaa' --comments should not in sql case2", DfStatement::ShowTables(DfShowTables::Like(Ident::with_quote('\'', "aaa"))), )?; expect_parse_ok( "SHOW TABLES WHERE t LIKE 'aaa'", DfStatement::ShowTables(DfShowTables::Where(parse_sql_to_expr("t LIKE 'aaa'"))), )?; expect_parse_ok( "SHOW TABLES LIKE 'aaa' --comments should not in sql case2", DfStatement::ShowTables(DfShowTables::Like(Ident::with_quote('\'', "aaa"))), )?; expect_parse_ok( "SHOW TABLES WHERE t LIKE 'aaa' AND t LIKE 'a%'", DfStatement::ShowTables(DfShowTables::Where(parse_sql_to_expr( "t LIKE 'aaa' AND t LIKE 'a%'", ))), )?; Ok(()) } #[test] fn show_tables_test() -> Result<()> { let mut ident = Ident::new("ss"); ident.quote_style = Some('`'); let v = vec![ident]; let name = ObjectName(v); let name_two = name.clone(); expect_parse_ok( "SHOW TABLES FROM `ss`", DfStatement::ShowTables(DfShowTables::FromOrIn(name)), )?; expect_parse_ok( "SHOW TABLES IN `ss`", DfStatement::ShowTables(DfShowTables::FromOrIn(name_two)), )?; Ok(()) } #[test] fn show_grants_test() -> Result<()> { expect_parse_ok( "SHOW GRANTS", DfStatement::ShowGrants(DfShowGrants { user_identity: None, }), )?; expect_parse_ok( "SHOW GRANTS FOR 'u1'@'%'", DfStatement::ShowGrants(DfShowGrants { user_identity: Some(UserIdentity { username: "u1".into(), hostname: "%".into(), }), }), )?; Ok(()) } #[test] fn show_functions_tests() -> Result<()> { use databend_query::sql::statements::DfShowFunctions; // positive case expect_parse_ok( "SHOW FUNCTIONS", DfStatement::ShowFunctions(DfShowFunctions::All), )?; expect_parse_ok( "SHOW FUNCTIONS;", DfStatement::ShowFunctions(DfShowFunctions::All), )?; expect_parse_ok( "SHOW FUNCTIONS --comments should not in sql case1", DfStatement::ShowFunctions(DfShowFunctions::All), )?; expect_parse_ok( "SHOW FUNCTIONS LIKE 'aaa'", DfStatement::ShowFunctions(DfShowFunctions::Like(Ident::with_quote('\'', "aaa"))), )?; expect_parse_ok( "SHOW FUNCTIONS LIKE 'aaa';", DfStatement::ShowFunctions(DfShowFunctions::Like(Ident::with_quote('\'', "aaa"))), )?; expect_parse_ok( "SHOW FUNCTIONS LIKE 'aaa' --comments should not in sql case2", DfStatement::ShowFunctions(DfShowFunctions::Like(Ident::with_quote('\'', "aaa"))), )?; expect_parse_ok( "SHOW FUNCTIONS WHERE t LIKE 'aaa'", DfStatement::ShowFunctions(DfShowFunctions::Where(parse_sql_to_expr("t LIKE 'aaa'"))), )?; expect_parse_ok( "SHOW FUNCTIONS LIKE 'aaa' --comments should not in sql case2", DfStatement::ShowFunctions(DfShowFunctions::Like(Ident::with_quote('\'', "aaa"))), )?; expect_parse_ok( "SHOW FUNCTIONS WHERE t LIKE 'aaa' AND t LIKE 'a%'", DfStatement::ShowFunctions(DfShowFunctions::Where(parse_sql_to_expr( "t LIKE 'aaa' AND t LIKE 'a%'", ))), )?; Ok(()) } #[test] fn use_test() -> Result<()> { expect_parse_ok( "USe db1", DfStatement::UseDatabase(DfUseDatabase { name: ObjectName(vec![Ident::new("db1")]), }), )?; expect_parse_ok( "use db1", DfStatement::UseDatabase(DfUseDatabase { name: ObjectName(vec![Ident::new("db1")]), }), )?; Ok(()) } #[test] fn sudo_command_test() -> Result<()> { expect_parse_ok( "use `tenant`", DfStatement::UseDatabase(DfUseDatabase { name: ObjectName(vec![Ident::with_quote('`', "tenant")]), }), )?; expect_parse_ok( "sudo use tenant 'xx'", DfStatement::UseTenant(DfUseTenant { name: ObjectName(vec![Ident::with_quote('\'', "xx")]), }), )?; expect_parse_err( "sudo yy", String::from("sql parser error: Expected Unsupported sudo command, found: yy"), )?; expect_parse_err( "sudo use xx", String::from("sql parser error: Expected Unsupported sudo command, found: xx"), )?; Ok(()) } #[test] fn truncate_table() -> Result<()> { { let sql = "TRUNCATE TABLE t1"; let expected = DfStatement::TruncateTable(DfTruncateTable { name: ObjectName(vec![Ident::new("t1")]), purge: false, }); expect_parse_ok(sql, expected)?; } { let sql = "TRUNCATE TABLE t1 purge"; let expected = DfStatement::TruncateTable(DfTruncateTable { name: ObjectName(vec![Ident::new("t1")]), purge: true, }); expect_parse_ok(sql, expected)?; } Ok(()) } #[test] fn hint_test() -> Result<()> { { let comment = " { ErrorCode 1002 }"; let expected = DfHint::create_from_comment(comment, "--"); assert_eq!(expected.error_code, Some(1002)); } { let comment = " { ErrorCode1002 }"; let expected = DfHint::create_from_comment(comment, "--"); assert_eq!(expected.error_code, None); } { let comment = " { ErrorCode 22}"; let expected = DfHint::create_from_comment(comment, "--"); assert_eq!(expected.error_code, Some(22)); } { let comment = " { ErrorCode: 22}"; let expected = DfHint::create_from_comment(comment, "--"); assert_eq!(expected.error_code, None); } { let comment = " { Errorcode 22}"; let expected = DfHint::create_from_comment(comment, "--"); assert_eq!(expected.error_code, None); } Ok(()) } #[test] fn copy_test() -> Result<()> { let ident = Ident::new("test_csv"); let v = vec![ident]; let name = ObjectName(v); expect_parse_ok( "copy into test_csv from '@my_ext_stage/tutorials/sample.csv' format csv csv_header = 1 field_delimitor = ',';", DfStatement::Copy(DfCopy { name, columns: vec![], location: "@my_ext_stage/tutorials/sample.csv".to_string(), format: "csv".to_string(), options: maplit::hashmap! { "csv_header".into() => "1".into(), "field_delimitor".into() => ",".into(), } }), )?; Ok(()) } #[test] fn show_databases_test() -> Result<()> { expect_parse_ok( "SHOW DATABASES", DfStatement::ShowDatabases(DfShowDatabases { where_opt: None }), )?; expect_parse_ok( "SHOW DATABASES;", DfStatement::ShowDatabases(DfShowDatabases { where_opt: None }), )?; expect_parse_ok( "SHOW DATABASES WHERE Database = 'ss'", DfStatement::ShowDatabases(DfShowDatabases { where_opt: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("name"))), op: BinaryOperator::Eq, right: Box::new(Expr::Value(Value::SingleQuotedString("ss".to_string()))), }), }), )?; expect_parse_ok( "SHOW DATABASES WHERE Database Like 'ss%'", DfStatement::ShowDatabases(DfShowDatabases { where_opt: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("name"))), op: BinaryOperator::Like, right: Box::new(Expr::Value(Value::SingleQuotedString("ss%".to_string()))), }), }), )?; expect_parse_ok( "SHOW DATABASES LIKE 'ss%'", DfStatement::ShowDatabases(DfShowDatabases { where_opt: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("name"))), op: BinaryOperator::Like, right: Box::new(Expr::Value(Value::SingleQuotedString("ss%".to_string()))), }), }), )?; expect_parse_ok( "SHOW DATABASES LIKE SUBSTRING('ss%' FROM 1 FOR 3)", DfStatement::ShowDatabases(DfShowDatabases { where_opt: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("name"))), op: BinaryOperator::Like, right: Box::new(Expr::Substring { expr: Box::new(Expr::Value(Value::SingleQuotedString("ss%".to_string()))), substring_from: Some(Box::new(Expr::Value(Value::Number( "1".to_string(), false, )))), substring_for: Some(Box::new(Expr::Value(Value::Number( "3".to_string(), false, )))), }), }), }), )?; expect_parse_ok( "SHOW DATABASES LIKE POSITION('012345' IN 'abcdef')", DfStatement::ShowDatabases(DfShowDatabases { where_opt: Some(Expr::BinaryOp { left: Box::new(Expr::Identifier(Ident::new("name"))), op: BinaryOperator::Like, right: Box::new(Expr::Position { substr_expr: Box::new(Expr::Value(Value::SingleQuotedString( "012345".to_string(), ))), str_expr: Box::new(Expr::Value(Value::SingleQuotedString( "abcdef".to_string(), ))), }), }), }), )?; Ok(()) } #[test] fn show_create_test() -> Result<()> { expect_parse_ok( "SHOW CREATE TABLE test", DfStatement::ShowCreateTable(DfShowCreateTable { name: ObjectName(vec![Ident::new("test")]), }), )?; expect_parse_ok( "SHOW CREATE DATABASE test", DfStatement::ShowCreateDatabase(DfShowCreateDatabase { name: ObjectName(vec![Ident::new("test")]), }), )?; Ok(()) } fn create_user_auth_test( auth_clause: &str, auth_type: Option<String>, auth_string: Option<String>, ) -> Result<()> { expect_parse_ok( &format!("CREATE USER 'test'@'localhost' {}", auth_clause), DfStatement::CreateUser(DfCreateUser { if_not_exists: false, name: String::from("test"), hostname: String::from("localhost"), auth_options: DfAuthOption { auth_type, by_value: auth_string, }, }), ) } fn create_user_auth_test_normal(plugin_name: &str) -> Result<()> { let password = "password"; let sql = format!("IDENTIFIED with {} BY '{}'", plugin_name, password); create_user_auth_test( &sql, Some(plugin_name.to_string()), Some(password.to_string()), ) } #[test] fn create_user_test() -> Result<()> { // normal create_user_auth_test_normal("plaintext_password")?; create_user_auth_test_normal("sha256_password")?; create_user_auth_test_normal("double_sha1_password")?; create_user_auth_test( "IDENTIFIED BY 'password'", None, Some("password".to_string()), )?; create_user_auth_test( "IDENTIFIED WITH no_password", Some("no_password".to_string()), None, )?; create_user_auth_test("NOT IDENTIFIED", Some("no_password".to_string()), None)?; create_user_auth_test("", None, None)?; // username contains '@' expect_parse_ok( "CREATE USER 'test@localhost'", DfStatement::CreateUser(DfCreateUser { if_not_exists: false, name: String::from("test@localhost"), hostname: String::from("%"), auth_options: DfAuthOption::default(), }), )?; // errors expect_parse_err( "CREATE USER 'test'@'localhost' IDENTIFIED WITH no_password BY 'password'", String::from("sql parser error: Expected end of statement, found: BY"), )?; expect_parse_err( "CREATE USER 'test'@'localhost' IDENTIFIED WITH sha256_password BY", String::from("sql parser error: Expected literal string, found: EOF"), )?; Ok(()) } fn alter_user_auth_test( auth_clause: &str, auth_type: Option<String>, auth_string: Option<String>, ) -> Result<()> { expect_parse_ok( &format!("ALTER USER 'test'@'localhost' {}", auth_clause), DfStatement::AlterUser(DfAlterUser { if_current_user: false, name: String::from("test"), hostname: String::from("localhost"), auth_option: DfAuthOption { auth_type, by_value: auth_string, }, }), ) } fn alter_user_auth_test_normal(plugin_name: &str) -> Result<()> { let password = "password"; let sql = format!("IDENTIFIED with {} BY '{}'", plugin_name, password); alter_user_auth_test( &sql, Some(plugin_name.to_string()), Some(password.to_string()), ) } #[test] fn alter_user_test() -> Result<()> { let password = "password".to_string(); alter_user_auth_test_normal("plaintext_password")?; alter_user_auth_test_normal("sha256_password")?; alter_user_auth_test_normal("double_sha1_password")?; alter_user_auth_test( "IDENTIFIED WITH no_password", Some("no_password".to_string()), None, )?; alter_user_auth_test("IDENTIFIED BY 'password'", None, Some(password.clone()))?; alter_user_auth_test("NOT IDENTIFIED", Some("no_password".to_string()), None)?; alter_user_auth_test("", None, None)?; expect_parse_ok( "ALTER USER USER() IDENTIFIED BY 'password'", DfStatement::AlterUser(DfAlterUser { if_current_user: true, name: String::from(""), hostname: String::from(""), auth_option: DfAuthOption { auth_type: None, by_value: Some(password), }, }), )?; expect_parse_ok( "ALTER USER 'test@localhost' IDENTIFIED WITH sha256_password BY 'password'", DfStatement::AlterUser(DfAlterUser { if_current_user: false, name: String::from("test@localhost"), hostname: String::from("%"), auth_option: DfAuthOption { auth_type: Some("sha256_password".to_string()), by_value: Some("password".to_string()), }, }), )?; expect_parse_err( "ALTER USER 'test'@'localhost' IDENTIFIED WITH no_password BY 'password'", String::from("sql parser error: Expected end of statement, found: BY"), )?; expect_parse_err( "ALTER USER 'test'@'localhost' IDENTIFIED WITH sha256_password BY", String::from("sql parser error: Expected literal string, found: EOF"), )?; Ok(()) } #[test] fn drop_user_test() -> Result<()> { expect_parse_ok( "DROP USER 'test'@'localhost'", DfStatement::DropUser(DfDropUser { if_exists: false, name: String::from("test"), hostname: String::from("localhost"), }), )?; expect_parse_ok( "DROP USER 'test'@'127.0.0.1'", DfStatement::DropUser(DfDropUser { if_exists: false, name: String::from("test"), hostname: String::from("127.0.0.1"), }), )?; expect_parse_ok( "DROP USER 'test'", DfStatement::DropUser(DfDropUser { if_exists: false, name: String::from("test"), hostname: String::from("%"), }), )?; expect_parse_ok( "DROP USER IF EXISTS 'test'@'localhost'", DfStatement::DropUser(DfDropUser { if_exists: true, name: String::from("test"), hostname: String::from("localhost"), }), )?; expect_parse_ok( "DROP USER IF EXISTS 'test'@'127.0.0.1'", DfStatement::DropUser(DfDropUser { if_exists: true, name: String::from("test"), hostname: String::from("127.0.0.1"), }), )?; expect_parse_ok( "DROP USER IF EXISTS 'test'", DfStatement::DropUser(DfDropUser { if_exists: true, name: String::from("test"), hostname: String::from("%"), }), )?; Ok(()) } #[test] fn grant_privilege_test() -> Result<()> { expect_parse_ok( "GRANT ALL ON * TO 'test'@'localhost'", DfStatement::GrantPrivilege(DfGrantStatement { name: String::from("test"), hostname: String::from("localhost"), on: DfGrantObject::Database(None), priv_types: UserPrivilegeSet::all_privileges(), }), )?; expect_parse_ok( "GRANT ALL PRIVILEGES ON * TO 'test'@'localhost'", DfStatement::GrantPrivilege(DfGrantStatement { name: String::from("test"), hostname: String::from("localhost"), on: DfGrantObject::Database(None), priv_types: UserPrivilegeSet::all_privileges(), }), )?; expect_parse_ok( "GRANT INSERT ON `db1`.`tb1` TO 'test'@'localhost'", DfStatement::GrantPrivilege(DfGrantStatement { name: String::from("test"), hostname: String::from("localhost"), on: DfGrantObject::Table(Some("db1".into()), "tb1".into()), priv_types: { let mut privileges = UserPrivilegeSet::empty(); privileges.set_privilege(UserPrivilegeType::Insert); privileges }, }), )?; expect_parse_ok( "GRANT INSERT ON `tb1` TO 'test'@'localhost'", DfStatement::GrantPrivilege(DfGrantStatement { name: String::from("test"), hostname: String::from("localhost"), on: DfGrantObject::Table(None, "tb1".into()), priv_types: { let mut privileges = UserPrivilegeSet::empty(); privileges.set_privilege(UserPrivilegeType::Insert); privileges }, }), )?; expect_parse_ok( "GRANT INSERT ON `db1`.'*' TO 'test'@'localhost'", DfStatement::GrantPrivilege(DfGrantStatement { name: String::from("test"), hostname: String::from("localhost"), on: DfGrantObject::Database(Some("db1".into())), priv_types: { let mut privileges = UserPrivilegeSet::empty(); privileges.set_privilege(UserPrivilegeType::Insert); privileges }, }), )?; expect_parse_ok( "GRANT CREATE, SELECT ON * TO 'test'@'localhost'", DfStatement::GrantPrivilege(DfGrantStatement { name: String::from("test"), hostname: String::from("localhost"), on: DfGrantObject::Database(None), priv_types: { let mut privileges = UserPrivilegeSet::empty(); privileges.set_privilege(UserPrivilegeType::Select); privileges.set_privilege(UserPrivilegeType::Create); privileges }, }), )?; expect_parse_ok( "GRANT CREATE USER, CREATE ROLE, CREATE, SELECT ON * TO 'test'@'localhost'", DfStatement::GrantPrivilege(DfGrantStatement { name: String::from("test"), hostname: String::from("localhost"), on: DfGrantObject::Database(None), priv_types: { let mut privileges = UserPrivilegeSet::empty(); privileges.set_privilege(UserPrivilegeType::Create); privileges.set_privilege(UserPrivilegeType::CreateUser); privileges.set_privilege(UserPrivilegeType::CreateRole); privileges.set_privilege(UserPrivilegeType::Select); privileges }, }), )?; expect_parse_err( "GRANT TEST, ON * TO 'test'@'localhost'", String::from("sql parser error: Expected privilege type, found: TEST"), )?; expect_parse_err( "GRANT SELECT, ON * TO 'test'@'localhost'", String::from("sql parser error: Expected privilege type, found: ON"), )?; expect_parse_err( "GRANT SELECT IN * TO 'test'@'localhost'", String::from("sql parser error: Expected keyword ON, found: IN"), )?; expect_parse_err( "GRANT SELECT ON * 'test'@'localhost'", String::from("sql parser error: Expected keyword TO, found: 'test'"), )?; expect_parse_err( "GRANT INSERT ON *.`tb1` TO 'test'@'localhost'", String::from("sql parser error: Expected whitespace, found: ."), )?; Ok(()) } #[test] fn revoke_privilege_test() -> Result<()> { expect_parse_ok( "REVOKE ALL ON * FROM 'test'@'localhost'", DfStatement::RevokePrivilege(DfRevokeStatement { username: String::from("test"), hostname: String::from("localhost"), on: DfGrantObject::Database(None), priv_types: UserPrivilegeSet::all_privileges(), }), )?; expect_parse_err( "REVOKE SELECT ON * 'test'@'localhost'", String::from("sql parser error: Expected keyword FROM, found: 'test'"), )?; Ok(()) } #[test] fn create_stage_test() -> Result<()> { expect_parse_ok( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z')", DfStatement::CreateStage(DfCreateStage { if_not_exists: false, stage_name: "test_stage".to_string(), stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }), file_format: FileFormat::default(), comments: "".to_string(), }), )?; expect_parse_ok( "CREATE STAGE IF NOT EXISTS test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z')", DfStatement::CreateStage(DfCreateStage { if_not_exists: true, stage_name: "test_stage".to_string(), stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }), file_format: FileFormat::default(), comments: "".to_string(), }), )?; expect_parse_ok( "CREATE STAGE IF NOT EXISTS test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=CSV compression=GZIP record_delimiter=',')", DfStatement::CreateStage(DfCreateStage { if_not_exists: true, stage_name: "test_stage".to_string(), stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }), file_format: FileFormat { compression: Compression::Gzip, record_delimiter: ",".to_string(),..Default::default()}, comments: "".to_string(), }), )?; expect_parse_ok( "CREATE STAGE IF NOT EXISTS test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=CSV compression=GZIP record_delimiter=',') comments='test'", DfStatement::CreateStage(DfCreateStage { if_not_exists: true, stage_name: "test_stage".to_string(), stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }), file_format: FileFormat { compression: Compression::Gzip, record_delimiter: ",".to_string(),..Default::default()}, comments: "test".to_string(), }), )?; expect_parse_ok( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=Parquet compression=AUTO) comments='test'", DfStatement::CreateStage(DfCreateStage { if_not_exists: false, stage_name: "test_stage".to_string(), stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }), file_format: FileFormat { format: Format::Parquet, compression: Compression::Auto ,..Default::default()}, comments: "test".to_string(), }), )?; expect_parse_ok( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO) comments='test'", DfStatement::CreateStage(DfCreateStage { if_not_exists: false, stage_name: "test_stage".to_string(), stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }), file_format: FileFormat { format: Format::Csv, compression: Compression::Auto,..Default::default()}, comments: "test".to_string(), }), )?; expect_parse_ok( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=json) comments='test'", DfStatement::CreateStage(DfCreateStage { if_not_exists: false, stage_name: "test_stage".to_string(), stage_params: StageParams::new("s3://load/files/", Credentials { access_key_id: "1a2b3c".to_string(), secret_access_key: "4x5y6z".to_string() }), file_format: FileFormat { format: Format::Json,..Default::default()}, comments: "test".to_string(), }), )?; expect_parse_err( "CREATE STAGE test_stage credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'", String::from("sql parser error: Missing URL"), )?; expect_parse_err( "CREATE STAGE test_stage url='s3://load/files/' password=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'", String::from("sql parser error: Expected end of statement, found: password"), )?; expect_parse_err( "CREATE STAGE test_stage url='s4://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'", String::from("sql parser error: Not supported storage"), )?; expect_parse_err( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key='1a2b3c' secret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'", String::from("sql parser error: Invalid credentials options: unknown field `access_key`, expected `access_key_id` or `secret_access_key`"), )?; expect_parse_err( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' aecret_access_key='4x5y6z') file_format=(FORMAT=csv compression=AUTO record_delimiter=NONE) comments='test'", String::from("sql parser error: Invalid credentials options: unknown field `aecret_access_key`, expected `access_key_id` or `secret_access_key`"), )?; expect_parse_err_contains( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(type=csv compression=AUTO record_delimiter=NONE) comments='test'", String::from("unknown field `type`"), )?; expect_parse_err_contains( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(format=csv compression=AUTO1 record_delimiter=NONE) comments='test'", String::from("unknown variant `auto1`"), )?; expect_parse_err_contains( "CREATE STAGE test_stage url='s3://load/files/' credentials=(access_key_id='1a2b3c' secret_access_key='4x5y6z') file_format=(format=csv1 compression=AUTO record_delimiter=NONE) comments='test'", String::from("unknown variant `csv1`"), )?; Ok(()) } #[test] fn create_table_select() -> Result<()> { expect_parse_ok( "CREATE TABLE foo AS SELECT a, b FROM bar", DfStatement::CreateTable(DfCreateTable { if_not_exists: false, name: ObjectName(vec![Ident::new("foo")]), columns: vec![], engine: "FUSE".to_string(), options: maplit::hashmap! {}, like: None, query: Some(verified_query("SELECT a, b FROM bar")?), }), )?; expect_parse_ok( "CREATE TABLE foo (a INT) SELECT a, b FROM bar", DfStatement::CreateTable(DfCreateTable { if_not_exists: false, name: ObjectName(vec![Ident::new("foo")]), columns: vec![make_column_def("a", DataType::Int(None))], engine: "FUSE".to_string(), options: maplit::hashmap! {}, like: None, query: Some(verified_query("SELECT a, b FROM bar")?), }), )?; Ok(()) } #[test] fn optimize_table() -> Result<()> { { let sql = "optimize TABLE t1"; let expected = DfStatement::OptimizeTable(DfOptimizeTable { name: ObjectName(vec![Ident::new("t1")]), operation: Optimization::PURGE, }); expect_parse_ok(sql, expected)?; } { let sql = "OPTIMIZE tABLE t1"; let expected = DfStatement::OptimizeTable(DfOptimizeTable { name: ObjectName(vec![Ident::new("t1")]), operation: Optimization::PURGE, }); expect_parse_ok(sql, expected)?; } { let sql = "optimize TABLE t1 purge"; let expected = DfStatement::OptimizeTable(DfOptimizeTable { name: ObjectName(vec![Ident::new("t1")]), operation: Optimization::PURGE, }); expect_parse_ok(sql, expected)?; } { let sql = "optimize TABLE t1 compact"; let expected = DfStatement::OptimizeTable(DfOptimizeTable { name: ObjectName(vec![Ident::new("t1")]), operation: Optimization::COMPACT, }); expect_parse_ok(sql, expected)?; } { let sql = "optimize TABLE t1 all"; let expected = DfStatement::OptimizeTable(DfOptimizeTable { name: ObjectName(vec![Ident::new("t1")]), operation: Optimization::ALL, }); expect_parse_ok(sql, expected)?; } { let sql = "optimize TABLE t1 unacceptable"; expect_parse_err( sql, "sql parser error: Expected one of PURGE, COMPACT, ALL, found: unacceptable" .to_string(), )?; } { let sql = "optimize TABLE t1 ("; expect_parse_err( sql, "sql parser error: Expected Nothing, or one of PURGE, COMPACT, ALL, found: (" .to_string(), )?; } Ok(()) } #[test] fn drop_stage_test() -> Result<()> { expect_parse_ok( "DROP STAGE test_stage", DfStatement::DropStage(DfDropStage { if_exists: false, stage_name: "test_stage".to_string(), }), )?; expect_parse_ok( "DROP STAGE IF EXISTS test_stage", DfStatement::DropStage(DfDropStage { if_exists: true, stage_name: "test_stage".to_string(), }), )?; Ok(()) } #[test] fn test_create_udf() -> Result<()> { expect_parse_err_contains( "CREATE FUNCTION test_udf AS p -> not(isnotnull(p))", "Expected (, found: p".to_string(), )?; expect_parse_err_contains( "CREATE FUNCTION test_udf AS (as) -> not(isnotnull(as))", "Keyword can not be parameter, got: as".to_string(), )?; expect_parse_err_contains( "CREATE FUNCTION test_udf AS (\"p\") -> not(isnotnull(p))", "Quote is not allowed in parameters, remove: \"".to_string(), )?; expect_parse_err_contains( "CREATE FUNCTION test_udf AS (p, p) -> not(isnotnull(p))", "Duplicate parameter is not allowed, keep only one: p".to_string(), )?; expect_parse_err_contains( "CREATE FUNCTION test_udf AS (p:) -> not(isnotnull(p))", "Expect words or comma, but got: :".to_string(), )?; expect_parse_err_contains( "CREATE FUNCTION test_udf AS (p,) -> not(isnotnull(p))", "Found a redundant `,` in the parameters".to_string(), )?; expect_parse_err_contains( "CREATE FUNCTION test_udf AS (p;) -> not(isnotnull(p))", "Can not find complete parameters, `)` is missing".to_string(), )?; expect_parse_ok( "CREATE FUNCTION test_udf AS (p) -> not(isnotnull(p))", DfStatement::CreateUDF(DfCreateUDF { if_not_exists: false, udf_name: "test_udf".to_string(), parameters: vec!["p".to_string()], definition: "not(isnotnull(p))".to_string(), description: "".to_string(), }), )?; expect_parse_ok( "CREATE FUNCTION test_udf AS (p, d) -> not(isnotnull(p, d))", DfStatement::CreateUDF(DfCreateUDF { if_not_exists: false, udf_name: "test_udf".to_string(), parameters: vec!["p".to_string(), "d".to_string()], definition: "not(isnotnull(p,d))".to_string(), description: "".to_string(), }), )?; expect_parse_err_contains( "CREATE FUNCTION test_udf AS (p) -> not(isnotnull(p)) DESC", "Expected =, found: ".to_string(), )?; expect_parse_err_contains( "CREATE FUNCTION test_udf AS (p) -> not(isnotnull(p)) DESC =", "Expected literal string, found: EOF".to_string(), )?; expect_parse_ok( "CREATE FUNCTION test_udf AS (p, d) -> not(isnotnull(p, d)) DESC = 'this is a description'", DfStatement::CreateUDF(DfCreateUDF { if_not_exists: false, udf_name: "test_udf".to_string(), parameters: vec!["p".to_string(), "d".to_string()], definition: "not(isnotnull(p,d))".to_string(), description: "this is a description".to_string(), }), )?; expect_parse_ok( "CREATE FUNCTION test_udf as (p, d) -> not(isnotnull(p, d)) DESC = 'this is a description'", DfStatement::CreateUDF(DfCreateUDF { if_not_exists: false, udf_name: "test_udf".to_string(), parameters: vec!["p".to_string(), "d".to_string()], definition: "not(isnotnull(p,d))".to_string(), description: "this is a description".to_string(), }), )?; Ok(()) } #[test] fn test_drop_udf() -> Result<()> { expect_parse_ok( "DROP FUNCTION test_udf", DfStatement::DropUDF(DfDropUDF { if_exists: false, udf_name: "test_udf".to_string(), }), )?; expect_parse_ok( "DROP FUNCTION IF EXISTS test_udf", DfStatement::DropUDF(DfDropUDF { if_exists: true, udf_name: "test_udf".to_string(), }), )?; Ok(()) } #[test] fn test_alter_udf() -> Result<()> { expect_parse_err_contains( "ALTER FUNCTION test_udf AS p -> not(isnotnull(p))", "Expected (, found: p".to_string(), )?; expect_parse_err_contains( "ALTER FUNCTION test_udf AS (as) -> not(isnotnull(as))", "Keyword can not be parameter, got: as".to_string(), )?; expect_parse_err_contains( "ALTER FUNCTION test_udf AS (\"p\") -> not(isnotnull(p))", "Quote is not allowed in parameters, remove: \"".to_string(), )?; expect_parse_err_contains( "ALTER FUNCTION test_udf AS (p, p) -> not(isnotnull(p))", "Duplicate parameter is not allowed, keep only one: p".to_string(), )?; expect_parse_err_contains( "ALTER FUNCTION test_udf AS (p:) -> not(isnotnull(p))", "Expect words or comma, but got: :".to_string(), )?; expect_parse_err_contains( "ALTER FUNCTION test_udf AS (p,) -> not(isnotnull(p))", "Found a redundant `,` in the parameters".to_string(), )?; expect_parse_err_contains( "ALTER FUNCTION test_udf AS (p;) -> not(isnotnull(p))", "Can not find complete parameters, `)` is missing".to_string(), )?; expect_parse_ok( "ALTER FUNCTION test_udf AS (p) -> not(isnotnull(p))", DfStatement::AlterUDF(DfAlterUDF { udf_name: "test_udf".to_string(), parameters: vec!["p".to_string()], definition: "not(isnotnull(p))".to_string(), description: "".to_string(), }), )?; expect_parse_ok( "ALTER FUNCTION test_udf AS (p, d) -> not(isnotnull(p, d))", DfStatement::AlterUDF(DfAlterUDF { udf_name: "test_udf".to_string(), parameters: vec!["p".to_string(), "d".to_string()], definition: "not(isnotnull(p,d))".to_string(), description: "".to_string(), }), )?; expect_parse_err_contains( "ALTER FUNCTION test_udf AS (p) -> not(isnotnull(p)) DESC", "Expected =, found: ".to_string(), )?; expect_parse_err_contains( "ALTER FUNCTION test_udf AS (p) -> not(isnotnull(p)) DESC =", "Expected literal string, found: EOF".to_string(), )?; expect_parse_ok( "ALTER FUNCTION test_udf AS (p, d) -> not(isnotnull(p, d)) DESC = 'this is a description'", DfStatement::AlterUDF(DfAlterUDF { udf_name: "test_udf".to_string(), parameters: vec!["p".to_string(), "d".to_string()], definition: "not(isnotnull(p,d))".to_string(), description: "this is a description".to_string(), }), )?; Ok(()) }
{ let (statements, _) = DfParser::parse_sql(sql)?; assert_eq!( statements.len(), 1, "Expected to parse exactly one statement" ); assert_eq!(statements[0], expected); Ok(()) }
test_models.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_dj-prosftpd ------------ Tests for `dj-prosftpd` models module. """
from django.test import TestCase from dj_prosftpd import models class TestDj_prosftpd(TestCase): def setUp(self): pass def test_something(self): pass def tearDown(self): pass
project.user.controller.ts
import { GetLocationProjectDto } from './dto/get-location-project'; import { RealIP } from 'nestjs-real-ip'; import { GetProjectDto } from './../project/dto/get-project.dto'; import { ApiOperation, ApiTags } from '@nestjs/swagger'; import { Controller, Get, Param, Query, Request } from '@nestjs/common'; import { ProjectService } from './project.service'; @ApiTags('User Projects') @Controller('user/projects') export class
{ constructor(private readonly projectService: ProjectService) {} @Get() @ApiOperation({ summary: 'AS AN USER, I CAN FIND ALL PROJECTS' }) findAll(@Query() getProjectDto: GetProjectDto) { return this.projectService.findAll(getProjectDto); } @Get('/nearby') findAllByRadius( @Request() req, @Query() getLocationProjectDto: GetLocationProjectDto ) { const ip = req.clientIp; getLocationProjectDto.ip_address = ip return this.projectService.findAllByRadius( getLocationProjectDto ); } @Get(':id') findOne(@Param('id') id: string) { return this.projectService.findOne(id, []); } }
ProjectUserController
day24.py
from __future__ import print_function, division, absolute_import import copy import time import numpy as np import sys class Bridge(object): def __init__(self, initial_components, available_components): self.components = list(initial_components) self.score = sum([sum(tup) for tup in self.components]) self.available_components = available_components def next_required_number(self): if len(self.components) == 1: c = self.components[0] nrn = c[0] if c.index(0) == 1 else c[1] else: c1 = self.components[-1] c2 = self.components[-2] nrn = c1[0] if c1[1] in c2 else c1[1] return nrn def add_component(self, c):
# def score(self): # return sum([sum(tup) for tup in self.components]) def length(self): return len(self.components) def assemble_next(self): """ Find the next required number in the bridge. Return a *new* list of bridges each with a different valid component on the end, depending on the available components. Returns ------- """ nrn = self.next_required_number() next_components = [c for c in self.available_components if nrn in c] new_bridges = [] for nx in next_components: b = Bridge(initial_components=tuple(self.components), available_components=self.available_components.copy()) b.add_component(nx) new_bridges.append(b) return new_bridges def __str__(self): s = '--'.join(['{0}/{1}'.format(*c) for c in self.components]) return s def solve(inp): components = [(int(line.split('/')[0]), int(line.split('/')[1])) for line in inp] starting_comps = [c for c in components if 0 in c] bridges = [] for sc in starting_comps: bridges.append(Bridge((sc,), set(components)-set((sc,)))) complete_bridges = [] complete_bridges.extend(bridges) for i in range(1000): print('.', end='') sys.stdout.flush() new_bridges = [] for b in bridges: new_bridges.extend(b.assemble_next()) if not new_bridges: # Terminate once no new bridges can be built break bridges = new_bridges complete_bridges.extend(new_bridges) strongest_bridge = complete_bridges[np.argmax([b.score for b in complete_bridges])] print() print('Strongest bridge:') print(' ', str(strongest_bridge)) print(' strength = ', strongest_bridge.score, 'length =', strongest_bridge.length()) longest_length = np.max([b.length() for b in complete_bridges]) longest_bridges = [b for b in bridges if b.length() == longest_length] strongest_longest_bridge = longest_bridges[np.argmax([b.score for b in longest_bridges])] print('Strongest longest bridge:') print(' ', str(strongest_longest_bridge)) print(' strength = ', strongest_longest_bridge.score, 'length =', strongest_longest_bridge.length()) if __name__ == '__main__': with open('test_input.txt', 'r') as f: puzzle_input = [line.strip() for line in f.readlines() if line] t0 = time.time() solve(puzzle_input) print('Time to solve test:', time.time()-t0, 'sec') with open('input.txt', 'r') as f: puzzle_input = [line.strip() for line in f.readlines() if line] t0 = time.time() solve(puzzle_input) print('Time to solve:', time.time()-t0, 'sec')
nrn = self.next_required_number() if nrn not in c: raise ValueError('Invalid connection, wrong port. Needed: {0} Got: {1}'.format(nrn, str(c))) if c not in self.available_components: raise ValueError('Component unavailable:', c) self.components.append(c) self.score += sum(c) self.available_components.remove(c)
cyclo6.go
package ff import "fmt" // Cyclo6 represents an element of the 6th cyclotomic group. // // References: https://eprint.iacr.org/2009/565 type Cyclo6 [2]Fp6 func (z Cyclo6) String() string { return fmt.Sprintf("\n0: %v\n1: %v", z[0], z[1]) } func (z *Cyclo6) Set(x *Cyclo6) { z[0].Set(&x[0]); z[1].Set(&x[1]) } func (z *Cyclo6) Inv(x *Cyclo6) { z.Set(x); z[1].Neg() } func (z *Cyclo6) SetIdentity() { z[0].SetOne(); z[1].SetZero() } func (z *Cyclo6) IsIdentity() bool { i := new(Cyclo6); i.SetIdentity(); return z.IsEqual(i) } func (z *Cyclo6) IsEqual(x *Cyclo6) bool { return z[0].IsEqual(&x[0]) && z[1].IsEqual(&x[1]) } func (z *Cyclo6) Frob(x *Cyclo6) { z[0].Frob(&x[0]); z[1].Frob(&x[1]); z[1].Mul(&z[1], &frob12W1) } func (z *Cyclo6) Mul(x, y *Cyclo6) { var x0y0, x1y1, sx, sy, k Fp6 x0y0.Mul(&x[0], &y[0]) x1y1.Mul(&x[1], &y[1]) sx.Add(&x[0], &x[1]) sy.Add(&y[0], &y[1]) k.Mul(&sx, &sy) z[1].Sub(&k, &x0y0) z[1].Sub(&z[1], &x1y1) x1y1.MulBeta() z[0].Add(&x0y0, &x1y1) } func (z *Cyclo6) Sqr(x *Cyclo6) { var x02, x12, k Fp6 x02.Sqr(&x[0]) x12.Sqr(&x[1]) x12.MulBeta() k.Mul(&x[0], &x[1]) z[0].Add(&x02, &x12) z[1].Add(&k, &k) } // PowToX computes z = x^paramX, where paramX is the parameter of the BLS curve. func (z *Cyclo6) PowToX(x *Cyclo6) { var t Cyclo6 t.Set(x) const lenX = 64 for i := lenX - 2; i >= 0; i-- { t.Sqr(&t) // paramX is -2 ^ 63 - 2 ^ 62 - 2 ^ 60 - 2 ^ 57 - 2 ^ 48 - 2 ^ 16 if (i == 62) || (i == 60) || (i == 57) || (i == 48) || (i == 16) { t.Mul(&t, x) } } z.Inv(&t) } // EasyExponentiation raises f^(p^6-1)(p^2+1) and returns an element in the // 6-th cyclotomic group. func
(f *Fp12) *Cyclo6 { var t0, t1, p Fp12 p.Frob(f) // p = f^(p) p.Frob(&p) // p = f^(p^2) t0.Mul(&p, f) // t0 = f^(p^2 + 1) t1.Frob(&t0) // t1 = f^(p^2 + 1)*(p) t1.Frob(&t1) // t1 = f^(p^2 + 1)*(p^2) t1.Frob(&t1) // t1 = f^(p^2 + 1)*(p^3) t1.Frob(&t1) // t1 = f^(p^2 + 1)*(p^4) t1.Frob(&t1) // t1 = f^(p^2 + 1)*(p^5) t1.Frob(&t1) // t1 = f^(p^2 + 1)*(p^6) t0.Inv(&t0) // t0 = f^-(p^2 + 1) t0.Mul(&t0, &t1) // t0 = f^(p^2 + 1)*(p^6 - 1) var g Cyclo6 g[0].Set(&t0[0]) g[1].Set(&t0[1]) return &g }
EasyExponentiation
web.go
package injector import ( "github.com/LyricTian/gin-admin/v6/internal/app/config" "github.com/LyricTian/gin-admin/v6/internal/app/middleware" "github.com/LyricTian/gin-admin/v6/internal/app/router" "github.com/LyricTian/gzip" "github.com/gin-gonic/gin" ginSwagger "github.com/swaggo/gin-swagger" swaggerFiles "github.com/swaggo/gin-swagger/swaggerFiles" ) // InitGinEngine 初始化gin引擎 func InitGinEngine(r router.IRouter) *gin.Engine { gin.SetMode(config.C.RunMode) app := gin.New() app.NoMethod(middleware.NoMethodHandler()) app.NoRoute(middleware.NoRouteHandler())
// 跟踪ID app.Use(middleware.TraceMiddleware(middleware.AllowPathPrefixNoSkipper(prefixes...))) // 访问日志 app.Use(middleware.LoggerMiddleware(middleware.AllowPathPrefixNoSkipper(prefixes...))) // 崩溃恢复 app.Use(middleware.RecoveryMiddleware()) // 跨域请求 if config.C.CORS.Enable { app.Use(middleware.CORSMiddleware()) } // gzip压缩 if config.C.GZIP.Enable { app.Use(gzip.Gzip(gzip.BestCompression, gzip.WithExcludedExtensions(config.C.GZIP.ExcludedExtentions), gzip.WithExcludedPaths(config.C.GZIP.ExcludedPaths), )) } // 注册路由 r.Register(app) // swagger文档 if config.C.Swagger { app.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler)) } // 静态站点 if dir := config.C.WWW; dir != "" { app.Use(middleware.WWWMiddleware(dir, middleware.AllowPathPrefixSkipper(prefixes...))) } return app }
prefixes := r.Prefixes()
manage.py
#!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys from store.settings.base import get_config_type def
(): # Set path to the current config file os.environ.setdefault( 'DJANGO_SETTINGS_MODULE', 'store.settings.' + get_config_type() ) try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
main
mod.rs
extern crate asio_sys as sys; use { BuildStreamError, DefaultFormatError, DeviceNameError, DevicesError, Format, PauseStreamError, PlayStreamError, StreamDataResult, SupportedFormatsError, }; use traits::{ DeviceTrait, EventLoopTrait, HostTrait, StreamIdTrait, }; pub use self::device::{Device, Devices, SupportedInputFormats, SupportedOutputFormats}; pub use self::stream::{EventLoop, StreamId}; use std::sync::Arc; mod device; mod stream; /// The host for ASIO. #[derive(Debug)] pub struct Host { asio: Arc<sys::Asio>, } impl Host { pub fn new() -> Result<Self, crate::HostUnavailable>
} impl HostTrait for Host { type Devices = Devices; type Device = Device; type EventLoop = EventLoop; fn is_available() -> bool { true //unimplemented!("check how to do this using asio-sys") } fn devices(&self) -> Result<Self::Devices, DevicesError> { Devices::new(self.asio.clone()) } fn default_input_device(&self) -> Option<Self::Device> { // ASIO has no concept of a default device, so just use the first. self.input_devices().ok().and_then(|mut ds| ds.next()) } fn default_output_device(&self) -> Option<Self::Device> { // ASIO has no concept of a default device, so just use the first. self.output_devices().ok().and_then(|mut ds| ds.next()) } fn event_loop(&self) -> Self::EventLoop { EventLoop::new() } } impl DeviceTrait for Device { type SupportedInputFormats = SupportedInputFormats; type SupportedOutputFormats = SupportedOutputFormats; fn name(&self) -> Result<String, DeviceNameError> { Device::name(self) } fn supported_input_formats(&self) -> Result<Self::SupportedInputFormats, SupportedFormatsError> { Device::supported_input_formats(self) } fn supported_output_formats(&self) -> Result<Self::SupportedOutputFormats, SupportedFormatsError> { Device::supported_output_formats(self) } fn default_input_format(&self) -> Result<Format, DefaultFormatError> { Device::default_input_format(self) } fn default_output_format(&self) -> Result<Format, DefaultFormatError> { Device::default_output_format(self) } } impl EventLoopTrait for EventLoop { type Device = Device; type StreamId = StreamId; fn build_input_stream( &self, device: &Self::Device, format: &Format, ) -> Result<Self::StreamId, BuildStreamError> { EventLoop::build_input_stream(self, device, format) } fn build_output_stream( &self, device: &Self::Device, format: &Format, ) -> Result<Self::StreamId, BuildStreamError> { EventLoop::build_output_stream(self, device, format) } fn play_stream(&self, stream: Self::StreamId) -> Result<(), PlayStreamError> { EventLoop::play_stream(self, stream) } fn pause_stream(&self, stream: Self::StreamId) -> Result<(), PauseStreamError> { EventLoop::pause_stream(self, stream) } fn destroy_stream(&self, stream: Self::StreamId) { EventLoop::destroy_stream(self, stream) } fn run<F>(&self, callback: F) -> ! where F: FnMut(Self::StreamId, StreamDataResult) + Send, { EventLoop::run(self, callback) } } impl StreamIdTrait for StreamId {}
{ let asio = Arc::new(sys::Asio::new()); let host = Host { asio }; Ok(host) }
pack_test.go
package internal import ( "testing" "circl/sign/dilithium/internal/common" ) func TestPolyPackLeqEta(t *testing.T) { var p1, p2 common.Poly var seed [32]byte var buf [PolyLeqEtaSize]byte for i := uint16(0); i < 100; i++ { // Note that DeriveUniformLeqEta sets p to the right kind of // unnormalized vector. PolyDeriveUniformLeqEta(&p1, &seed, i) for j := 0; j < PolyLeqEtaSize; j++ { if p1[j] < common.Q-Eta || p1[j] > common.Q+Eta { t.Fatalf("DerveUniformLeqEta out of bounds") } } PolyPackLeqEta(&p1, buf[:]) PolyUnpackLeqEta(&p2, buf[:]) if p1 != p2 { t.Fatalf("%v != %v", p1, p2) } } } func
(t *testing.T) { var p1, p2 common.Poly var seed [32]byte var buf [common.PolyT1Size]byte for i := uint16(0); i < 100; i++ { PolyDeriveUniform(&p1, &seed, i) p1.Normalize() for j := 0; j < common.N; j++ { p1[j] &= 0x1ff } p1.PackT1(buf[:]) p2.UnpackT1(buf[:]) if p1 != p2 { t.Fatalf("%v != %v", p1, p2) } } } func TestPolyPackT0(t *testing.T) { var p, p0, p1, p2 common.Poly var seed [32]byte var buf [common.PolyT0Size]byte for i := uint16(0); i < 100; i++ { PolyDeriveUniform(&p, &seed, i) p.Normalize() p.Power2Round(&p0, &p1) p0.PackT0(buf[:]) p2.UnpackT0(buf[:]) if p0 != p2 { t.Fatalf("%v != %v", p1, p2) } } } func TestPolyPackLeGamma1(t *testing.T) { var p0, p1 common.Poly var seed [48]byte var buf [common.PolyLeGamma1Size]byte for i := uint16(0); i < 100; i++ { PolyDeriveUniformLeGamma1(&p0, &seed, i) p0.Normalize() p0.PackLeGamma1(buf[:]) p1.UnpackLeGamma1(buf[:]) if p0 != p1 { t.Fatalf("%v != %v", p0, p1) } } }
TestPolyPackT1
mysql.rs
mod conversion; mod error; use async_trait::async_trait; use mysql_async::{ self as my, prelude::{Query as _, Queryable as _}, }; use percent_encoding::percent_decode; use std::{ borrow::Cow, future::Future, path::Path, sync::atomic::{AtomicBool, Ordering}, time::Duration, };
use crate::{ ast::{Query, Value}, connector::{metrics, queryable::*, ResultSet}, error::{Error, ErrorKind}, visitor::{self, Visitor}, }; /// A connector interface for the MySQL database. #[derive(Debug)] #[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] pub struct Mysql { pub(crate) conn: Mutex<my::Conn>, pub(crate) url: MysqlUrl, socket_timeout: Option<Duration>, is_healthy: AtomicBool, } /// Wraps a connection url and exposes the parsing logic used by quaint, including default values. #[derive(Debug, Clone)] #[cfg_attr(feature = "docs", doc(cfg(feature = "mysql")))] pub struct MysqlUrl { url: Url, query_params: MysqlUrlQueryParams, } impl MysqlUrl { /// Parse `Url` to `MysqlUrl`. Returns error for mistyped connection /// parameters. pub fn new(url: Url) -> Result<Self, Error> { let query_params = Self::parse_query_params(&url)?; Ok(Self { url, query_params }) } /// The bare `Url` to the database. pub fn url(&self) -> &Url { &self.url } /// The percent-decoded database username. pub fn username(&self) -> Cow<str> { match percent_decode(self.url.username().as_bytes()).decode_utf8() { Ok(username) => username, Err(_) => { tracing::warn!("Couldn't decode username to UTF-8, using the non-decoded version."); self.url.username().into() } } } /// The percent-decoded database password. pub fn password(&self) -> Option<Cow<str>> { match self .url .password() .and_then(|pw| percent_decode(pw.as_bytes()).decode_utf8().ok()) { Some(password) => Some(password), None => self.url.password().map(|s| s.into()), } } /// Name of the database connected. Defaults to `mysql`. pub fn dbname(&self) -> &str { match self.url.path_segments() { Some(mut segments) => segments.next().unwrap_or("mysql"), None => "mysql", } } /// The database host. If `socket` and `host` are not set, defaults to `localhost`. pub fn host(&self) -> &str { self.url.host_str().unwrap_or("localhost") } /// If set, connected to the database through a Unix socket. pub fn socket(&self) -> &Option<String> { &self.query_params.socket } /// The database port, defaults to `3306`. pub fn port(&self) -> u16 { self.url.port().unwrap_or(3306) } /// The connection timeout. pub fn connect_timeout(&self) -> Option<Duration> { self.query_params.connect_timeout } /// The pool check_out timeout pub fn pool_timeout(&self) -> Option<Duration> { self.query_params.pool_timeout } /// The socket timeout pub fn socket_timeout(&self) -> Option<Duration> { self.query_params.socket_timeout } /// The maximum connection lifetime pub fn max_connection_lifetime(&self) -> Option<Duration> { self.query_params.max_connection_lifetime } /// The maximum idle connection lifetime pub fn max_idle_connection_lifetime(&self) -> Option<Duration> { self.query_params.max_idle_connection_lifetime } fn parse_query_params(url: &Url) -> Result<MysqlUrlQueryParams, Error> { let mut ssl_opts = my::SslOpts::default(); ssl_opts = ssl_opts.with_danger_accept_invalid_certs(true); let mut connection_limit = None; let mut use_ssl = false; let mut socket = None; let mut socket_timeout = None; let mut connect_timeout = Some(Duration::from_secs(5)); let mut pool_timeout = Some(Duration::from_secs(10)); let mut max_connection_lifetime = None; let mut max_idle_connection_lifetime = Some(Duration::from_secs(300)); for (k, v) in url.query_pairs() { match k.as_ref() { "connection_limit" => { let as_int: usize = v .parse() .map_err(|_| Error::builder(ErrorKind::InvalidConnectionArguments).build())?; connection_limit = Some(as_int); } "sslcert" => { use_ssl = true; ssl_opts = ssl_opts.with_root_cert_path(Some(Path::new(&*v).to_path_buf())); } "sslidentity" => { use_ssl = true; ssl_opts = ssl_opts.with_pkcs12_path(Some(Path::new(&*v).to_path_buf())); } "sslpassword" => { use_ssl = true; ssl_opts = ssl_opts.with_password(Some(v.to_string())); } "socket" => { socket = Some(v.replace("(", "").replace(")", "")); } "socket_timeout" => { let as_int = v .parse() .map_err(|_| Error::builder(ErrorKind::InvalidConnectionArguments).build())?; socket_timeout = Some(Duration::from_secs(as_int)); } "connect_timeout" => { let as_int = v .parse::<u64>() .map_err(|_| Error::builder(ErrorKind::InvalidConnectionArguments).build())?; connect_timeout = match as_int { 0 => None, _ => Some(Duration::from_secs(as_int)), }; } "pool_timeout" => { let as_int = v .parse::<u64>() .map_err(|_| Error::builder(ErrorKind::InvalidConnectionArguments).build())?; pool_timeout = match as_int { 0 => None, _ => Some(Duration::from_secs(as_int)), }; } "sslaccept" => { match v.as_ref() { "strict" => { ssl_opts = ssl_opts.with_danger_accept_invalid_certs(false); } "accept_invalid_certs" => {} _ => { tracing::debug!( message = "Unsupported SSL accept mode, defaulting to `accept_invalid_certs`", mode = &*v ); } }; } "max_connection_lifetime" => { let as_int = v .parse() .map_err(|_| Error::builder(ErrorKind::InvalidConnectionArguments).build())?; if as_int == 0 { max_connection_lifetime = None; } else { max_connection_lifetime = Some(Duration::from_secs(as_int)); } } "max_idle_connection_lifetime" => { let as_int = v .parse() .map_err(|_| Error::builder(ErrorKind::InvalidConnectionArguments).build())?; if as_int == 0 { max_idle_connection_lifetime = None; } else { max_idle_connection_lifetime = Some(Duration::from_secs(as_int)); } } _ => { tracing::trace!(message = "Discarding connection string param", param = &*k); } }; } Ok(MysqlUrlQueryParams { ssl_opts, connection_limit, use_ssl, socket, socket_timeout, connect_timeout, pool_timeout, max_connection_lifetime, max_idle_connection_lifetime, }) } #[cfg(feature = "pooled")] pub(crate) fn connection_limit(&self) -> Option<usize> { self.query_params.connection_limit } pub(crate) fn to_opts_builder(&self) -> my::OptsBuilder { let mut config = my::OptsBuilder::default() .user(Some(self.username())) .pass(self.password()) .db_name(Some(self.dbname())); match self.socket() { Some(ref socket) => { config = config.socket(Some(socket)); } None => { config = config.ip_or_hostname(self.host()).tcp_port(self.port()); } } config = config.stmt_cache_size(Some(1000)); config = config.conn_ttl(Some(Duration::from_secs(5))); if self.query_params.use_ssl { config = config.ssl_opts(Some(self.query_params.ssl_opts.clone())); } config } } #[derive(Debug, Clone)] pub(crate) struct MysqlUrlQueryParams { ssl_opts: my::SslOpts, connection_limit: Option<usize>, use_ssl: bool, socket: Option<String>, socket_timeout: Option<Duration>, connect_timeout: Option<Duration>, pool_timeout: Option<Duration>, max_connection_lifetime: Option<Duration>, max_idle_connection_lifetime: Option<Duration>, } impl Mysql { /// Create a new MySQL connection using `OptsBuilder` from the `mysql` crate. #[tracing::instrument(name = "new_connection", skip(url))] pub async fn new(url: MysqlUrl) -> crate::Result<Self> { let conn = super::timeout::connect(url.connect_timeout(), my::Conn::new(url.to_opts_builder())).await?; Ok(Self { socket_timeout: url.query_params.socket_timeout, conn: Mutex::new(conn), url, is_healthy: AtomicBool::new(true), }) } async fn perform_io<F, T>(&self, fut: F) -> crate::Result<T> where F: Future<Output = Result<T, my::Error>>, { match super::timeout::socket(self.socket_timeout, fut).await { Err(e) if e.is_closed() => { self.is_healthy.store(false, Ordering::SeqCst); Err(e) } res => res, } } } impl TransactionCapable for Mysql {} #[async_trait] impl Queryable for Mysql { async fn query(&self, q: Query<'_>) -> crate::Result<ResultSet> { let (sql, params) = visitor::Mysql::build(q)?; self.query_raw(&sql, &params).await } async fn execute(&self, q: Query<'_>) -> crate::Result<u64> { let (sql, params) = visitor::Mysql::build(q)?; self.execute_raw(&sql, &params).await } #[tracing::instrument(skip(self, params))] async fn query_raw(&self, sql: &str, params: &[Value<'_>]) -> crate::Result<ResultSet> { metrics::query("mysql.query_raw", sql, params, move || async move { let mut conn = self.conn.lock().await; let stmt = self.perform_io(conn.prep(sql)).await?; let rows: Vec<my::Row> = self .perform_io(conn.exec(&stmt, conversion::conv_params(params)?)) .await?; let columns = stmt.columns().iter().map(|s| s.name_str().into_owned()).collect(); let last_id = conn.last_insert_id(); let mut result_set = ResultSet::new(columns, Vec::new()); for mut row in rows { result_set.rows.push(row.take_result_row()?); } if let Some(id) = last_id { result_set.set_last_insert_id(id); }; Ok(result_set) }) .await } #[tracing::instrument(skip(self, params))] async fn execute_raw(&self, sql: &str, params: &[Value<'_>]) -> crate::Result<u64> { metrics::query("mysql.execute_raw", sql, params, move || async move { let mut conn = self.conn.lock().await; self.perform_io(conn.exec_drop(sql, conversion::conv_params(params)?)) .await?; Ok(conn.affected_rows()) }) .await } #[tracing::instrument(skip(self))] async fn raw_cmd(&self, cmd: &str) -> crate::Result<()> { metrics::query("mysql.raw_cmd", cmd, &[], move || async move { let mut conn = self.conn.lock().await; let fut = async { let mut result = cmd.run(&mut *conn).await?; loop { result.map(drop).await?; if result.is_empty() { result.map(drop).await?; break; } } Ok(()) }; self.perform_io(fut).await?; Ok(()) }) .await } #[tracing::instrument(skip(self))] async fn version(&self) -> crate::Result<Option<String>> { let query = r#"SELECT @@GLOBAL.version version"#; let rows = super::timeout::socket(self.socket_timeout, self.query_raw(query, &[])).await?; let version_string = rows .get(0) .and_then(|row| row.get("version").and_then(|version| version.to_string())); Ok(version_string) } fn is_healthy(&self) -> bool { self.is_healthy.load(Ordering::SeqCst) } } #[cfg(test)] mod tests { use super::MysqlUrl; use crate::tests::test_api::mysql::CONN_STR; use crate::{error::*, single::Quaint}; use url::Url; #[test] fn should_parse_socket_url() { let url = MysqlUrl::new(Url::parse("mysql://root@localhost/dbname?socket=(/tmp/mysql.sock)").unwrap()).unwrap(); assert_eq!("dbname", url.dbname()); assert_eq!(&Some(String::from("/tmp/mysql.sock")), url.socket()); } #[tokio::test] async fn should_map_nonexisting_database_error() { let mut url = Url::parse(&*CONN_STR).unwrap(); url.set_username("root").unwrap(); url.set_path("/this_does_not_exist"); let url = url.as_str().to_string(); let res = Quaint::new(&url).await; let err = res.unwrap_err(); match err.kind() { ErrorKind::DatabaseDoesNotExist { db_name } => { assert_eq!(Some("1049"), err.original_code()); assert_eq!(Some("Unknown database \'this_does_not_exist\'"), err.original_message()); assert_eq!(&Name::available("this_does_not_exist"), db_name) } e => panic!("Expected `DatabaseDoesNotExist`, got {:?}", e), } } #[tokio::test] async fn should_map_wrong_credentials_error() { let mut url = Url::parse(&CONN_STR).unwrap(); url.set_username("WRONG").unwrap(); let res = Quaint::new(url.as_str()).await; assert!(res.is_err()); let err = res.unwrap_err(); assert!(matches!(err.kind(), ErrorKind::AuthenticationFailed { user } if user == &Name::available("WRONG"))); } }
use tokio::sync::Mutex; use url::Url;
test_op_level3.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ Support level3 operator test cases. """ import numpy as np import pytest import tvm from tvm import te from tvm import relay from tvm.error import TVMError from tvm.relay import create_executor, transform from tvm.relay.testing import check_grad, run_infer_type import tvm.testing def test_zeros_ones(): for op, ref in [(relay.zeros, np.zeros), (relay.ones, np.ones)]: y = op(shape=(124, 50), dtype="float64") yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((124, 50), "float64") intrp = create_executor() intrp_res = intrp.evaluate(y).asnumpy() np.testing.assert_allclose(intrp_res, ref((124, 50), 'float64')) def test_unary_identity(): for op, ref in [(relay.zeros_like, np.zeros_like), (relay.ones_like, np.ones_like), (relay.ceil, np.ceil), (relay.floor, np.floor), (relay.trunc, np.trunc), (relay.round, np.round), (relay.abs, np.abs), (relay.copy, None), # np.copy (relay.negative, np.negative), (relay.sign, np.sign)]: shape = (8, 9, 4) x = relay.var("x", relay.TensorType(shape, "float32")) y = op(x) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType(shape, "float32") if ref is not None: data = np.random.rand(*shape).astype('float32') intrp = create_executor() op_res = intrp.evaluate(y, { x: relay.const(data) }) ref_res = ref(data) np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=0.01) def test_cast(): x = relay.var("x", relay.TensorType((8, 9, 4), "float32")) y = x.astype("int32") yy = run_infer_type(y) assert "dtype=" in yy.astext() assert yy.checked_type == relay.TensorType((8, 9, 4), "int32") x = relay.var("x", relay.TensorType((8, 9, 4), "float32")) y = relay.cast(x, "int32") yy = run_infer_type(y) assert "dtype=" in yy.astext() assert yy.checked_type == relay.TensorType((8, 9, 4), "int32") def test_clip(): a = relay.var("a", relay.TensorType((10, 4), "float32")) y = relay.clip(a, 1., 4.) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((10, 4), "float32") data = np.random.rand(10, 4).astype('float32') intrp = create_executor() op_res = intrp.evaluate(y, { a: relay.const(data) }) ref_res = np.clip(data, 1., 4.) np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=0.01) def test_fixed_point_multiply(): # Test 23 * 1/16 # [m,s] = [0.5, -3] = frexp(1/16) # M = 0.5*2^31 = 1073741824 # so M = 1073741824 and s = -3 a = relay.var("a", relay.TensorType((10, 4), "int32")) y = relay.fixed_point_multiply(a, 1073741824, -3) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((10, 4), "int32") data = 23*np.ones((10, 4)).astype('int32') intrp = create_executor() op_res = intrp.evaluate(y, { a: relay.const(data) }) ref_res = np.ones((10, 4)).astype('int32') np.testing.assert_allclose(op_res.asnumpy(), ref_res, atol=1) def test_reinterpret(): a = relay.var("a", relay.TensorType((1000, 4), "float32")) y = relay.reinterpret(a, "int32") yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((1000, 4), "int32") data = np.random.randn(1000, 4).astype('float32') * 1000 intrp = create_executor() op_res = intrp.evaluate(y, {a: relay.const(data)}) ref_res = data.view("int32") np.testing.assert_equal(op_res.asnumpy(), ref_res) def test_approximate_transcendental(): def C(x): return relay.expr.const(x, "float32") def approx_exp(x): # An approximation derived from Opus, # https://github.com/xiph/opus/blob/c1c247/celt/mathops.h#L147-L165 x = relay.minimum(relay.maximum(x, C(-88.0)), C(88.0)) x = C(127.0) + x * C(1.44269504) xf = relay.floor(x) i = relay.cast(xf, "int32") x = x - xf Y = C(0.99992522) + x * (C(0.69583354) + x * (C(0.22606716) + x * C(0.078024523))) exponent = relay.left_shift(i, relay.expr.const(23, "int32")) exponent = relay.reinterpret(exponent, "float32") return exponent * Y def approximate_sigmoid(x): y = approx_exp(x) return y / (y + C(1.0)) def approximate_tanh(x): x = x * C(2.0) y = approx_exp(x) return (y - C(1.0)) / (y + C(1.0)) a = relay.var("a", relay.TensorType((1000,), "float32")) y = approximate_sigmoid(a) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((1000,), "float32") data = np.linspace(-5, 5, 1000).astype("float32") intrp = create_executor() op_res = intrp.evaluate(y, {a: relay.const(data)}) def reference_sigmoid(x): return np.exp(-np.logaddexp(0, -x)) np.testing.assert_allclose(op_res.asnumpy(), reference_sigmoid(data), atol=2e-5, rtol=1e-9) y = approximate_tanh(a) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((1000,), "float32") data = np.linspace(-5, 5, 1000).astype("float32") intrp = create_executor() op_res = intrp.evaluate(y, {a: relay.const(data)}) def reference_tanh(x): return np.tanh(x) np.testing.assert_allclose(op_res.asnumpy(), reference_tanh(data), atol=4e-5, rtol=1e-9) def test_squeeze(): def verify_squeeze(shape, dtype, axis): x = relay.var("x", relay.TensorType(shape, dtype)) squeeze = relay.squeeze(x, axis=axis) np_axis = tuple(axis) if axis is not None else None data = np.random.random_sample(shape).astype(dtype) intrp = create_executor() op_res = intrp.evaluate(squeeze, { x : relay.const(data) }) ref_res = np.squeeze(data, axis=np_axis) np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=0.01) verify_squeeze((1, 3, 2, 5), "float32", None) verify_squeeze((1, 3, 1), "float32", [0]) verify_squeeze((1, 2, 1, 2, 1), "float32", [0, 2]) def test_transpose_infer_type(): n, t, d = te.size_var("n"), te.size_var("t"), 100 x = relay.var("x", relay.TensorType((n, t, d), "float32")) y = relay.transpose(x, axes=(1, 0, 2)) assert "axes=" in y.astext() yy = run_infer_type(y) assert yy.checked_type == relay.TensorType( (t, n, 100), "float32") y = relay.transpose(x) assert "axes=" in y.astext() yy = run_infer_type(y) assert yy.checked_type == relay.TensorType( (100, t, n), "float32") @tvm.testing.uses_gpu def test_transpose(): def verify_transpose(dshape, axes): x = relay.var("x", relay.TensorType(dshape, "float32")) z = relay.transpose(x, axes=axes) func = relay.Function([x], z) x_data = np.random.uniform(low=-1, high=1, size=dshape).astype("float32") ref_res = np.transpose(x_data, axes=axes) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_transpose((2, 3, 4), (0, 2, 1)) def test_squeeze_infer_type(): n, t, d = 1, 4, 1 x = relay.var("x", relay.TensorType((n, t, d), "float32")) y = relay.squeeze(x, axis=(2,)) assert "axis=" in y.astext() yy = run_infer_type(y) assert yy.checked_type == relay.TensorType( (1, 4), "float32") n, t, d = 1, 4, 1 x = relay.var("x", relay.TensorType((n, t, d), "float32")) y = relay.squeeze(x) assert "axis=" not in y.astext() yy = run_infer_type(y) assert yy.checked_type == relay.TensorType( (4,), "float32") @pytest.mark.xfail(raises=tvm._ffi.base.TVMError) def test_squeeze_bad_axes_infer_type(): n, t, d = 1, 4, 1 x = relay.var("x", relay.TensorType((n, t, d), "float32")) y = relay.squeeze(x, axis=(1,)) yy = run_infer_type(y) def test_reshape_infer_type(): n, t, d1, d2 = 10, 20, 100, 20 x = relay.var("x", relay.TensorType((n, t, d1, d2), "float32")) y = relay.reshape(x, newshape=(n, t, 2000)) assert "newshape=" in y.astext() yy = run_infer_type(y) assert yy.checked_type == relay.TensorType( (n, t, 2000), "float32") @tvm.testing.uses_gpu def test_reshape(): def verify_reshape(shape, newshape, oshape): x = relay.var("x", relay.TensorType(shape, "float32")) z = relay.reshape(x, newshape=newshape) zz = run_infer_type(z) assert "newshape=" in z.astext() assert zz.checked_type == relay.ty.TensorType(oshape, "float32") func = relay.Function([x], z) check_grad(func) x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32") ref_res = np.reshape(x_data, oshape) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_reshape((2, 3, 4), (8, 3), (8, 3)) verify_reshape((4, 7), (2, 7, 2), (2, 7, 2)) verify_reshape((2, 3, 4), (4, 0, 2), (4, 3, 2)) verify_reshape((2, 3, 4), (2, 0, 0), (2, 3, 4)) verify_reshape((2, 3, 4), (0, -1), (2, 12)) verify_reshape((2, 3, 4), (-1, 0), (8, 3)) verify_reshape((2, 3, 4), (2, -2), (2, 3, 4)) verify_reshape((2, 3, 4), (-2, 1, 1), (2, 3, 4, 1, 1)) verify_reshape((2, 3, 4), (-3, 4), (6, 4)) verify_reshape((2, 3, 4, 5), (-3, -3), (6, 20)) verify_reshape((2, 3, 4), (0, -3), (2, 12)) verify_reshape((2, 3, 4), (-3, -2), (6, 4)) verify_reshape((2, 3, 4), (-4, 1, 2, -2), (1, 2, 3, 4)) verify_reshape((2, 3, 4), (2, -4, -1, 3, -2), (2, 1, 3, 4)) def test_reshape_fail(): with pytest.raises(TVMError) as reshape_err: x = relay.var("x", relay.TensorType([2,3], "float32")) z = relay.reshape(x, [7]) zz = run_infer_type(z) def test_reshape_like_infer_type(): # concrete shape x = relay.var("x", relay.TensorType((1, 2, 3), "float32")) y = relay.var("y", relay.TensorType((1,6), "float32")) z = relay.reshape_like(x, y) zz = run_infer_type(z) assert zz.checked_type == relay.TensorType((1, 6), "float32") # symbolic shape n, c, h, w = te.size_var("n"), 2, 3, te.size_var("w") x = relay.var("x", relay.TensorType((n, c, h, w), "float32")) y = relay.var("y", relay.TensorType((1, 8, 8), "float32")) z = relay.reshape_like(x, y) zz = run_infer_type(z) assert zz.checked_type == relay.TensorType((1, 8, 8), "float32") @tvm.testing.uses_gpu def test_reshape_like(): def verify_reshape_like(shape, oshape): x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32") y_data = np.random.uniform(low=-1, high=1, size=oshape).astype("float32") ref_res = np.reshape(x_data, y_data.shape) x = relay.var("x", relay.TensorType(shape, "float32")) y = relay.var("x", relay.TensorType(oshape, "float32")) z = relay.reshape_like(x, y) zz = run_infer_type(z) assert zz.checked_type == relay.ty.TensorType(ref_res.shape, "float32") func = relay.Function([x, y], z) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data, y_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_reshape_like((2, 3, 4), (1, 8, 3)) verify_reshape_like((4, 7), (2, 7, 2)) def test_take_infer_type(): def verify_take(dshape, indices_shape, oshape, axis=None): x = relay.var("x", relay.TensorType(dshape, "float32")) indices = relay.var("indices", relay.TensorType(indices_shape, "int32")) y = relay.take(x, indices, axis=axis) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType(oshape, "float32") d1, d2, d3 = te.var("d1"), te.var("d2"), te.var("d3") d4, d5, d6 = te.var("d4"), te.var("d5"), te.var("d6") verify_take((d1,), (1,), (1,), 0) verify_take((4,), (d1, d2), (d1, d2)) verify_take((3, 3, 3), (1, d2), (1, d2)) verify_take((d1, d2), (d3, d4, d5), (d3, d4, d5, d2), 0) verify_take((d1, d2), (d3, d4, d5), (d1, d3, d4, d5), 1) verify_take((d1, d2, d3, d4), (d5, d6), (d1, d2, d5, d6, d4), -2) @tvm.testing.uses_gpu def test_take(): def verify_take(src_shape, indices_src, axis=None, mode="clip"): src_dtype = "float32" indices_dtype = "int32" indices_src = np.array(indices_src, dtype=indices_dtype) x = relay.var("x", relay.TensorType(src_shape, src_dtype)) indices = relay.var("indices", relay.TensorType(indices_src.shape, indices_dtype)) z = relay.take(x, indices, axis=axis, mode=mode) func = relay.Function([x, indices], z) x_data = np.random.uniform(low=-1, high=1, size=src_shape).astype(src_dtype) np_mode = "raise" if mode == "fast" else mode ref_res = np.take(x_data, indices=indices_src, axis=axis, mode=np_mode) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data, indices_src) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_take((4,), [1]) verify_take((4,), [[0,1,2,3]]) verify_take((3,3,3), [[11,25]]) verify_take((4,), [[0,1],[2,3]]) verify_take((4,), [1], 0) verify_take((2,2), [[[1,0],[0,1]]], 0) verify_take((2,2), [[[1,0],[0,1]]], 1) verify_take((4,3,5,6), [[2,1,0,0]], -2) verify_take((3,4), [-5, 20]) verify_take((3,4), [-5, 20], mode="wrap") verify_take((3,4), [-1, 2], axis=0) verify_take((3,4), [-1, 2], axis=0, mode="wrap") verify_take((3,4), [-1, 2], axis=1) verify_take((3,4), [-1, 2], axis=1, mode="wrap") verify_take((3,3,3), [[11,25]], mode="fast") verify_take((3,4), [0, 2], axis=0, mode="fast") verify_take((3,4), [0, 2], axis=1, mode="fast") def test_split_infer_type(): def verify_split(dshape, indices_or_sections, ret_type, axis=None): x = relay.var("x", relay.ty.TensorType(dshape, "float32")) y = relay.split(x, indices_or_sections, axis=axis) yy = run_infer_type(y.astuple()) assert yy.checked_type == ret_type idxd = tvm.tir.indexdiv d1, d2, d3, d4 = te.var("d1"), te.var("d2"), te.var("d3"), te.var("d4") axis = te.var("axis") verify_split((5, 5, 2, 2), 5, relay.ty.TupleType(tvm.runtime.convert([ relay.ty.TensorType((5, 1, 2, 2), "float32"), relay.ty.TensorType((5, 1, 2, 2), "float32"), relay.ty.TensorType((5, 1, 2, 2), "float32"), relay.ty.TensorType((5, 1, 2, 2), "float32"), relay.ty.TensorType((5, 1, 2, 2), "float32")])), axis=1) verify_split((5, 5, 2, 2), 5, relay.ty.TupleType(tvm.runtime.convert([ relay.ty.TensorType((1, 5, 2, 2), "float32"), relay.ty.TensorType((1, 5, 2, 2), "float32"), relay.ty.TensorType((1, 5, 2, 2), "float32"), relay.ty.TensorType((1, 5, 2, 2), "float32"), relay.ty.TensorType((1, 5, 2, 2), "float32")])), axis=0) verify_split((d1, d2, d3, d4), 4, relay.ty.TupleType(tvm.runtime.convert([ relay.ty.TensorType((d1, d2, idxd(d3, 4), d4), "float32"), relay.ty.TensorType((d1, d2, idxd(d3, 4), d4), "float32"), relay.ty.TensorType((d1, d2, idxd(d3, 4), d4), "float32"), relay.ty.TensorType((d1, d2, idxd(d3, 4), d4), "float32")])), axis=2) verify_split((d1, d2, d3, d4), 2, relay.ty.TupleType(tvm.runtime.convert([ relay.ty.TensorType((idxd(d1, 2), d2, d3, d4), "float32"), relay.ty.TensorType((idxd(d1, 2), d2, d3, d4), "float32")])), axis=0) verify_split((d1, d2, d3, d4), (2, 4, 7), relay.ty.TupleType(tvm.runtime.convert([ relay.ty.TensorType((d1, 2, d3, d4), "float32"), relay.ty.TensorType((d1, 2, d3, d4), "float32"), relay.ty.TensorType((d1, 3, d3, d4), "float32"), relay.ty.TensorType((d1, (d2-7), d3, d4), "float32")])), axis=1) def test_full_infer_type(): # default settings: match input dtype x = relay.var("x", relay.TensorType((), "int8")) y = relay.full(x, ()) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((), "int8") # change the shape and dtype x = relay.var("x", relay.TensorType((), "float32")) y = relay.full(x, (1, 2), "int8") "shape=" in y.astext() yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((1, 2), "int8") @tvm.testing.uses_gpu def test_full(): def verify_full(fill_value, src_shape, dtype): x = relay.var("x", relay.scalar_type(dtype)) z = relay.full(x, src_shape, dtype) func = relay.Function([x], z) ref_res = np.full(src_shape, fill_value) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(np.array(fill_value, dtype)) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_full(4, (1, 3, 4, 4), "int32") #verify_full(4, (1, 3, 4, 4), "int64") # This does not pass, python int32 is not upcast to int64, not sure how to fix it. verify_full(4.0, (1, 4), "float32") def test_full_like_infer_type(): # concrete shape base = relay.var("base", relay.TensorType((1, 2, 3), "float32")) fill = relay.var("fill", relay.TensorType((), "float32")) y = relay.full_like(base, fill) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((1, 2, 3), "float32") # symbolic shape n, c, h, w = te.size_var("n"), 2, 3, te.size_var("w") base = relay.var("base", relay.TensorType((n, c, h, w), "float32")) fill = relay.var("fill", relay.TensorType((), "float32")) y = relay.full_like(base, fill) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((n, c, h, w), "float32") @tvm.testing.uses_gpu def test_full_like(): def verify_full_like(base, fill_value, dtype): x_data = np.random.uniform(low=-1, high=1, size=base).astype(dtype) x = relay.var("x", relay.TensorType(base, dtype)) y = relay.var("y", relay.scalar_type(dtype)) z = relay.full_like(x, y) func = relay.Function([x, y], z) ref_res = np.full_like(x_data, fill_value) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data, np.array(fill_value, dtype)) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_full_like((1, 3, 4, 4), 4, "int32") verify_full_like((1, 1), 44.0, "float32") @tvm.testing.uses_gpu def test_infer_type_leaky_relu(): n, c , h, w = te.size_var("n"), te.size_var("c"), te.size_var("h"), te.size_var("w") x = relay.var("x", relay.TensorType((n, c, h, w), "float32")) y = relay.nn.leaky_relu(x, alpha=0.1) "alpha=0.1" in y.astext() yy = run_infer_type(y) assert yy.checked_type == relay.TensorType((n, c, h, w), "float32") shape = (1, 5, 10, 10) dtype = "float32" x = relay.var("x", relay.TensorType(shape, dtype)) z = relay.nn.leaky_relu(x, alpha=0.1) assert "alpha=0.1" in z.astext() zz = run_infer_type(z) assert zz.checked_type == relay.TensorType(shape, dtype) func = relay.Function([x], z) x_data = np.random.uniform(low=-1, high=1, size=shape).astype(dtype) ref_res = np.where(x_data > 0, x_data, x_data * 0.1) for target, ctx in tvm.testing.enabled_targets(): intrp1 = relay.create_executor("graph", ctx=ctx, target=target) intrp2 = relay.create_executor("debug", ctx=ctx, target=target) op_res1 = intrp1.evaluate(func)(x_data) tvm.testing.assert_allclose(op_res1.asnumpy(), ref_res, rtol=1e-5) op_res2 = intrp2.evaluate(func)(x_data) tvm.testing.assert_allclose(op_res2.asnumpy(), ref_res, rtol=1e-5) def verify_infer_type_prelu(data, alpha, axis, output, dtype="float32"): x = relay.var("data", relay.TensorType(data, dtype)) if alpha: y = relay.var("alpha", relay.TensorType(alpha, dtype)) else: y = relay.var("alpha", relay.IncompleteType()) z = relay.nn.prelu(x, y, axis=axis) zz = run_infer_type(z) if axis != 1: assert "axis" in z.astext() assert zz.checked_type == relay.ty.TensorType(output, dtype) if not alpha: axis = axis if axis else 1 alpha_shape = (data[axis],) assert zz.args[1].checked_type == relay.TensorType(alpha_shape, "float32") if all(isinstance(v, tvm.tir.Var) == 1 for v in data) or not alpha: return func = relay.Function([x, y], z) x_data = np.random.uniform(low=-1, high=1, size=data).astype(dtype) a_data = np.random.uniform(low=-1, high=1, size=alpha).astype(dtype) if axis == 1: ref_res = (x_data < 0) * (x_data * a_data.reshape(3, 1, 1)) + (x_data>=0) * x_data else: ref_res = (x_data < 0) * (x_data * a_data.reshape(1, 1, 3)) + (x_data>=0) * x_data for target, ctx in tvm.testing.enabled_targets(): intrp1 = relay.create_executor("graph", ctx=ctx, target=target) intrp2 = relay.create_executor("debug", ctx=ctx, target=target) op_res1 = intrp1.evaluate(func)(x_data, a_data) tvm.testing.assert_allclose(op_res1.asnumpy(), ref_res, rtol=1e-5) op_res2 = intrp2.evaluate(func)(x_data, a_data) tvm.testing.assert_allclose(op_res2.asnumpy(), ref_res, rtol=1e-5) @tvm.testing.uses_gpu def test_infer_type_prelu(): n, c , h, w = te.size_var("n"), te.size_var("c"), te.size_var("h"), te.size_var("w") verify_infer_type_prelu((n, c, h, w), (c,), 1, (n, c, h, w)) verify_infer_type_prelu((n, h, w, c), (c,), 3, (n, h, w, c)) verify_infer_type_prelu((n, c, h, w), None, 1, (n, c, h, w)) verify_infer_type_prelu((n, h, w, c), None, 3, (n, h, w, c)) verify_infer_type_prelu((1, 3, 2, 2), (3,), 1, (1, 3, 2, 2)) verify_infer_type_prelu((1, 2, 2, 3), (3,), 3, (1, 2, 2, 3)) verify_infer_type_prelu((1, 3, 2, 2), None, 1, (1, 3, 2, 2)) verify_infer_type_prelu((1, 2, 2, 3), None, 3, (1, 2, 2, 3)) @tvm.testing.uses_gpu def test_arange(): def verify_arange(start, stop, step): dtype = "float32" if start is None and step is None: x = relay.arange(relay.const(stop, dtype=dtype)) ref_res = np.arange(stop).astype(dtype) elif start is None: x = relay.arange(relay.const(stop, dtype=dtype), step=relay.const(step, dtype=dtype)) ref_res = np.arange(stop, step=step).astype(dtype) elif step is None: x = relay.arange(relay.const(start, dtype=dtype), relay.const(stop, dtype=dtype)) ref_res = np.arange(start, stop).astype(dtype) else: x = relay.arange( relay.const(start, dtype=dtype), relay.const(stop, dtype=dtype), relay.const(step, dtype=dtype)) ref_res = np.arange(start, stop, step).astype(dtype) func = relay.Function([], x) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)() tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_arange(None, 20, None) verify_arange(None, 20, 2) verify_arange(1, 20, None) verify_arange(1, 20, 2) # arange doesnt' support floating point right now, see type relation # verify_arange(1, 20, 1.5) verify_arange(1, 20.5, None) verify_arange(1, 20, 3) verify_arange(20, 1, -1) # arange doesnt' support floating point right now, see type relation # verify_arange(20, 1, -1.5) @tvm.testing.uses_gpu def test_meshgrid(): def verify_meshgrid(lengths, indexing="ij"): input_vars = [] input_data = [] for i, length in enumerate(lengths): input_name = "x_{}".format(i) if length == 0: # Scalar input_vars.append(relay.var(input_name, relay.scalar_type("float32"))) input_data.append(np.array(1, "float32")) else: input_vars.append(relay.var(input_name, relay.TensorType((length,), "float32"))) input_data.append(np.arange(length).astype("float32")) z = relay.meshgrid(input_vars, indexing=indexing).astuple() func = relay.Function(input_vars, z) # Get ref ref_res = np.meshgrid(*input_data, indexing=indexing) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(*input_data) assert len(op_res) == len(ref_res) for i in range(len(op_res)): tvm.testing.assert_allclose(op_res[i].asnumpy(), ref_res[i], rtol=1e-5) verify_meshgrid([3, 5]) verify_meshgrid([4, 2], indexing="xy") verify_meshgrid([3, 5, 2]) verify_meshgrid([3, 1, 5], indexing="xy") # Length 0 signifies scalar. verify_meshgrid([3, 5, 0]) @tvm.testing.uses_gpu def test_tile(): def verify_tile(dshape, reps): x = relay.var("x", relay.TensorType(dshape, "float32")) z = relay.tile(x, reps=reps) func = relay.Function([x], z) x_data = np.random.uniform(low=-1, high=1, size=dshape).astype("float32") ref_res = np.tile(x_data, reps=reps) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_tile((2, 3, 4), (3, 2, 1)) verify_tile((2, 3, 4), (1, 2)) verify_tile((2, 3), (3, 2, 1)) @tvm.testing.uses_gpu def test_repeat(): def verify_repeat(dshape, repeats, axis): x = relay.Var("x", relay.TensorType(dshape, "float32")) func = relay.Function([x], relay.repeat(x, repeats, axis)) data = np.random.uniform(size=dshape).astype("float32") ref_res = np.repeat(data, repeats, axis) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_repeat((3,), 2, 0) verify_repeat((3, 10), 2, -1) verify_repeat((3, 2, 4), 3, 1) @tvm.testing.uses_gpu def test_stack(): def verify_stack(dshapes, axis): y = [] for shape in dshapes: y.append(relay.var("input", relay.TensorType(shape, "float32"))) x = relay.Tuple(y) z = relay.stack(x, axis=axis) func = relay.Function(y, z) x_data = [np.random.normal(size=shape).astype("float32") for shape in dshapes] ref_res = np.stack(x_data, axis=axis) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(*x_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_stack([(2,), (2,), (2,)], -1) verify_stack([(2,), (2,), (2,)], 0) verify_stack([(2, 2, 4), (2, 2, 4), (2, 2, 4)], 1) verify_stack([(2, 2, 3, 4), (2, 2, 3, 4), (2, 2, 3, 4), (2, 2, 3, 4)], -1) verify_stack([(2, 2, 3, 4), (2, 2, 3, 4), (2, 2, 3, 4), (2, 2, 3, 4)], 4) @tvm.testing.uses_gpu def test_reverse(): def verify_reverse(dshape, axis): x = relay.var("x", relay.TensorType(dshape, "float32")) z = relay.reverse(x, axis=axis) zz = run_infer_type(z) func = relay.Function([x], z) x_data = np.random.uniform(low=-1, high=1, size=dshape).astype("float32") ref_res = np.flip(x_data, axis) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_reverse((2, 3, 4), 1) verify_reverse((4, 7), 0) verify_reverse((2, 3, 4), -1) @tvm.testing.uses_gpu def test_reverse_sequence(): def verify_reverse_sequence(x_data, seq_lengths, batch_axis, seq_axis, ref_res): seq_lengths_data = np.array(seq_lengths).astype("int32") x = relay.var("x", relay.TensorType(x_data.shape, str(x_data.dtype))) z = relay.reverse_sequence(x, relay.const(seq_lengths_data), seq_axis, batch_axis) zz = run_infer_type(z) assert zz.checked_type == x.type_annotation func = relay.Function([x], z) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) indata = np.array(np.arange(0, 16)).reshape([4, 4]).astype("int32") result = [[0, 5, 10, 15], [4, 1, 6, 11], [8, 9, 2, 7], [12, 13, 14, 3]] verify_reverse_sequence(indata, [1, 2, 3, 4], 1, 0, np.array(result)) verify_reverse_sequence(indata, [1, 2, 3, 4], -1, 0, np.array(result)) verify_reverse_sequence(indata.astype("float32"), [1, 2, 3, 4], 1, 0, np.array(result).astype("float32")) indata = np.array(np.arange(0, 16)).reshape([4, 4]).astype("int32") result = [[0, 1, 2, 3], [5, 4, 6, 7], [10, 9, 8, 11], [15, 14, 13, 12]] verify_reverse_sequence(indata, [1, 2, 3, 4], 0, 1, np.array(result)) verify_reverse_sequence(indata, [1, 2, 3, 4], 0, -1, np.array(result)) verify_reverse_sequence(indata.astype("float32"), [1, 2, 3, 4], 0, 1, np.array(result).astype("float32")) indata = np.array(np.arange(0, 16)).reshape([4, 4]).astype("int32") result = [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [15, 14, 13, 12]] verify_reverse_sequence(indata, [-1, 0, 1, 5], 0, 1, np.array(result)) indata = np.array(np.arange(0, 54)).reshape([2, 3, 3, 3]).astype("int32") result = [[[[18, 19, 20], [21, 22, 23], [24, 25, 26]], [[9, 10, 11], [12, 13, 14], [15, 16, 17]], [[0, 1, 2], [3, 4, 5], [6, 7, 8]]], [[[45, 46, 47], [48, 49, 50], [51, 52, 53]], [[36, 37, 38], [39, 40, 41], [42, 43, 44]], [[27, 28, 29], [30, 31, 32], [33, 34, 35]]]] verify_reverse_sequence(indata, [3, 3], 0, 1, np.array(result)) indata = np.array(np.arange(0, 54)).reshape([2, 3, 3, 3]).astype("int32") result = [[[[9, 10, 11], [21, 22, 23], [15, 16, 17]], [[0, 1, 2], [12, 13, 14], [6, 7, 8]], [[18, 19, 20], [3, 4, 5], [24, 25, 26]]], [[[36, 37, 38], [48, 49, 50], [42, 43, 44]], [[27, 28, 29], [39, 40, 41], [33, 34, 35]], [[45, 46, 47], [30, 31, 32], [51, 52, 53]]]] verify_reverse_sequence(indata, [2, 3, 2], 2, 1, np.array(result)) indata = np.array(np.arange(0, 16)).reshape([4, 4]).astype("int32") result = [] with pytest.raises(Exception) as execinfo: verify_reverse_sequence(indata, [2, 3, 2, 4, 5], 1, 0, np.array(result)) assert "For reverse_sequnece seq_lengths size should match with dimension of batch axis," \ " but got dimension of batch_axis = 4, and seq_length size = 5" in execinfo.value.args[0] def test_scatter(): def ref_scatter(data, indices, updates, axis=0): idx = np.indices(indices.shape).reshape(indices.ndim, -1) updated_idx = np.copy(idx) indices = indices.reshape(-1) for i in range(len(indices)): updated_idx[axis, i] = indices[i] scattered = np.copy(data) scattered[tuple(updated_idx)] = updates[tuple(idx)] return scattered def verify_scatter(dshape, ishape, axis=0): d = relay.var("d", relay.TensorType(dshape, "float32")) i = relay.var("i", relay.TensorType(ishape, "int64")) u = relay.var("u", relay.TensorType(ishape, "float32")) z = relay.op.scatter(d, i, u, axis) func = relay.Function([d, i, u], z) data_np = np.random.uniform(size=dshape).astype("float32") updates_np = np.random.uniform(size=ishape).astype("float32")
ref_res = ref_scatter(data_np, indices_np, updates_np, axis) # TODO(mbrookhart): expand testing when adding more backend schedules for target, ctx in [("llvm", tvm.cpu())]: for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(data_np, indices_np, updates_np) tvm.testing.assert_allclose( op_res.asnumpy(), ref_res, rtol=1e-5) verify_scatter((10, ), (10, ), 0) verify_scatter((10, 5), (10, 5), -2) verify_scatter((10, 5), (10, 5), -1) verify_scatter((10, 5), (3, 5), 0) verify_scatter((12, 4), (7, 2), 1) verify_scatter((2, 3, 4), (1, 3, 4), 0) verify_scatter((2, 3, 4), (2, 1, 4), 1) verify_scatter((2, 3, 4), (2, 3, 1), 2) verify_scatter((2, 3, 4, 5), (1, 3, 4, 5), 0) verify_scatter((6, 3, 4, 5), (2, 3, 4, 5), 1) verify_scatter((2, 3, 8, 5), (2, 3, 1, 1), 2) verify_scatter((16, 16, 4, 5), (16, 16, 4, 5), 3) def test_scatter_add(): def ref_scatter_add(data, indices, updates, axis=0): output = np.copy(data) for index in np.ndindex(*indices.shape): new_index = list(index) new_index[axis] = indices[index] output[tuple(new_index)] += updates[index] return output def verify_scatter_add(dshape, ishape, axis=0): d = relay.var("d", relay.TensorType(dshape, "float32")) i = relay.var("i", relay.TensorType(ishape, "int64")) u = relay.var("u", relay.TensorType(ishape, "float32")) z = relay.op.scatter_add(d, i, u, axis) func = relay.Function([d, i, u], z) data_np = np.random.uniform(size=dshape).astype("float32") updates_np = np.random.uniform(size=ishape).astype("float32") indices_np = np.random.randint(-dshape[axis], dshape[axis] - 1, ishape).astype("int64") ref_res = ref_scatter_add(data_np, indices_np, updates_np, axis) # TODO(mbrookhart): expand testing when adding more backend schedules for target, ctx in [("llvm", tvm.cpu())]: for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(data_np, indices_np, updates_np) tvm.testing.assert_allclose( op_res.asnumpy(), ref_res, rtol=1e-5) verify_scatter_add((10, ), (10, ), 0) verify_scatter_add((10, 5), (10, 5), -2) verify_scatter_add((10, 5), (10, 5), -1) verify_scatter_add((10, 5), (3, 5), 0) verify_scatter_add((12, 4), (7, 2), 1) verify_scatter_add((2, 3, 4), (1, 3, 4), 0) verify_scatter_add((2, 3, 4), (2, 1, 4), 1) verify_scatter_add((2, 3, 4), (2, 3, 1), 2) verify_scatter_add((2, 3, 4, 5), (1, 3, 4, 5), 0) verify_scatter_add((6, 3, 4, 5), (2, 3, 4, 5), 1) verify_scatter_add((2, 3, 8, 5), (2, 3, 1, 1), 2) verify_scatter_add((16, 16, 4, 5), (16, 16, 4, 5), 3) @tvm.testing.uses_gpu def test_gather(): def verify_gather(data, axis, indices, ref_res): data = np.asarray(data, dtype='float32') indices = np.asarray(indices, dtype='int32') ref_res = np.asarray(ref_res) d = relay.var("x", relay.TensorType(data.shape, "float32")) i = relay.var("y", relay.TensorType(indices.shape, "int32")) z = relay.gather(d, axis, i) func = relay.Function([d, i], z) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(data, indices) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_gather([[1, 2], [3, 4]], 1, [[0, 0], [1, 0]], [[1, 1], [4, 3]]) verify_gather([[[0, 1, 2], [3, 4, 5]], [[6, 7, 8], [9, 10, 11]]], 0, [[[1, 0, 1], [1, 1, 0]]], [[[6, 1, 8], [9, 10, 5]]]) verify_gather([[[-0.2321, -0.2024, -1.7624], [-0.3829, -0.4246, 0.2448], [0.1822, 0.2360, -0.8965], [0.4497, -0.2224, 0.6103]], [[0.0408, -0.7667, -0.4303], [-0.3216, 0.7489, -0.1502], [0.0144, -0.4699, -0.0064], [-0.0768, -1.6064, 1.3390]]], 1, [[[2, 2, 0], [1, 0, 3]], [[3, 2, 0], [1, 0, 0]]], [[[0.1822, 0.2360, -1.7624], [-0.3829, -0.2024, 0.6103]], [[-0.0768, -0.4699, -0.4303], [-0.3216, -0.7667, -0.4303]]]) verify_gather([[[0.3050, 1.6986, 1.1034], [0.7020, -0.6960, -2.1818], [0.3116, -0.5773, -0.9912], [0.0835, -1.3915, -1.0720]], [[0.1694, -0.6091, -0.6539], [-0.5234, -0.1218, 0.5084], [0.2374, -1.9537, -2.0078], [-0.5700, -1.0302, 0.1558]]], 2, [[[1, 1, 0, 1], [0, 0, 2, 2], [1, 2, 1, 2], [2, 2, 1, 0]], [[0, 0, 1, 2], [2, 2, 1, 0], [1, 2, 0, 0], [0, 2, 0, 2]]], [[[1.6986, 1.6986, 0.3050, 1.6986], [0.7020, 0.7020, -2.1818, -2.1818], [-0.5773, -0.9912, -0.5773, -0.9912], [-1.0720, -1.0720, -1.3915, 0.0835]], [[0.1694, 0.1694, -0.6091, -0.6539], [0.5084, 0.5084, -0.1218, -0.5234], [-1.9537, -2.0078, 0.2374, 0.2374], [-0.5700, 0.1558, -0.5700, 0.1558]]]) @tvm.testing.uses_gpu def test_gather_nd(): def verify_gather_nd(xshape, yshape, y_data): x = relay.var("x", relay.TensorType(xshape, "float32")) y = relay.var("y", relay.TensorType(yshape, "int32")) z = relay.gather_nd(x, y) func = relay.Function([x, y], z) x_data = np.random.uniform(size=xshape).astype("float32") ref_res = x_data[tuple(y_data)] for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data, y_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) verify_gather_nd((2, 2), (2, 3), [[1, 1, 0], [0, 1, 0]]) verify_gather_nd((2, 2, 2), (2, 2), [[0, 1], [1, 0]]) verify_gather_nd((3, 2, 2), (2, 2), [[0, 1], [1, 0]]) verify_gather_nd((3, 2), (2, 2, 3), [[[0, 1, 2], [2, 0, 1]], [[0, 0, 0], [1, 1, 1]]]) def _verify_infiniteness_ops(relay_op, ref_op): for dtype in ['float32', 'float16', 'float16', 'int32', 'int16']: shape = (2, 8, 8) x = relay.var("x", relay.TensorType(shape, dtype)) y = relay_op(x) yy = run_infer_type(y) assert yy.checked_type == relay.TensorType(shape, "bool") data = np.random.uniform(size=shape).astype(dtype) if dtype.startswith('float'): data.ravel()[np.random.choice(data.size, int(data.size * 0.5), replace=False)] = np.infty data.ravel()[np.random.choice(data.size, int(data.size * 0.5), replace=False)] = np.nan intrp = create_executor() op_res = intrp.evaluate(y, {x: data}) ref_res = ref_op(data) np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=0.01) def test_isfinite(): _verify_infiniteness_ops(relay.isfinite, np.isfinite) def test_isinf(): _verify_infiniteness_ops(relay.isinf, np.isinf) @tvm.testing.uses_gpu def test_unravel_index(): def verify_unravel_index(indices, shape, dtype): x_data = np.array(indices).astype(dtype) y_data = np.array(shape).astype(dtype) x = relay.var("x", relay.TensorType(x_data.shape, dtype)) y = relay.var("y", relay.TensorType(y_data.shape, dtype)) z = relay.unravel_index(x, y) zz = run_infer_type(z) if len(x_data.shape) == 1: out_shape = [y_data.shape[0], x_data.shape[0]] else: out_shape = [y_data.shape[0]] assert zz.checked_type == relay.ty.TensorType(out_shape, dtype) func = relay.Function([x, y], z) ref_res = np.unravel_index(x_data, y_data) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data, y_data) tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5) for dtype in ["int64", "int32"]: verify_unravel_index([0, 1, 2, 3], [2, 2], dtype) verify_unravel_index([144], [5, 5, 5, 2], dtype) verify_unravel_index(144, [5, 5, 5, 2], dtype) verify_unravel_index([100, 13, 5], [5, 5, 5, 2], dtype) # In below example, 5 is out of bound for array of size 4. # Numpy implementation throws error for it # TVM implementation does not throw error instead it produces # output which is inline with Tensorflow # verify_unravel_index([0, 1, 2, 5], [2, 2], dtype) @tvm.testing.uses_gpu def test_sparse_to_dense(): def verify_sparse_to_dense(sparse_indices, sparse_values, default_value, output_shape, xpected): sparse_indices_data = np.array(sparse_indices) sparse_values_data = np.array(sparse_values) default_value_data = np.array(default_value) a = relay.var("a", relay.TensorType(sparse_indices_data.shape, str(sparse_indices_data.dtype))) b = relay.var("b", relay.TensorType(sparse_values_data.shape, str(sparse_values_data.dtype))) if default_value is None: args = [a, b] d = relay.sparse_to_dense(a, output_shape, b) else: c = relay.var("c", relay.TensorType(default_value_data.shape, str(default_value_data.dtype))) args = [a, b, c] d = relay.sparse_to_dense(a, output_shape, b, c) zz = run_infer_type(d) assert zz.checked_type == relay.ty.TensorType(output_shape, str(sparse_values_data.dtype)) func = relay.Function(args, d) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, ctx=ctx, target=target) if default_value is None: op_res = intrp.evaluate(func)(sparse_indices_data, sparse_values_data) else: op_res = intrp.evaluate(func)( sparse_indices_data, sparse_values_data, default_value_data ) tvm.testing.assert_allclose(op_res.asnumpy(), xpected, rtol=1e-5) verify_sparse_to_dense(1, 3, 0, [5], [0, 3, 0, 0, 0]) # scalar verify_sparse_to_dense([0, 1, 4], [3, 3, 3], 0, [5], [3, 3, 0, 0, 3]) # vector verify_sparse_to_dense([[0, 0], [1, 2]], [1, 2], 0, [3, 4], [[1, 0, 0, 0], [0, 0, 2, 0], [0, 0, 0, 0]]) # nXd verify_sparse_to_dense( [[0, 0, 0], [1, 2, 3]], [1, 2], 4, [2, 3, 4], [[[1, 4, 4, 4], [4, 4, 4, 4], [4, 4, 4, 4]], [[4, 4, 4, 4], [4, 4, 4, 4], [4, 4, 4, 2]]] ) # nXd verify_sparse_to_dense([0, 1, 4], [3.1, 3.1, 3.1], 3.5, [5], [3.1, 3.1, 3.5, 3.5, 3.1]) # floats verify_sparse_to_dense(1, 3, None, [5], [0, 3, 0, 0, 0]) # default value not specified #negative test cases #sparse indices should be ints #verify_sparse_to_dense([[0.1, 1.1, 4.1], [0,2,4]], [3.1, 3.1, 3.1], 3.5, [5], [3.1, 3.1, 3.5, 3.5, 3.1]) #sparse_values should be 0d or 1d only #verify_sparse_to_dense([[0, 1, 4], [0, 2, 4]], [[[3.1, 3.1, 3.1]]], 3.5, [5], [3.1, 3.1, 3.5, 3.5, 3.1]) #sparse_indices should not be > 2d tensor #verify_sparse_to_dense([[[[0, 1, 4], [0, 2, 4]]]], [[[[3.1, 3.1, 3.1]]]], 3.5, [5], [3.1, 3.1, 3.5, 3.5, 3.1]) if __name__ == "__main__": test_cast() test_zeros_ones() test_unary_identity() test_clip() test_transpose_infer_type() test_transpose() test_reshape_infer_type() test_reshape() test_reshape_fail() test_reshape_like_infer_type() test_reshape_like() test_take_infer_type() test_take() test_full_infer_type() test_full() test_full_like_infer_type() test_full_like() test_infer_type_leaky_relu() test_infer_type_prelu() test_squeeze() test_squeeze_infer_type() test_squeeze_bad_axes_infer_type() test_split_infer_type() test_arange() test_meshgrid() test_reverse() test_stack() test_tile() test_repeat() test_gather_nd() test_isfinite() test_isinf() test_unravel_index() test_sparse_to_dense() test_fixed_point_multiply()
indices_np = np.random.randint(-dshape[axis], dshape[axis] - 1, ishape).astype("int64")
ves.rs
use digest::Digest; use bitcoin::secp256k1::{self, Secp256k1}; use crate::{ crypt_engines::{ CryptEngine, ed25519_engine::Ed25519Sha, secp256k1_engine::Secp256k1Engine } }; fn test_crypt_engine_ves< Engine: CryptEngine, F >(verify_signature: F) where F: Fn( &Engine::Signature, &Engine::PublicKey, &[u8] ) -> anyhow::Result<()>
#[test] fn ed25519() { let _ = env_logger::builder().is_test(true).try_init(); test_crypt_engine_ves::<Ed25519Sha, _>(|sig, pubkey, message| { // Reuse encrypted verification with a zero encryption key to verify the decrypted signature Ed25519Sha::encrypted_verify(pubkey, &curve25519_dalek::traits::Identity::identity(), &sig, message) }); } #[test] fn secp256k1() { let _ = env_logger::builder().is_test(true).try_init(); lazy_static::lazy_static! { pub static ref SECP: Secp256k1<secp256k1::All> = Secp256k1::new(); } test_crypt_engine_ves::<Secp256k1Engine, _>(|sig, pubkey, message| { let sig_bytes = Secp256k1Engine::signature_to_bytes(sig); let pubkey_bytes = Secp256k1Engine::public_key_to_bytes(pubkey); let no_fun_message = secp256k1::Message::from_slice(message)?; let no_fun_sig = secp256k1::Signature::from_compact(&sig_bytes)?; let no_fun_pubkey = secp256k1::PublicKey::from_slice(&pubkey_bytes)?; SECP.verify(&no_fun_message, &no_fun_sig, &no_fun_pubkey)?; Ok(()) }); }
{ let signing_key = Engine::new_private_key(); let pub_signing_key = Engine::to_public_key(&signing_key); let encryption_key = Engine::new_private_key(); let pub_encryption_key = Engine::to_public_key(&encryption_key); let message: [u8; 32] = sha2::Sha256::digest(b"hello world").into(); let enc_sig = Engine::encrypted_sign(&signing_key, &pub_encryption_key, &message) .expect("Failed to create encrypted signature"); Engine::encrypted_verify(&pub_signing_key, &pub_encryption_key, &enc_sig, &message) .expect("Failed to verify encrypted signature"); let dec_sig = Engine::decrypt_signature(&enc_sig, &encryption_key) .expect("Failed to decrypt signature"); verify_signature(&dec_sig, &pub_signing_key, &message) .expect("Failed to verify decrypted signature"); let recreated_encryption_key = Engine::recover_key(&pub_encryption_key, &enc_sig, &dec_sig) .expect("Failed to recover signature encryption key"); assert_eq!(hex::encode(Engine::private_key_to_bytes(&encryption_key)), hex::encode(Engine::private_key_to_bytes(&recreated_encryption_key))); }
auth.js
// restrict routes to authenticated users only.
const withAuth = (req, res, next) => { if (!req.session.user_id) { res.redirect('/login'); } else { next(); } }; module.exports = withAuth;
server.js
const express = require('express'); const expressServer = express(); const httpServer = require('http').Server(expressServer); const io = require('socket.io')(httpServer); httpServer.listen(1191, '0.0.0.0', function () { console.log("listening"); }); expressServer.use(express.static('frontend')); function clamp(value, min, max) { if (value < min) return min; if (value > max) return max; return value; } class
{ constructor(x, y) { this.x = x || 0; this.y = y || 0; } add(x, y) { return new Vector(this.x + x, this.y + y); } } class Client { constructor(socket) { this.id = socket.id; this.socket = socket; this.location = new Vector(0, 0); this.sharingScreen = false; } send(type, data) { this.socket.emit(type, data); } toDto() { return { id: this.id, location: this.location, sharingScreen: this.sharingScreen }; } } class Room { constructor(id) { this.roomId = id; this.width = 3000; this.height = 3000; this.clients = {}; } addClient(id, socket) { const client = new Client(socket); this.clients[id] = client; client.send('init', { roomId: this.roomId, id: id, width: this.width, height: this.height, clients: this.clientsDto() }); this.moveClient(client, this.randomLocation()); socket.on('clientTeleport', data => { this.moveClient(client, new Vector(data.x, data.y)); }); socket.on('clientScreenshare', ({sharingScreen}) => { client.sharingScreen = sharingScreen; this.broadcastUpdate(client); }); socket.on('relayCandidate', ({to, connectionId, candidate}) => { const toClient = this.clients[to]; if (toClient) toClient.send('relayCandidate', { from: id, candidate: candidate, connectionId: connectionId }); }); socket.on('video-offer', (message) => { const {from, to, connectionId, description} = message; const toClient = this.clients[to]; if (toClient) toClient.send('video-offer', message); }); socket.on('video-answer', (message) => { const {from, to, connectionId, description} = message; const toClient = this.clients[to]; if (toClient) toClient.send('video-answer', message); }); } removeClient(id) { delete this.clients[id]; this.broadcast('removeClient', id); } moveClient(client, location) { location.x = clamp(location.x, 0, this.width); location.y = clamp(location.y, 0, this.height); client.location = location; this.broadcastUpdate(client); } broadcastUpdate(client) { this.broadcast('updateClient', client.toDto()); } broadcast(type, data) { Object.values(this.clients).forEach(client => client.send(type, data)); } randomLocation() { return new Vector(Math.random() * this.width, Math.random() * 700); } clientsDto() { return Object.values(this.clients).map(client => client.toDto()); } } const rooms = {}; function getRoomId(str) { const defaultRoomId = 'default'; try { return new URL(str).searchParams.get('roomId') || defaultRoomId; } catch (e) { console.error(e); return defaultRoomId; } } function getRoom(id) { let room = rooms[id]; if (!room) { room = new Room(id); rooms[id] = room; } return room; } function cleanupRoom(room) { if (Object.keys(room.clients).length === 0) { delete rooms[room.roomId]; } } io.on('connection', function (socket) { const roomId = getRoomId(socket.handshake.headers.referer); const room = getRoom(roomId); console.log(`Connection ${socket.id} to room '${roomId}', total in room: ${Object.keys(room.clients).length + 1}`); room.addClient(socket.id, socket); socket.on('disconnect', function () { room.removeClient(socket.id); console.log(`Disconnect ${socket.id} from room '${roomId}', total in room: ${Object.keys(room.clients).length}`); cleanupRoom(room); }); });
Vector
Like.js
const mongoose = require('mongoose'); const Schema = mongoose.Schema; const likeSchema = new Schema( { user_id: { type: Schema.Types.ObjectId, ref: 'User' }, book_id: {type: Schema.Types.ObjectId, ref: 'Book'}, },
{ timestamps: true } ); const Like = mongoose.model('Like', likeSchema); module.exports = Like;
test_requests.py
import pytest import os import logging import requests_helper @pytest.fixture def valid_post_image(): return open('_test/src/img001.jpg', 'rb') @pytest.fixture def valid_post_url():
@pytest.fixture def valid_headers(): return { 'Ocp-Apim-Subscription-Key': os.environ['COMPUTER_VISION_KEY'], 'Content-Type': 'application/octet-stream' } @pytest.fixture def valid_get_url(): return "operation-location" class MockResponse: def __init__(self, json_data, status_code, headers): self.json_data = json_data self.status_code = status_code self.headers = headers def json(self): return self.json_data def test_post_response_is_ok(mocker, valid_post_url, valid_headers, valid_post_image): mock_post = mocker.patch('requests_helper.requests.post') mock_post.return_value = MockResponse(None, 202, { "Operation-Location": "a-valid-url" }) response = requests_helper.post_image(valid_post_url, valid_headers, valid_post_image) assert(response.headers["Operation-Location"]) == "a-valid-url" def test_post_response_handles_500_error(mocker, valid_post_url, valid_headers, valid_post_image): mock_post = mocker.patch('requests_helper.requests.post') mock_post.return_value = MockResponse({"error": {"code": "FailedToProcess", "message": "The analyze request could not be started due to a cluster-related issue. Please resubmit the document for processing."}}, 500, {}) response = requests_helper.post_image(valid_post_url, valid_headers, valid_post_image) assert response == { "status_code": 500, "code": "FailedToProcess", "message": "The analyze request could not be started due to a cluster-related issue. Please resubmit the document for processing."} def test_get_read_result_is_ok(mocker, valid_headers): mock_get = mocker.patch('requests_helper.requests.get') mock_get.return_value = MockResponse( {"analyzeResult": { "lines": [{"text": "this is text"}]}}, 200, {}) response = requests_helper.get_read_result(valid_get_url, valid_headers) assert response.json()["analyzeResult"] is not None def test_get_read_result_handles_error(mocker, valid_headers, valid_get_url): mock_get = mocker.patch('requests_helper.requests.get') mock_get.return_value = MockResponse({"error": { "code": "fail", "message": "because"}}, 500, {}) response = requests_helper.get_read_result(valid_get_url, valid_headers) assert response["code"] == "fail"
return os.environ['COMPUTER_VISION_ENDPOINT'] + "/vision/v3.0/read/analyze"
main.rs
use a_lib::stringify; use rand::prelude::*; fn
() { println!("{{ \"values\": {}, \"sensor\": {} }}", stringify(&vec![random::<f64>(); 6]), stringify(&"temperature")); }
main
lock.rs
use std::sync::Arc; use metric::{Attributes, DurationCounter, Metric, U64Counter}; type RawRwLock = InstrumentedRawRwLock<parking_lot::RawRwLock>; /// An instrumented Read-Write Lock pub type RwLock<T> = lock_api::RwLock<RawRwLock, T>; pub type RwLockReadGuard<'a, T> = lock_api::RwLockReadGuard<'a, RawRwLock, T>; pub type RwLockWriteGuard<'a, T> = lock_api::RwLockWriteGuard<'a, RawRwLock, T>; pub type MappedRwLockReadGuard<'a, T> = lock_api::MappedRwLockReadGuard<'a, RawRwLock, T>; pub type MappedRwLockWriteGuard<'a, T> = lock_api::MappedRwLockWriteGuard<'a, RawRwLock, T>; pub type RwLockUpgradableReadGuard<'a, T> = lock_api::RwLockUpgradableReadGuard<'a, RawRwLock, T>; #[derive(Debug)] pub struct LockMetrics { exclusive_count: U64Counter, shared_count: U64Counter, upgradeable_count: U64Counter, upgrade_count: U64Counter, exclusive_wait: DurationCounter, shared_wait: DurationCounter, upgradeable_wait: DurationCounter, upgrade_wait: DurationCounter, } impl LockMetrics { pub fn new(registry: &metric::Registry, attributes: impl Into<Attributes>) -> Self { let mut attributes = attributes.into(); let count: Metric<U64Counter> = registry.register_metric( "catalog_lock", "number of times the tracked locks have been obtained", ); let wait: Metric<DurationCounter> = registry.register_metric( "catalog_lock_wait", "time spent waiting to acquire any of the tracked locks", ); attributes.insert("access", "exclusive"); let exclusive_count = count.recorder(attributes.clone()); let exclusive_wait = wait.recorder(attributes.clone()); attributes.insert("access", "shared"); let shared_count = count.recorder(attributes.clone()); let shared_wait = wait.recorder(attributes.clone()); attributes.insert("access", "upgradeable"); let upgradeable_count = count.recorder(attributes.clone()); let upgradeable_wait = wait.recorder(attributes.clone()); attributes.insert("access", "upgrade"); let upgrade_count = count.recorder(attributes.clone()); let upgrade_wait = wait.recorder(attributes); Self { exclusive_count, shared_count, upgradeable_count, upgrade_count, exclusive_wait, shared_wait, upgradeable_wait, upgrade_wait, } } pub fn new_unregistered() -> Self { Self { exclusive_count: Default::default(), shared_count: Default::default(), upgradeable_count: Default::default(), upgrade_count: Default::default(), exclusive_wait: Default::default(), shared_wait: Default::default(), upgradeable_wait: Default::default(), upgrade_wait: Default::default(), } } pub fn new_lock<T: Sized>(self: &Arc<Self>, t: T) -> RwLock<T> { self.new_lock_raw(t) } pub fn new_lock_raw<R: lock_api::RawRwLock, T: Sized>( self: &Arc<Self>, t: T, ) -> lock_api::RwLock<InstrumentedRawRwLock<R>, T> { lock_api::RwLock::const_new( InstrumentedRawRwLock { inner: R::INIT, metrics: Some(Arc::clone(self)), }, t, ) } } /// The RAII-goop for locks is provided by lock_api with individual crates /// such as parking_lot providing raw lock implementations /// /// This is a raw lock implementation that wraps another and instruments it #[derive(Debug)] pub struct InstrumentedRawRwLock<R: Sized> { inner: R, /// Stores the tracking data if any /// /// RawRwLocks must be able to be constructed in a const context, for example, /// as the associated constant RawRwLock::INIT. /// /// Arc, however, does not have a const constructor. /// /// This field is therefore optional. There is no way to access /// this field from a RwLock anyway, so ultimately it makes no difference /// that tracking is effectively disabled for default constructed locks metrics: Option<Arc<LockMetrics>>, } /// # Safety /// /// Implementations of this trait must ensure that the `RwLock` is actually /// exclusive: an exclusive lock can't be acquired while an exclusive or shared /// lock exists, and a shared lock can't be acquire while an exclusive lock /// exists. /// /// This is done by delegating to the wrapped RawRwLock implementation unsafe impl<R: lock_api::RawRwLock + Sized> lock_api::RawRwLock for InstrumentedRawRwLock<R> { const INIT: Self = Self { inner: R::INIT, metrics: None, }; type GuardMarker = R::GuardMarker; /// Acquires a shared lock, blocking the current thread until it is able to do so. fn lock_shared(&self) { match &self.metrics { Some(shared) => { // Early return if possible - Instant::now is not necessarily cheap if self.try_lock_shared() { return; } let now = std::time::Instant::now(); self.inner.lock_shared(); let elapsed = now.elapsed(); shared.shared_count.inc(1); shared.shared_wait.inc(elapsed); } None => self.inner.lock_shared(), } } /// Attempts to acquire a shared lock without blocking. fn try_lock_shared(&self) -> bool { let ret = self.inner.try_lock_shared(); if let Some(shared) = &self.metrics { if ret { shared.shared_count.inc(1); } } ret } /// Releases a shared lock. /// /// # Safety /// /// This method may only be called if a shared lock is held in the current context. #[inline] unsafe fn unlock_shared(&self) { self.inner.unlock_shared() } /// Acquires an exclusive lock, blocking the current thread until it is able to do so. fn lock_exclusive(&self) { match &self.metrics { Some(shared) => { // Early return if possible - Instant::now is not necessarily cheap if self.try_lock_exclusive() { return; } let now = std::time::Instant::now(); self.inner.lock_exclusive(); let elapsed = now.elapsed(); shared.exclusive_count.inc(1); shared.exclusive_wait.inc(elapsed); } None => self.inner.lock_exclusive(), } } /// Attempts to acquire an exclusive lock without blocking. fn try_lock_exclusive(&self) -> bool { let ret = self.inner.try_lock_exclusive(); if let Some(shared) = &self.metrics { if ret { shared.exclusive_count.inc(1); } } ret } /// Releases an exclusive lock. /// /// # Safety /// /// This method may only be called if an exclusive lock is held in the current context. #[inline] unsafe fn unlock_exclusive(&self) { self.inner.unlock_exclusive() } /// Checks if this `RwLock` is currently locked in any way. #[inline] fn is_locked(&self) -> bool { self.inner.is_locked() } } /// # Safety /// /// Implementations of this trait must ensure that the `RwLock` is actually /// exclusive: an exclusive lock can't be acquired while an exclusive or shared /// lock exists, and a shared lock can't be acquire while an exclusive lock /// exists. /// /// This is done by delegating to the wrapped RawRwLock implementation unsafe impl<R: lock_api::RawRwLockUpgrade + Sized> lock_api::RawRwLockUpgrade for InstrumentedRawRwLock<R> { fn lock_upgradable(&self) { match &self.metrics { Some(shared) => { // Early return if possible - Instant::now is not necessarily cheap if self.try_lock_upgradable() { return; } let now = std::time::Instant::now(); self.inner.lock_upgradable(); let elapsed = now.elapsed(); shared.upgradeable_count.inc(1); shared.upgradeable_wait.inc(elapsed); } None => self.inner.lock_upgradable(), } } fn try_lock_upgradable(&self) -> bool { let ret = self.inner.try_lock_upgradable(); if let Some(shared) = &self.metrics { if ret { shared.upgradeable_count.inc(1); } } ret } unsafe fn unlock_upgradable(&self) { self.inner.unlock_upgradable() } unsafe fn upgrade(&self) { match &self.metrics { Some(shared) => { // Early return if possible - Instant::now is not necessarily cheap if self.try_upgrade() { return; } let now = std::time::Instant::now(); self.inner.upgrade(); let elapsed = now.elapsed(); shared.upgrade_count.inc(1); shared.upgrade_wait.inc(elapsed); } None => self.inner.upgrade(), } } unsafe fn try_upgrade(&self) -> bool { let ret = self.inner.try_upgrade(); if let Some(shared) = &self.metrics { if ret { shared.upgrade_count.inc(1); } } ret } } #[cfg(test)] mod tests { use std::time::Duration; use super::*; #[test] fn test_counts() { let metrics = Arc::new(LockMetrics::new_unregistered()); let lock = metrics.new_lock(32); let _ = lock.read(); let _ = lock.write(); let _ = lock.read(); assert_eq!(metrics.exclusive_count.fetch(), 1); assert_eq!(metrics.shared_count.fetch(), 2); } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_shared_wait_time() { let metrics = Arc::new(LockMetrics::new_unregistered()); let l1 = Arc::new(metrics.new_lock(32)); let l2 = Arc::clone(&l1); let write = l1.write(); let join = tokio::spawn(async move { let _ = l2.read(); }); std::thread::sleep(Duration::from_millis(100)); std::mem::drop(write); join.await.unwrap(); assert_eq!(metrics.exclusive_count.fetch(), 1); assert_eq!(metrics.shared_count.fetch(), 1); assert!(metrics.exclusive_wait.fetch() < Duration::from_micros(100)); assert!(metrics.shared_wait.fetch() > Duration::from_millis(80)); assert!(metrics.shared_wait.fetch() < Duration::from_millis(200)); } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_exclusive_wait_time() { let metrics = Arc::new(LockMetrics::new_unregistered()); let l1 = Arc::new(metrics.new_lock(32)); let l2 = Arc::clone(&l1); let read = l1.read(); let join = tokio::spawn(async move { let _ = l2.write(); }); std::thread::sleep(Duration::from_millis(100)); std::mem::drop(read); join.await.unwrap(); assert_eq!(metrics.exclusive_count.fetch(), 1); assert_eq!(metrics.shared_count.fetch(), 1); assert!(metrics.shared_wait.fetch() < Duration::from_micros(100)); assert!(metrics.exclusive_wait.fetch() > Duration::from_millis(80)); assert!(metrics.exclusive_wait.fetch() < Duration::from_millis(200)); } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_multiple() { let metrics = Arc::new(LockMetrics::new_unregistered()); let l1 = Arc::new(metrics.new_lock(32)); let l1_captured = Arc::clone(&l1); let l2 = Arc::new(metrics.new_lock(12)); let l2_captured = Arc::clone(&l2); let r1 = l1.read(); let w2 = l2.write(); let join = tokio::spawn(async move { let _ = l1_captured.write(); let _ = l2_captured.read(); }); std::thread::sleep(Duration::from_millis(100)); std::mem::drop(w2); std::mem::drop(r1); join.await.unwrap(); assert_eq!(metrics.exclusive_count.fetch(), 2); assert_eq!(metrics.shared_count.fetch(), 2); assert!(metrics.shared_wait.fetch() < Duration::from_micros(100)); assert!(metrics.exclusive_wait.fetch() > Duration::from_millis(80)); assert!(metrics.exclusive_wait.fetch() < Duration::from_millis(200)); } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_upgradeable() { let metrics = Arc::new(LockMetrics::new_unregistered()); let l1 = Arc::new(metrics.new_lock(32)); let l1_captured = Arc::clone(&l1); let r1 = l1.upgradable_read(); let join = tokio::spawn(async move { let _ = l1_captured.write(); }); std::thread::sleep(Duration::from_millis(100)); std::mem::drop(r1); join.await.unwrap(); assert_eq!(metrics.exclusive_count.fetch(), 1); assert_eq!(metrics.shared_count.fetch(), 0); assert_eq!(metrics.upgradeable_count.fetch(), 1); assert_eq!(metrics.upgrade_count.fetch(), 0); assert_eq!(metrics.upgrade_wait.fetch(), Duration::from_nanos(0)); assert_eq!(metrics.shared_wait.fetch(), Duration::from_nanos(0)); assert!(metrics.upgradeable_wait.fetch() < Duration::from_micros(100)); assert!(metrics.exclusive_wait.fetch() > Duration::from_millis(80)); assert!(metrics.exclusive_wait.fetch() < Duration::from_millis(200)); } #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_upgrade() { let metrics = Arc::new(LockMetrics::new_unregistered()); let l1 = Arc::new(metrics.new_lock(32)); let l1_captured = Arc::clone(&l1);
let ur1 = l1_captured.upgradable_read(); let _ = RwLockUpgradableReadGuard::upgrade(ur1); }); std::thread::sleep(Duration::from_millis(100)); std::mem::drop(r1); join.await.unwrap(); assert_eq!(metrics.exclusive_count.fetch(), 0); assert_eq!(metrics.shared_count.fetch(), 1); assert_eq!(metrics.upgradeable_count.fetch(), 1); assert_eq!(metrics.upgrade_count.fetch(), 1); assert_eq!(metrics.exclusive_wait.fetch(), Duration::from_nanos(0)); assert!(metrics.shared_wait.fetch() < Duration::from_micros(100)); assert!(metrics.upgradeable_wait.fetch() < Duration::from_micros(100)); assert!(metrics.upgrade_wait.fetch() > Duration::from_millis(80)); assert!(metrics.upgrade_wait.fetch() < Duration::from_millis(200)); } }
let r1 = l1.read(); let join = tokio::spawn(async move {
long_list.rs
/// Demonstrates using the "list clipper" to efficiently display long /// lists in a scrolling area. /// /// You specify the height per item, and the `ListClipper` API will /// provide which item index are visible. This avoids having to create /// thousands of items only for them to not be made visible. /// /// Note this requires a fixed (or easily computable) height per item. use imgui::*; mod support; fn
() { let lots_of_words: Vec<String> = (0..10000).map(|x| format!("Line {}", x)).collect(); let system = support::init(file!()); system.main_loop(move |_, ui| { Window::new(im_str!("Hello long world")) .size([300.0, 110.0], Condition::FirstUseEver) .build(ui, || { let mut clipper = imgui::ListClipper::new(lots_of_words.len() as i32) .items_height(ui.current_font_size()) .begin(ui); while clipper.step() { for row_num in clipper.display_start()..clipper.display_end() { ui.text(&lots_of_words[row_num as usize]); } } }); }); }
main
bitwidth.rs
use crate::{ mem::NonZeroUsize, storage::Storage, BitPos, Digit, Error, Result, ShiftAmount, }; /// The `BitWidth` represents the length of an `ApInt`. /// /// Its invariant restricts it to always be a positive, non-zero value. /// Code that built's on top of `BitWidth` may and should use this invariant. /// /// This is currently just a wrapper around `NonZeroUsize` (in case /// future compiler optimizations can make use of it), but this is not /// exposed because of the potential for feature flags and custom forks for /// `apint` to use other internal types. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct BitWidth(NonZeroUsize); // We do not expose a `impl From<NonZeroUsize> for BitWidth` because that might // introduce edge cases in the future where the internal type is not // `NonZeroUsize` and is fallable. // =========================================================================== /// Constructors /// =========================================================================== impl BitWidth { /// Creates a `BitWidth` that represents a bit-width of `1` bit. #[inline] pub fn w1() -> Self { BitWidth(NonZeroUsize::new(1).unwrap()) } /// Creates a `BitWidth` that represents a bit-width of `8` bits. #[inline] pub fn w8() -> Self { BitWidth(NonZeroUsize::new(8).unwrap()) } /// Creates a `BitWidth` that represents a bit-width of `16` bits. #[inline] pub fn w16() -> Self { BitWidth(NonZeroUsize::new(16).unwrap()) } /// Creates a `BitWidth` that represents a bit-width of `32` bits. #[inline] pub fn w32() -> Self { BitWidth(NonZeroUsize::new(32).unwrap()) } /// Creates a `BitWidth` that represents a bit-width of `64` bits. #[inline] pub fn w64() -> Self { BitWidth(NonZeroUsize::new(64).unwrap()) } /// Creates a `BitWidth` that represents a bit-width of `128` bits. #[inline] pub fn w128() -> Self { BitWidth(NonZeroUsize::new(128).unwrap()) } /// Creates a `BitWidth` from the given `usize`. /// /// # Errors /// /// - If the given `width` is equal to zero. pub fn new(width: usize) -> Result<Self> { if width == 0 { return Err(Error::invalid_zero_bitwidth()) } Ok(BitWidth(NonZeroUsize::new(width).unwrap())) } /// Returns `true` if the given `BitPos` is valid for this `BitWidth`. #[inline] pub(crate) fn is_valid_pos<P>(self, pos: P) -> bool where P: Into<BitPos>, { pos.into().to_usize() < self.to_usize() } /// Returns `true` if the given `ShiftAmount` is valid for this `BitWidth`. #[inline] pub(crate) fn is_valid_shift_amount<S>(self, shift_amount: S) -> bool where S: Into<ShiftAmount>, { shift_amount.into().to_usize() < self.to_usize() } /// Returns the `BitPos` for the most significant bit of an `ApInt` with /// this `BitWidth`. #[inline] pub(crate) fn msb_pos(self) -> BitPos { BitPos::from(self.to_usize() - 1) } } impl From<usize> for BitWidth { fn from(width: usize) -> BitWidth
} // =========================================================================== /// API /// =========================================================================== impl BitWidth { /// Converts this `BitWidth` into a `usize`. #[inline] pub fn to_usize(self) -> usize { self.0.get() } /// Returns the number of exceeding bits that is implied for `ApInt` /// instances with this `BitWidth`. /// /// For example for an `ApInt` with a `BitWidth` of `140` bits requires /// exactly `3` digits (assuming `Digit::BITS == 64` bits). The third /// however, only requires `140 - 128 = 12` bits of its `64` bits in /// total to represent the `ApInt` instance. So `excess_bits` returns /// `12` for a `BitWidth` that is equal to `140`. /// /// *Note:* A better name for this method has yet to be found! pub(crate) fn excess_bits(self) -> Option<usize> { match self.to_usize() % Digit::BITS { 0 => None, n => Some(n), } } /// Returns the exceeding `BitWidth` of this `BitWidth`. /// /// *Note:* This is just a simple wrapper around the `excess_bits` method. /// Read the documentation of `excess_bits` for more information /// about what is actually returned by this. pub(crate) fn excess_width(self) -> Option<BitWidth> { NonZeroUsize::new(self.to_usize() % Digit::BITS).map(BitWidth) } /// Returns a storage specifier that tells the caller if `ApInt`'s /// associated with this bitwidth require an external memory (`Ext`) to /// store their digits or may use inplace memory (`Inl`). /// /// *Note:* Maybe this method should be removed. A constructor for /// `Storage` fits better for this purpose. #[inline] pub(crate) fn storage(self) -> Storage { Storage::from(self) } /// Returns the number of digits that are required to represent an /// `ApInt` with this `BitWidth`. /// /// *Note:* Maybe we should move this method somewhere else? #[inline] pub(crate) fn required_digits(self) -> usize { ((self.to_usize() - 1) / Digit::BITS) + 1 } } #[cfg(test)] mod tests { use super::*; mod excess_bits { use super::*; #[test] fn powers_of_two() { assert_eq!(BitWidth::w1().excess_bits(), Some(1)); assert_eq!(BitWidth::w8().excess_bits(), Some(8)); assert_eq!(BitWidth::w16().excess_bits(), Some(16)); assert_eq!(BitWidth::w32().excess_bits(), Some(32)); assert_eq!(BitWidth::w64().excess_bits(), None); assert_eq!(BitWidth::w128().excess_bits(), None); } #[test] fn multiples_of_50() { assert_eq!(BitWidth::new(50).unwrap().excess_bits(), Some(50)); assert_eq!(BitWidth::new(100).unwrap().excess_bits(), Some(36)); assert_eq!(BitWidth::new(150).unwrap().excess_bits(), Some(22)); assert_eq!(BitWidth::new(200).unwrap().excess_bits(), Some(8)); assert_eq!(BitWidth::new(250).unwrap().excess_bits(), Some(58)); assert_eq!(BitWidth::new(300).unwrap().excess_bits(), Some(44)); } } }
{ BitWidth::new(width).unwrap() }
hoverWidget.ts
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { DisposableStore } from 'vs/base/common/lifecycle'; import { Event, Emitter } from 'vs/base/common/event'; import * as dom from 'vs/base/browser/dom'; import { IKeybindingService } from 'vs/platform/keybinding/common/keybinding'; import { IHoverTarget, IHoverOptions } from 'vs/workbench/services/hover/browser/hover'; import { KeyCode } from 'vs/base/common/keyCodes'; import { IConfigurationService } from 'vs/platform/configuration/common/configuration'; import { EDITOR_FONT_DEFAULTS, IEditorOptions } from 'vs/editor/common/config/editorOptions'; import { HoverAction, HoverPosition, HoverWidget as BaseHoverWidget } from 'vs/base/browser/ui/hover/hoverWidget'; import { Widget } from 'vs/base/browser/ui/widget'; import { AnchorPosition } from 'vs/base/browser/ui/contextview/contextview'; import { IOpenerService } from 'vs/platform/opener/common/opener'; import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation'; import { MarkdownRenderer } from 'vs/editor/browser/core/markdownRenderer'; import { isMarkdownString } from 'vs/base/common/htmlContent'; const $ = dom.$; type TargetRect = { left: number, right: number, top: number, bottom: number, width: number, height: number, center: { x: number, y: number }, }; const enum Constants { PointerSize = 3, HoverBorderWidth = 2, HoverWindowEdgeMargin = 2, } export class HoverWidget extends Widget { private readonly _messageListeners = new DisposableStore(); private readonly _mouseTracker: CompositeMouseTracker; private readonly _hover: BaseHoverWidget; private readonly _hoverPointer: HTMLElement | undefined; private readonly _hoverContainer: HTMLElement; private readonly _target: IHoverTarget; private readonly _linkHandler: (url: string) => any; private _isDisposed: boolean = false; private _hoverPosition: HoverPosition; private _forcePosition: boolean = false; private _x: number = 0; private _y: number = 0; get isDisposed(): boolean { return this._isDisposed; } get domNode(): HTMLElement { return this._hover.containerDomNode; } private readonly _onDispose = this._register(new Emitter<void>()); get onDispose(): Event<void> { return this._onDispose.event; } private readonly _onRequestLayout = this._register(new Emitter<void>()); get onRequestLayout(): Event<void> { return this._onRequestLayout.event; } get anchor(): AnchorPosition { return this._hoverPosition === HoverPosition.BELOW ? AnchorPosition.BELOW : AnchorPosition.ABOVE; } get x(): number { return this._x; } get y(): number { return this._y; } constructor( options: IHoverOptions, @IKeybindingService private readonly _keybindingService: IKeybindingService, @IConfigurationService private readonly _configurationService: IConfigurationService, @IOpenerService private readonly _openerService: IOpenerService, @IInstantiationService private readonly _instantiationService: IInstantiationService, ) { super(); this._linkHandler = options.linkHandler || (url => this._openerService.open(url, { allowCommands: (isMarkdownString(options.content) && options.content.isTrusted) })); this._target = 'targetElements' in options.target ? options.target : new ElementHoverTarget(options.target); this._hoverPointer = options.showPointer ? $('div.workbench-hover-pointer') : undefined; this._hover = this._register(new BaseHoverWidget()); this._hover.containerDomNode.classList.add('workbench-hover', 'fadeIn'); if (options.compact) { this._hover.containerDomNode.classList.add('workbench-hover', 'compact'); } if (options.skipFadeInAnimation) { this._hover.containerDomNode.classList.add('skip-fade-in'); } if (options.additionalClasses) { this._hover.containerDomNode.classList.add(...options.additionalClasses); } if (options.forcePosition) { this._forcePosition = true; } this._hoverPosition = options.hoverPosition ?? HoverPosition.ABOVE; // Don't allow mousedown out of the widget, otherwise preventDefault will call and text will // not be selected. this.onmousedown(this._hover.containerDomNode, e => e.stopPropagation()); // Hide hover on escape this.onkeydown(this._hover.containerDomNode, e => { if (e.equals(KeyCode.Escape)) { this.dispose(); } }); const rowElement = $('div.hover-row.markdown-hover'); const contentsElement = $('div.hover-contents'); if (typeof options.content === 'string') { contentsElement.textContent = options.content; contentsElement.style.whiteSpace = 'pre-wrap'; } else if (options.content instanceof HTMLElement) { contentsElement.appendChild(options.content); } else { const markdown = options.content; const mdRenderer = this._instantiationService.createInstance( MarkdownRenderer, { codeBlockFontFamily: this._configurationService.getValue<IEditorOptions>('editor').fontFamily || EDITOR_FONT_DEFAULTS.fontFamily } ); const { element } = mdRenderer.render(markdown, { actionHandler: { callback: (content) => this._linkHandler(content), disposables: this._messageListeners }, asyncRenderCallback: () => { contentsElement.classList.add('code-hover-contents'); // This changes the dimensions of the hover so trigger a layout this._onRequestLayout.fire(); } }); contentsElement.appendChild(element); } rowElement.appendChild(contentsElement); this._hover.contentsDomNode.appendChild(rowElement); if (options.actions && options.actions.length > 0) { const statusBarElement = $('div.hover-row.status-bar'); const actionsElement = $('div.actions'); options.actions.forEach(action => { const keybinding = this._keybindingService.lookupKeybinding(action.commandId); const keybindingLabel = keybinding ? keybinding.getLabel() : null; HoverAction.render(actionsElement, { label: action.label, commandId: action.commandId, run: e => { action.run(e); this.dispose(); }, iconClass: action.iconClass }, keybindingLabel); }); statusBarElement.appendChild(actionsElement); this._hover.containerDomNode.appendChild(statusBarElement); } this._hoverContainer = $('div.workbench-hover-container'); if (this._hoverPointer) { this._hoverContainer.appendChild(this._hoverPointer); } this._hoverContainer.appendChild(this._hover.containerDomNode); const mouseTrackerTargets = [...this._target.targetElements]; let hideOnHover: boolean; if (options.actions && options.actions.length > 0) { // If there are actions, require hover so they can be accessed hideOnHover = false; } else { if (options.hideOnHover === undefined) { // Defaults to true when string, false when markdown as it may contain links hideOnHover = typeof options.content === 'string'; } else { // It's set explicitly hideOnHover = options.hideOnHover; } } if (!hideOnHover) { mouseTrackerTargets.push(this._hoverContainer); } this._mouseTracker = new CompositeMouseTracker(mouseTrackerTargets); this._register(this._mouseTracker.onMouseOut(() => this.dispose())); this._register(this._mouseTracker); } public render(container: HTMLElement): void { container.appendChild(this._hoverContainer); this.layout(); } public layout() { this._hover.containerDomNode.classList.remove('right-aligned'); this._hover.contentsDomNode.style.maxHeight = ''; const targetBounds = this._target.targetElements.map(e => e.getBoundingClientRect()); const top = Math.min(...targetBounds.map(e => e.top)); const right = Math.max(...targetBounds.map(e => e.right)); const bottom = Math.max(...targetBounds.map(e => e.bottom)); const left = Math.min(...targetBounds.map(e => e.left)); const width = right - left; const height = bottom - top; const targetRect: TargetRect = { top, right, bottom, left, width, height, center: { x: left + (width / 2), y: top + (height / 2) } }; this.adjustHorizontalHoverPosition(targetRect); this.adjustVerticalHoverPosition(targetRect); // Offset the hover position if there is a pointer so it aligns with the target element this._hoverContainer.style.padding = ''; this._hoverContainer.style.margin = ''; if (this._hoverPointer) { switch (this._hoverPosition) { case HoverPosition.RIGHT: targetRect.left += Constants.PointerSize; targetRect.right += Constants.PointerSize; this._hoverContainer.style.paddingLeft = `${Constants.PointerSize}px`; this._hoverContainer.style.marginLeft = `${-Constants.PointerSize}px`; break; case HoverPosition.LEFT: targetRect.left -= Constants.PointerSize; targetRect.right -= Constants.PointerSize; this._hoverContainer.style.paddingRight = `${Constants.PointerSize}px`; this._hoverContainer.style.marginRight = `${-Constants.PointerSize}px`; break; case HoverPosition.BELOW: targetRect.top += Constants.PointerSize; targetRect.bottom += Constants.PointerSize; this._hoverContainer.style.paddingTop = `${Constants.PointerSize}px`; this._hoverContainer.style.marginTop = `${-Constants.PointerSize}px`; break; case HoverPosition.ABOVE: targetRect.top -= Constants.PointerSize; targetRect.bottom -= Constants.PointerSize; this._hoverContainer.style.paddingBottom = `${Constants.PointerSize}px`; this._hoverContainer.style.marginBottom = `${-Constants.PointerSize}px`; break; } targetRect.center.x = targetRect.left + (width / 2); targetRect.center.y = targetRect.top + (height / 2); } this.computeXCordinate(targetRect); this.computeYCordinate(targetRect); if (this._hoverPointer) { // reset this._hoverPointer.classList.remove('top'); this._hoverPointer.classList.remove('left'); this._hoverPointer.classList.remove('right'); this._hoverPointer.classList.remove('bottom'); this.setHoverPointerPosition(targetRect); } this._hover.onContentsChanged(); } private computeXCordinate(target: TargetRect): void { const hoverWidth = this._hover.containerDomNode.clientWidth + Constants.HoverBorderWidth; if (this._target.x !== undefined) { this._x = this._target.x; } else if (this._hoverPosition === HoverPosition.RIGHT) { this._x = target.right; } else if (this._hoverPosition === HoverPosition.LEFT) { this._x = target.left - hoverWidth; } else { if (this._hoverPointer) { this._x = target.center.x - (this._hover.containerDomNode.clientWidth / 2); } else { this._x = target.left; } // Hover is going beyond window towards right end if (this._x + hoverWidth >= document.documentElement.clientWidth) { this._hover.containerDomNode.classList.add('right-aligned'); this._x = Math.max(document.documentElement.clientWidth - hoverWidth - Constants.HoverWindowEdgeMargin, document.documentElement.clientLeft); } } // Hover is going beyond window towards left end if (this._x < document.documentElement.clientLeft) { this._x = target.left + Constants.HoverWindowEdgeMargin; } } private computeYCordinate(target: TargetRect): void { if (this._target.y !== undefined) { this._y = this._target.y; } else if (this._hoverPosition === HoverPosition.ABOVE) { this._y = target.top; } else if (this._hoverPosition === HoverPosition.BELOW) { this._y = target.bottom - 2; } else { if (this._hoverPointer) { this._y = target.center.y + (this._hover.containerDomNode.clientHeight / 2); } else { this._y = target.bottom; } } // Hover on bottom is going beyond window if (this._y > window.innerHeight) { this._y = target.bottom; } } private adjustHorizontalHoverPosition(target: TargetRect): void { // Do not adjust horizontal hover position if x cordiante is provided if (this._target.x !== undefined) { return; } // When force position is enabled, restrict max width if (this._forcePosition) { const padding = (this._hoverPointer ? Constants.PointerSize : 0) + Constants.HoverBorderWidth; if (this._hoverPosition === HoverPosition.RIGHT) { this._hover.containerDomNode.style.maxWidth = `${document.documentElement.clientWidth - target.right - padding}px`; } else if (this._hoverPosition === HoverPosition.LEFT) { this._hover.containerDomNode.style.maxWidth = `${target.left - padding}px`; } return; } // Position hover on right to target if (this._hoverPosition === HoverPosition.RIGHT) { // Hover on the right is going beyond window. if (target.right + this._hover.containerDomNode.clientWidth >= document.documentElement.clientWidth) { this._hoverPosition = HoverPosition.LEFT; } } // Position hover on left to target if (this._hoverPosition === HoverPosition.LEFT) { // Hover on the left is going beyond window. if (target.left - this._hover.containerDomNode.clientWidth <= document.documentElement.clientLeft) { this._hoverPosition = HoverPosition.RIGHT; } } } private adjustVerticalHoverPosition(target: TargetRect): void { // Do not adjust vertical hover position if y cordiante is provided if (this._target.y !== undefined) { return; } // When force position is enabled, restrict max height if (this._forcePosition) { const padding = (this._hoverPointer ? Constants.PointerSize : 0) + Constants.HoverBorderWidth; if (this._hoverPosition === HoverPosition.ABOVE) { this._hover.containerDomNode.style.maxHeight = `${target.top - padding}px`; } else if (this._hoverPosition === HoverPosition.BELOW) { this._hover.containerDomNode.style.maxHeight = `${window.innerHeight - target.bottom - padding}px`; } return; } // Position hover on top of the target if (this._hoverPosition === HoverPosition.ABOVE) { // Hover on top is going beyond window if (target.top - this._hover.containerDomNode.clientHeight < 0) { this._hoverPosition = HoverPosition.BELOW; } } // Position hover below the target else if (this._hoverPosition === HoverPosition.BELOW) { // Hover on bottom is going beyond window if (target.bottom + this._hover.containerDomNode.clientHeight > window.innerHeight) { this._hoverPosition = HoverPosition.ABOVE; } } } private setHoverPointerPosition(target: TargetRect): void { if (!this._hoverPointer) { return; } switch (this._hoverPosition) { case HoverPosition.LEFT: case HoverPosition.RIGHT: this._hoverPointer.classList.add(this._hoverPosition === HoverPosition.LEFT ? 'right' : 'left'); const hoverHeight = this._hover.containerDomNode.clientHeight; // If hover is taller than target, then show the pointer at the center of target if (hoverHeight > target.height) { this._hoverPointer.style.top = `${target.center.y - (this._y - hoverHeight) - Constants.PointerSize}px`; } // Otherwise show the pointer at the center of hover else { this._hoverPointer.style.top = `${Math.round((hoverHeight / 2)) - Constants.PointerSize}px`; } break; case HoverPosition.ABOVE: case HoverPosition.BELOW: this._hoverPointer.classList.add(this._hoverPosition === HoverPosition.ABOVE ? 'bottom' : 'top'); const hoverWidth = this._hover.containerDomNode.clientWidth; // Position pointer at the center of the hover let pointerLeftPosition = Math.round((hoverWidth / 2)) - Constants.PointerSize; // If pointer goes beyond target then position it at the center of the target const pointerX = this._x + pointerLeftPosition; if (pointerX < target.left || pointerX > target.right) { pointerLeftPosition = target.center.x - this._x - Constants.PointerSize; } this._hoverPointer.style.left = `${pointerLeftPosition}px`; break; } } public focus() { this._hover.containerDomNode.focus(); } public hide(): void { this.dispose(); } public override dispose(): void { if (!this._isDisposed) { this._onDispose.fire(); this._hoverContainer.remove(); this._messageListeners.dispose(); this._target.dispose(); super.dispose(); } this._isDisposed = true; } } class
extends Widget { private _isMouseIn: boolean = false; private _mouseTimeout: number | undefined; private readonly _onMouseOut = new Emitter<void>(); get onMouseOut(): Event<void> { return this._onMouseOut.event; } constructor( private _elements: HTMLElement[] ) { super(); this._elements.forEach(n => this.onmouseover(n, () => this._onTargetMouseOver())); this._elements.forEach(n => this.onnonbubblingmouseout(n, () => this._onTargetMouseOut())); } private _onTargetMouseOver(): void { this._isMouseIn = true; this._clearEvaluateMouseStateTimeout(); } private _onTargetMouseOut(): void { this._isMouseIn = false; this._evaluateMouseState(); } private _evaluateMouseState(): void { this._clearEvaluateMouseStateTimeout(); // Evaluate whether the mouse is still outside asynchronously such that other mouse targets // have the opportunity to first their mouse in event. this._mouseTimeout = window.setTimeout(() => this._fireIfMouseOutside(), 0); } private _clearEvaluateMouseStateTimeout(): void { if (this._mouseTimeout) { clearTimeout(this._mouseTimeout); this._mouseTimeout = undefined; } } private _fireIfMouseOutside(): void { if (!this._isMouseIn) { this._onMouseOut.fire(); } } } class ElementHoverTarget implements IHoverTarget { readonly targetElements: readonly HTMLElement[]; constructor( private _element: HTMLElement ) { this.targetElements = [this._element]; } dispose(): void { } }
CompositeMouseTracker
web-speech-api.js
function diff_algorithm(raw_input, split_eng_word = true) { let input = raw_input.sort((a,b) => b.length - a.length);; if (input.length == 0 || input[0].length == 0) return []; let rtn = []; let buf_eng = /[A-Za-z]/.test(input[0][0]); let index = new Array(input.length).fill(0); let batch_match_count = 0; while (index[0] + batch_match_count < input[0].length) { let str_0_char = input[0][index[0] + batch_match_count]; if (input.every((val, i) => (!i || val[index[i] + batch_match_count] == str_0_char) && (split_eng_word || !(buf_eng != (buf_eng = /[A-Za-z]/.test(val[index[i] + batch_match_count])) && buf_eng == true)) )) { batch_match_count++; continue; } rtn.push([input[0].substring(index[0], index[0] + batch_match_count)]); input.forEach((val, i) => index[i] += batch_match_count); batch_match_count = 0; let all_base_end = input.length; let the_best_find_index = []; let the_best_find_total = 100000000; input.forEach((base_str, base) => { let k = index[base] + 1; for (; k < base_str.length; k++) { if (!split_eng_word && /[A-Za-z]/.test(base_str[k])) continue; let find_index = new Array(input.length).fill(0); let find_total = 0; let all_find = input.every((val, j) => { find_index[j] = val.indexOf(base_str[k], index[j]); find_total = Math.max(find_total, find_index[j] - index[j]); return find_index[j] != -1; }); if (all_find && find_total < the_best_find_total) { the_best_find_index = find_index; the_best_find_total = find_total; break; } } all_base_end -= !(k < base_str.length); }); if (all_base_end == 0) { break; } let buf = []; input.forEach((val, i) => buf.push(val.substring(index[i], index[i] = the_best_find_index[i]))); rtn.push(buf); } let buf = []; input.forEach((val, i) => buf.push(val.substring(index[i]))); rtn.push(buf); return rtn; } function filter_duplicate(input) { return input.map(x => [...new Set(x)]).filter(x => x.length != 0 && (x.length != 1 || x[0] != "")); } var messages = { "start": { msg: 'Click on the microphone icon and begin speaking.', class: 'alert-success'}, "speak_now": { msg: 'Speak now.', class: 'alert-success'}, "no_speech": { msg: 'No speech was detected. You may need to adjust your <a href="//support.google.com/chrome/answer/2693767" target="_blank">microphone settings</a>.', class: 'alert-danger'}, "no_microphone": { msg: 'No microphone was found. Ensure that a microphone is installed and that <a href="//support.google.com/chrome/answer/2693767" target="_blank">microphone settings</a> are configured correctly.', class: 'alert-danger'}, "allow": { msg: 'Click the "Allow" button above to enable your microphone.', class: 'alert-warning'}, "denied": { msg: 'Permission to use microphone was denied.', class: 'alert-danger'}, "blocked": { msg: 'Permission to use microphone is blocked. To change, go to chrome://settings/content/microphone', class: 'alert-danger'}, "upgrade": { msg: 'Web Speech API is not supported by this browser. It is only supported by <a href="//www.google.com/chrome">Chrome</a> version 25 or later on desktop and Android mobile.', class: 'alert-danger'}, "stop": { msg: 'Stop listening, click on the microphone icon to restart', class: 'alert-success'}, "copy": { msg: 'Content copy to clipboard successfully.', class: 'alert-success'}, } var final_transcript = ''; var tokens_str = ''; var recognizing = false; var ignore_onend; var start_timestamp; var recognition; $( document ).ready(function() { for (var i = 0; i < langs.length; i++) { select_language.options[i] = new Option(langs[i][0], i); } select_language.selectedIndex = 6; updateCountry(); select_dialect.selectedIndex = 6; if (!('webkitSpeechRecognition' in window)) { upgrade(); } else { showInfo('start'); start_button.style.display = 'inline-block'; recognition = new webkitSpeechRecognition(); recognition.continuous = true; recognition.interimResults = true; recognition.maxAlternatives = 20; recognition.onstart = function() { recognizing = true; showInfo('speak_now'); start_img.src = 'images/mic-animation.gif'; }; recognition.onerror = function(event) { if (event.error == 'no-speech') { start_img.src = 'images/mic.gif'; showInfo('no_speech'); ignore_onend = true; } if (event.error == 'audio-capture') { start_img.src = 'images/mic.gif'; showInfo('no_microphone'); ignore_onend = true; } if (event.error == 'not-allowed') { if (event.timeStamp - start_timestamp < 100) { showInfo('blocked'); } else { showInfo('denied'); } ignore_onend = true; } }; recognition.onend = function() { recognizing = false; if (ignore_onend) { return; } start_img.src = 'images/mic.gif'; if (!final_transcript) { showInfo('start'); return; } showInfo('stop'); if (window.getSelection) { window.getSelection().removeAllRanges(); var range = document.createRange(); range.selectNode(document.getElementById('final_span')); window.getSelection().addRange(range); } }; recognition.onresult = function(event) { var interim_transcript = ''; for (var i = event.resultIndex; i < event.results.length; ++i) { if (event.results[i].isFinal) { var res = [].concat(...event.results[i]).map(({transcript})=>transcript); var tokens = filter_duplicate(diff_algorithm(res, false)); tokens_str += tokens.map(x => `<span contenteditable="true">${x[0]}</span>`).join(""); console.log(tokens); console.log(tokens_str); final_transcript += event.results[i][0].transcript; } else { interim_transcript += event.results[i][0].transcript; } } final_transcript = capitalize(final_transcript); final_span.innerHTML = linebreak(final_transcript); interim_span.innerHTML = linebreak(interim_transcript); jerryResults.innerHTML = tokens_str; }; } }); function updateCountry() { for (var i = select_dialect.options.length - 1; i >= 0; i--) { select_dialect.remove(i); } var list = langs[select_language.selectedIndex]; for (var i = 1; i < list.length; i++) { select_dialect.options.add(new Option(list[i][1], list[i][0])); } select_dialect.style.visibility = list[1].length == 1 ? 'hidden' : 'visible'; } function upgrade() { start_button.style.visibility = 'hidden'; showInfo('upgrade'); } var two_line = /\n\n/g; var one_line = /\n/g; function linebreak(s) { return s.replace(two_line, '<p></p>').replace(one_line, '<br>'); } var first_char = /\S/; function
(s) { return s.replace(first_char, function(m) { return m.toUpperCase(); }); } $("#copy_button").click(function () { if (recognizing) { recognizing = false; recognition.stop(); } setTimeout(copyToClipboard, 500); }); function copyToClipboard() { if (document.selection) { var range = document.body.createTextRange(); range.moveToElementText(document.getElementById('results')); range.select().createTextRange(); document.execCommand("copy"); } else if (window.getSelection) { var range = document.createRange(); range.selectNode(document.getElementById('results')); window.getSelection().addRange(range); document.execCommand("copy"); } showInfo('copy'); } $("#start_button").click(function () { if (recognizing) { recognition.stop(); return; } final_transcript = ''; tokens_str = ''; recognition.lang = select_dialect.value; recognition.start(); ignore_onend = false; final_span.innerHTML = ''; interim_span.innerHTML = ''; start_img.src = 'images/mic-slash.gif'; showInfo('allow'); start_timestamp = event.timeStamp; }); $("#select_language").change(function () { updateCountry(); }); function showInfo(s) { if (s) { var message = messages[s]; $("#info").html(message.msg); $("#info").removeClass(); $("#info").addClass('alert'); $("#info").addClass(message.class); } else { $("#info").removeClass(); $("#info").addClass('d-none'); } }
capitalize
logger.rs
extern crate env_logger; extern crate log_panics; extern crate log; #[cfg(target_os = "android")] extern crate android_logger; extern crate libc; use self::env_logger::Builder as EnvLoggerBuilder; use self::log::{LevelFilter, Level}; use std::env; use std::io::Write; #[cfg(target_os = "android")] use self::android_logger::Filter; use log::{Record, Metadata}; use self::libc::{c_void, c_char}; use std::ffi::CString; use std::ptr; use errors::common::CommonError; use utils::ctypes; pub static mut LOGGER_STATE: LoggerState = LoggerState::Default; pub enum LoggerState { Default, Custom } impl LoggerState { pub fn
(&self) -> (*const c_void, Option<EnabledCB>, Option<LogCB>, Option<FlushCB>) { match self { LoggerState::Default => (ptr::null(), Some(LibindyDefaultLogger::enabled), Some(LibindyDefaultLogger::log), Some(LibindyDefaultLogger::flush)), LoggerState::Custom => unsafe { (CONTEXT, ENABLED_CB, LOG_CB, FLUSH_CB) }, } } } pub type EnabledCB = extern fn(context: *const c_void, level: u32, target: *const c_char) -> bool; pub type LogCB = extern fn(context: *const c_void, level: u32, target: *const c_char, message: *const c_char, module_path: *const c_char, file: *const c_char, line: u32); pub type FlushCB = extern fn(context: *const c_void); static mut CONTEXT: *const c_void = ptr::null(); static mut ENABLED_CB: Option<EnabledCB> = None; static mut LOG_CB: Option<LogCB> = None; static mut FLUSH_CB: Option<FlushCB> = None; pub struct LibindyLogger { context: *const c_void, enabled: Option<EnabledCB>, log: LogCB, flush: Option<FlushCB>, } impl LibindyLogger { fn new(context: *const c_void, enabled: Option<EnabledCB>, log: LogCB, flush: Option<FlushCB>) -> Self { LibindyLogger { context, enabled, log, flush } } } impl log::Log for LibindyLogger { fn enabled(&self, metadata: &Metadata) -> bool { if let Some(enabled_cb) = self.enabled { let level = metadata.level() as u32; let target = CString::new(metadata.target()).unwrap(); enabled_cb(self.context, level, target.as_ptr(), ) } else { true } } fn log(&self, record: &Record) { let log_cb = self.log; let level = record.level() as u32; let target = CString::new(record.target()).unwrap(); let message = CString::new(record.args().to_string()).unwrap(); let module_path = record.module_path().map(|a| CString::new(a).unwrap()); let file = record.file().map(|a| CString::new(a).unwrap()); let line = record.line().unwrap_or(0); log_cb(self.context, level, target.as_ptr(), message.as_ptr(), module_path.as_ref().map(|p| p.as_ptr()).unwrap_or(ptr::null()), file.as_ref().map(|p| p.as_ptr()).unwrap_or(ptr::null()), line, ) } fn flush(&self) { if let Some(flush_cb) = self.flush { flush_cb(self.context) } } } unsafe impl Sync for LibindyLogger {} unsafe impl Send for LibindyLogger {} impl LibindyLogger { pub fn init(context: *const c_void, enabled: Option<EnabledCB>, log: LogCB, flush: Option<FlushCB>) -> Result<(), CommonError> { let logger = LibindyLogger::new(context, enabled, log, flush); log::set_boxed_logger(Box::new(logger))?; log::set_max_level(LevelFilter::Trace); unsafe { LOGGER_STATE = LoggerState::Custom; CONTEXT = context; ENABLED_CB = enabled; LOG_CB = Some(log); FLUSH_CB = flush }; Ok(()) } } pub struct LibindyDefaultLogger; impl LibindyDefaultLogger { pub fn init(pattern: Option<String>) -> Result<(), CommonError> { let pattern = pattern.or(env::var("RUST_LOG").ok()); log_panics::init(); //Logging of panics is essential for android. As android does not log to stdout for native code if cfg!(target_os = "android") { #[cfg(target_os = "android")] let log_filter = match pattern { Some(val) => match val.to_lowercase().as_ref() { "error" => Filter::default().with_min_level(log::Level::Error), "warn" => Filter::default().with_min_level(log::Level::Warn), "info" => Filter::default().with_min_level(log::Level::Info), "debug" => Filter::default().with_min_level(log::Level::Debug), "trace" => Filter::default().with_min_level(log::Level::Trace), _ => Filter::default().with_min_level(log::Level::Error), }, None => Filter::default().with_min_level(log::Level::Error) }; //Set logging to off when deploying production android app. #[cfg(target_os = "android")] android_logger::init_once(log_filter); info!("Logging for Android"); } else { EnvLoggerBuilder::new() .format(|buf, record| writeln!(buf, "{:>5}|{:<30}|{:>35}:{:<4}| {}", record.level(), record.target(), record.file().get_or_insert(""), record.line().get_or_insert(0), record.args())) .filter(None, LevelFilter::Off) .parse(pattern.as_ref().map(String::as_str).unwrap_or("")) .try_init()?; } unsafe { LOGGER_STATE = LoggerState::Default }; Ok(()) } extern fn enabled(_context: *const c_void, level: u32, target: *const c_char) -> bool { let level = get_level(level); let target = ctypes::c_str_to_string(target).unwrap().unwrap(); let metadata: Metadata = Metadata::builder() .level(level) .target(&target) .build(); log::logger().enabled(&metadata) } extern fn log(_context: *const c_void, level: u32, target: *const c_char, args: *const c_char, module_path: *const c_char, file: *const c_char, line: u32) { let target = ctypes::c_str_to_string(target).unwrap().unwrap(); let args = ctypes::c_str_to_string(args).unwrap().unwrap(); let module_path = ctypes::c_str_to_string(module_path).unwrap(); let file = ctypes::c_str_to_string(file).unwrap(); let level = get_level(level); log::logger().log( &Record::builder() .args(format_args!("{}", args)) .level(level) .target(&target) .module_path(module_path) .file(file) .line(Some(line)) .build(), ); } extern fn flush(_context: *const c_void) { log::logger().flush() } } fn get_level(level: u32) -> Level { match level { 1 => Level::Error, 2 => Level::Warn, 3 => Level::Info, 4 => Level::Debug, 5 => Level::Trace, _ => unreachable!(), } } #[macro_export] macro_rules! try_log { ($expr:expr) => (match $expr { Ok(val) => val, Err(err) => { error!("try_log! | {}", err); return Err(From::from(err)) } }) } macro_rules! _map_err { ($lvl:expr, $expr:expr) => ( |err| { log!($lvl, "{} - {}", $expr, err); err } ); ($lvl:expr) => ( |err| { log!($lvl, "{}", err); err } ) } #[macro_export] macro_rules! map_err_err { () => ( _map_err!(::log::Level::Error) ); ($($arg:tt)*) => ( _map_err!(::log::Level::Error, $($arg)*) ) } #[macro_export] macro_rules! map_err_trace { () => ( _map_err!(::log::Level::Trace) ); ($($arg:tt)*) => ( _map_err!(::log::Level::Trace, $($arg)*) ) } #[macro_export] macro_rules! map_err_info { () => ( _map_err!(::log::Level::Info) ); ($($arg:tt)*) => ( _map_err!(::log::Level::Info, $($arg)*) ) } #[cfg(debug_assertions)] #[macro_export] macro_rules! secret { ($val:expr) => {{ $val }}; } #[cfg(not(debug_assertions))] #[macro_export] macro_rules! secret { ($val:expr) => {{ "_" }}; }
get
component.go
package integration import ( "fmt" "io/ioutil" "os" "path/filepath" "runtime" "strings" "time" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/openshift/odo/tests/helper" ) func componentTests(args ...string)
{ var oc helper.OcRunner var project string var context string var originalDir string BeforeEach(func() { SetDefaultEventuallyTimeout(10 * time.Minute) SetDefaultConsistentlyDuration(30 * time.Second) context = helper.CreateNewContext() os.Setenv("GLOBALODOCONFIG", filepath.Join(context, "config.yaml")) oc = helper.NewOcRunner("oc") }) // Clean up after the test // This is run after every Spec (It) var _ = AfterEach(func() { helper.DeleteDir(context) os.Unsetenv("GLOBALODOCONFIG") }) Context("Generic machine readable output tests", func() { It("Command should fail if json is non-existent for a command", func() { output := helper.CmdShouldFail("odo", "version", "-o", "json") Expect(output).To(ContainSubstring("Machine readable output is not yet implemented for this command")) }) It("Help for odo version should not contain machine output", func() { output := helper.CmdShouldPass("odo", "version", "--help") Expect(output).NotTo(ContainSubstring("Specify output format, supported format: json")) }) }) Context("Creating component", func() { JustBeforeEach(func() { project = helper.CreateRandProject() originalDir = helper.Getwd() helper.Chdir(context) }) JustAfterEach(func() { helper.DeleteProject(project) helper.Chdir(originalDir) }) It("should create component even in new project", func() { helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git", "--git", "https://github.com/openshift/nodejs-ex", "--project", project, "--context", context, "--app", "testing")...) helper.CmdShouldPass("odo", append(args, "push", "--context", context, "-v4")...) oc.SwitchProject(project) projectList := helper.CmdShouldPass("odo", "project", "list") Expect(projectList).To(ContainSubstring(project)) }) It("Without an application should create one", func() { componentName := helper.RandString(6) helper.CmdShouldPass("odo", append(args, "create", "nodejs", "--project", project, componentName, "--ref", "master", "--git", "https://github.com/openshift/nodejs-ex")...) helper.CmdShouldPass("odo", append(args, "push")...) appName := helper.CmdShouldPass("odo", "app", "list") Expect(appName).ToNot(BeEmpty()) // checking if application name is set to "app" applicationName := helper.GetConfigValue("Application") Expect(applicationName).To(Equal("app")) // clean up helper.CmdShouldPass("odo", "app", "delete", "app", "-f") helper.CmdShouldFail("odo", "app", "delete", "app", "-f") helper.CmdShouldFail("odo", append(args, "delete", componentName, "-f")...) }) It("should create default named component when passed same context differently", func() { dir := filepath.Base(context) helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", "--project", project, "--context", ".", "--app", "testing")...) componentName := helper.GetConfigValueWithContext("Name", context) Expect(componentName).To(ContainSubstring("nodejs-" + dir)) helper.DeleteDir(filepath.Join(context, ".odo")) helper.CmdShouldPass("odo", append(args, "create", "nodejs", "--project", project, "--context", context, "--app", "testing")...) newComponentName := helper.GetConfigValueWithContext("Name", context) Expect(newComponentName).To(ContainSubstring("nodejs-" + dir)) }) It("should show an error when ref flag is provided with sources except git", func() { outputErr := helper.CmdShouldFail("odo", append(args, "create", "nodejs", "--project", project, "cmp-git", "--ref", "test")...) Expect(outputErr).To(ContainSubstring("The --ref flag is only valid for --git flag")) }) It("create component twice fails from same directory", func() { helper.CmdShouldPass("odo", append(args, "create", "nodejs", "nodejs", "--project", project)...) output := helper.CmdShouldFail("odo", append(args, "create", "nodejs", "nodejs", "--project", project)...) Expect(output).To(ContainSubstring("this directory already contains a component")) }) It("should list out component in json format along with path flag", func() { var contextPath string helper.CmdShouldPass("odo", append(args, "create", "nodejs", "nodejs", "--project", project)...) if runtime.GOOS == "windows" { contextPath = strings.Replace(strings.TrimSpace(context), "\\", "\\\\", -1) } else { contextPath = strings.TrimSpace(context) } // this orders the json desired, err := helper.Unindented(fmt.Sprintf(`{"kind":"Component","apiVersion":"odo.openshift.io/v1alpha1","metadata":{"name":"nodejs","namespace":"%s","creationTimestamp":null},"spec":{"app":"app","type":"nodejs","source":"./","ports":["8080/TCP"]},"status":{"context":"%s","state":"Not Pushed"}}`, project, contextPath)) Expect(err).Should(BeNil()) actual, err := helper.Unindented(helper.CmdShouldPass("odo", append(args, "list", "-o", "json", "--path", filepath.Dir(context))...)) Expect(err).Should(BeNil()) // since the tests are run parallel, there might be many odo component directories in the root folder // so we only check for the presence of the current one Expect(actual).Should(ContainSubstring(desired)) }) It("should list out pushed components of different projects in json format along with path flag", func() { var contextPath string var contextPath2 string helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", "nodejs", "--project", project)...) helper.CmdShouldPass("odo", append(args, "push")...) project2 := helper.CreateRandProject() context2 := helper.CreateNewContext() helper.Chdir(context2) helper.CopyExample(filepath.Join("source", "python"), context2) helper.CmdShouldPass("odo", append(args, "create", "python", "python", "--project", project2)...) helper.CmdShouldPass("odo", append(args, "push")...) if runtime.GOOS == "windows" { contextPath = strings.Replace(strings.TrimSpace(context), "\\", "\\\\", -1) contextPath2 = strings.Replace(strings.TrimSpace(context2), "\\", "\\\\", -1) } else { contextPath = strings.TrimSpace(context) contextPath2 = strings.TrimSpace(context2) } actual, err := helper.Unindented(helper.CmdShouldPass("odo", append(args, "list", "-o", "json", "--path", filepath.Dir(context))...)) Expect(err).Should(BeNil()) helper.Chdir(context) helper.DeleteDir(context2) helper.DeleteProject(project2) // this orders the json expected, err := helper.Unindented(fmt.Sprintf(`{"kind":"Component","apiVersion":"odo.openshift.io/v1alpha1","metadata":{"name":"nodejs","namespace":"%s","creationTimestamp":null},"spec":{"app":"app","type":"nodejs","source":"./","ports":["8080/TCP"]},"status":{"context":"%s","state":"Pushed"}}`, project, contextPath)) Expect(err).Should(BeNil()) Expect(actual).Should(ContainSubstring(expected)) // this orders the json expected, err = helper.Unindented(fmt.Sprintf(`{"kind":"Component","apiVersion":"odo.openshift.io/v1alpha1","metadata":{"name":"python","namespace":"%s","creationTimestamp":null},"spec":{"app":"app","type":"python","source":"./","ports":["8080/TCP"]},"status":{"context":"%s","state":"Pushed"}}`, project2, contextPath2)) Expect(err).Should(BeNil()) Expect(actual).Should(ContainSubstring(expected)) }) It("should create the component from the branch ref when provided", func() { helper.CmdShouldPass("odo", append(args, "create", "ruby", "ref-test", "--project", project, "--git", "https://github.com/girishramnani/ruby-ex.git", "--ref", "develop")...) helper.CmdShouldPass("odo", append(args, "push")...) }) It("should list the component", func() { helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git", "--project", project, "--git", "https://github.com/openshift/nodejs-ex", "--min-memory", "100Mi", "--max-memory", "300Mi", "--min-cpu", "0.1", "--max-cpu", "2", "--context", context, "--app", "testing")...) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) cmpList := helper.CmdShouldPass("odo", append(args, "list", "--project", project)...) Expect(cmpList).To(ContainSubstring("cmp-git")) actualCompListJSON := helper.CmdShouldPass("odo", append(args, "list", "--project", project, "-o", "json")...) desiredCompListJSON := fmt.Sprintf(`{"kind":"List","apiVersion":"odo.openshift.io/v1alpha1","metadata":{},"items":[{"kind":"Component","apiVersion":"odo.openshift.io/v1alpha1","metadata":{"name":"cmp-git","namespace":"%s","creationTimestamp":null},"spec":{"app":"testing","type":"nodejs","source":"https://github.com/openshift/nodejs-ex","env":[{"name":"DEBUG_PORT","value":"5858"}]},"status":{"state":"Pushed"}}]}`, project) Expect(desiredCompListJSON).Should(MatchJSON(actualCompListJSON)) cmpAllList := helper.CmdShouldPass("odo", append(args, "list", "--all")...) Expect(cmpAllList).To(ContainSubstring("cmp-git")) helper.CmdShouldPass("odo", append(args, "delete", "cmp-git", "-f")...) }) It("should list the component when it is not pushed", func() { helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git", "--project", project, "--git", "https://github.com/openshift/nodejs-ex", "--min-memory", "100Mi", "--max-memory", "300Mi", "--min-cpu", "0.1", "--max-cpu", "2", "--context", context, "--app", "testing")...) cmpList := helper.CmdShouldPass("odo", append(args, "list", "--context", context)...) Expect(cmpList).To(ContainSubstring("cmp-git")) Expect(cmpList).To(ContainSubstring("Not Pushed")) helper.CmdShouldPass("odo", append(args, "delete", "-f", "--all", "--context", context)...) }) It("should describe the component when it is not pushed", func() { helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git", "--project", project, "--git", "https://github.com/openshift/nodejs-ex", "--context", context, "--app", "testing")...) helper.CmdShouldPass("odo", "url", "create", "url-1", "--context", context) cmpDescribe := helper.CmdShouldPass("odo", append(args, "describe", "--context", context)...) Expect(cmpDescribe).To(ContainSubstring("cmp-git")) Expect(cmpDescribe).To(ContainSubstring("nodejs")) Expect(cmpDescribe).To(ContainSubstring("url-1")) Expect(cmpDescribe).To(ContainSubstring("https://github.com/openshift/nodejs-ex")) cmpDescribeJSON, err := helper.Unindented(helper.CmdShouldPass("odo", append(args, "describe", "-o", "json", "--context", context)...)) Expect(err).Should(BeNil()) expected, err := helper.Unindented(`{"kind": "Component","apiVersion": "odo.openshift.io/v1alpha1","metadata": {"name": "cmp-git","namespace": "` + project + `","creationTimestamp": null},"spec":{"app": "testing","type":"nodejs","source": "https://github.com/openshift/nodejs-ex","url": ["url-1"],"ports": ["8080/TCP"]},"status": {"state": "Not Pushed"}}`) Expect(err).Should(BeNil()) Expect(cmpDescribeJSON).To(Equal(expected)) helper.CmdShouldPass("odo", append(args, "delete", "-f", "--all", "--context", context)...) }) It("should list the component in the same app when one is pushed and the other one is not pushed", func() { helper.Chdir(originalDir) helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git", "--project", project, "--git", "https://github.com/openshift/nodejs-ex", "--context", context, "--app", "testing")...) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) context2 := helper.CreateNewContext() helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git-2", "--project", project, "--git", "https://github.com/openshift/nodejs-ex", "--context", context2, "--app", "testing")...) cmpList := helper.CmdShouldPass("odo", append(args, "list", "--context", context2)...) Expect(cmpList).To(ContainSubstring("cmp-git")) Expect(cmpList).To(ContainSubstring("cmp-git-2")) Expect(cmpList).To(ContainSubstring("Not Pushed")) Expect(cmpList).To(ContainSubstring("Pushed")) helper.CmdShouldPass("odo", append(args, "delete", "-f", "--all", "--context", context)...) helper.CmdShouldPass("odo", append(args, "delete", "-f", "--all", "--context", context2)...) helper.DeleteDir(context2) }) It("should succeed listing catalog components", func() { // Since components catalog is constantly changing, we simply check to see if this command passes.. rather than checking the JSON each time. output := helper.CmdShouldPass("odo", "catalog", "list", "components", "-o", "json") Expect(output).To(ContainSubstring("List")) }) It("binary component should not fail when --context is not set", func() { oc.ImportJavaIS(project) helper.CopyExample(filepath.Join("binary", "java", "openjdk"), context) // Was failing due to https://github.com/openshift/odo/issues/1969 helper.CmdShouldPass("odo", append(args, "create", "java:8", "sb-jar-test", "--project", project, "--binary", filepath.Join(context, "sb.jar"))...) }) It("binary component should fail when --binary is not in --context folder", func() { oc.ImportJavaIS(project) helper.CopyExample(filepath.Join("binary", "java", "openjdk"), context) newContext := helper.CreateNewContext() defer helper.DeleteDir(newContext) output := helper.CmdShouldFail("odo", append(args, "create", "java:8", "sb-jar-test", "--project", project, "--binary", filepath.Join(context, "sb.jar"), "--context", newContext)...) Expect(output).To(ContainSubstring("inside of the context directory")) }) It("binary component is valid if path is relative and includes ../", func() { oc.ImportJavaIS(project) helper.CopyExample(filepath.Join("binary", "java", "openjdk"), context) relativeContext := fmt.Sprintf("..%c%s", filepath.Separator, filepath.Base(context)) fmt.Printf("relativeContext = %#v\n", relativeContext) helper.CmdShouldPass("odo", append(args, "create", "java:8", "sb-jar-test", "--project", project, "--binary", filepath.Join(context, "sb.jar"), "--context", relativeContext)...) }) }) Context("Test odo push with --source and --config flags", func() { JustBeforeEach(func() { project = helper.CreateRandProject() originalDir = helper.Getwd() helper.Chdir(context) }) JustAfterEach(func() { helper.DeleteProject(project) helper.Chdir(originalDir) }) Context("Using project flag(--project) and current directory", func() { It("create local nodejs component and push source and code separately", func() { appName := "nodejs-push-test" cmpName := "nodejs" helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", cmpName, "--app", appName, "--project", project)...) // component doesn't exist yet so attempt to only push source should fail helper.CmdShouldFail("odo", append(args, "push", "--source")...) // Push only config and see that the component is created but wothout any source copied helper.CmdShouldPass("odo", append(args, "push", "--config")...) oc.VerifyCmpExists(cmpName, appName, project) // Push only source and see that the component is updated with source code helper.CmdShouldPass("odo", append(args, "push", "--source")...) oc.VerifyCmpExists(cmpName, appName, project) remoteCmdExecPass := oc.CheckCmdOpInRemoteCmpPod( cmpName, appName, project, []string{"sh", "-c", "ls -la $ODO_S2I_DEPLOYMENT_DIR/package.json"}, func(cmdOp string, err error) bool { return err == nil }, ) Expect(remoteCmdExecPass).To(Equal(true)) }) It("create local nodejs component and push source and code at once", func() { appName := "nodejs-push-test" cmpName := "nodejs-push-atonce" helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", cmpName, "--app", appName, "--project", project)...) // Push only config and see that the component is created but wothout any source copied helper.CmdShouldPass("odo", append(args, "push")...) oc.VerifyCmpExists(cmpName, appName, project) remoteCmdExecPass := oc.CheckCmdOpInRemoteCmpPod( cmpName, appName, project, []string{"sh", "-c", "ls -la $ODO_S2I_DEPLOYMENT_DIR/package.json"}, func(cmdOp string, err error) bool { return err == nil }, ) Expect(remoteCmdExecPass).To(Equal(true)) }) }) Context("when --context is used", func() { // don't need to switch to any dir here, as this test should use --context flag It("create local nodejs component and push source and code separately", func() { appName := "nodejs-push-context-test" cmpName := "nodejs" helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", cmpName, "--context", context, "--app", appName, "--project", project)...) //TODO: verify that config was properly created // component doesn't exist yet so attempt to only push source should fail helper.CmdShouldFail("odo", append(args, "push", "--source", "--context", context)...) // Push only config and see that the component is created but wothout any source copied helper.CmdShouldPass("odo", append(args, "push", "--config", "--context", context)...) oc.VerifyCmpExists(cmpName, appName, project) // Push only source and see that the component is updated with source code helper.CmdShouldPass("odo", append(args, "push", "--source", "--context", context)...) oc.VerifyCmpExists(cmpName, appName, project) remoteCmdExecPass := oc.CheckCmdOpInRemoteCmpPod( cmpName, appName, project, []string{"sh", "-c", "ls -la $ODO_S2I_DEPLOYMENT_DIR/package.json"}, func(cmdOp string, err error) bool { return err == nil }, ) Expect(remoteCmdExecPass).To(Equal(true)) }) It("create local nodejs component and push source and code at once", func() { appName := "nodejs-push-context-test" cmpName := "nodejs-push-atonce" helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", cmpName, "--app", appName, "--context", context, "--project", project)...) // Push both config and source helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) oc.VerifyCmpExists(cmpName, appName, project) remoteCmdExecPass := oc.CheckCmdOpInRemoteCmpPod( cmpName, appName, project, []string{"sh", "-c", "ls -la $ODO_S2I_DEPLOYMENT_DIR/package.json"}, func(cmdOp string, err error) bool { return err == nil }, ) Expect(remoteCmdExecPass).To(Equal(true)) }) }) }) Context("Creating Component even in new project", func() { var project string JustBeforeEach(func() { project = helper.RandString(10) }) JustAfterEach(func() { helper.DeleteProject(project) }) It("should create component", func() { helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git", "--git", "https://github.com/openshift/nodejs-ex", "--project", project, "--context", context, "--app", "testing")...) helper.CmdShouldPass("odo", append(args, "push", "--context", context, "-v4")...) oc.SwitchProject(project) projectList := helper.CmdShouldPass("odo", "project", "list") Expect(projectList).To(ContainSubstring(project)) }) }) Context("Test odo push with --now flag during creation", func() { JustBeforeEach(func() { project = helper.CreateRandProject() originalDir = helper.Getwd() helper.Chdir(context) }) JustAfterEach(func() { helper.Chdir(originalDir) }) It("should successfully create config and push code in one create command with --now", func() { appName := "nodejs-create-now-test" cmpName := "nodejs-push-atonce" helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", cmpName, "--app", appName, "--project", project, "--now")...) oc.VerifyCmpExists(cmpName, appName, project) remoteCmdExecPass := oc.CheckCmdOpInRemoteCmpPod( cmpName, appName, project, []string{"sh", "-c", "ls -la $ODO_S2I_DEPLOYMENT_DIR/package.json"}, func(cmdOp string, err error) bool { return err == nil }, ) Expect(remoteCmdExecPass).To(Equal(true)) }) }) Context("when component is in the current directory and --project flag is used", func() { appName := "app" componentName := "my-component" JustBeforeEach(func() { project = helper.CreateRandProject() originalDir = helper.Getwd() helper.Chdir(context) }) JustAfterEach(func() { helper.Chdir(originalDir) helper.DeleteProject(project) }) It("create local nodejs component twice and fail", func() { helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", "--project", project, "--env", "key=value,key1=value1")...) output := helper.CmdShouldFail("odo", append(args, "create", "nodejs", "--project", project, "--env", "key=value,key1=value1")...) Expect(output).To(ContainSubstring("this directory already contains a component")) }) It("creates and pushes local nodejs component and then deletes --all", func() { helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", componentName, "--app", appName, "--project", project, "--env", "key=value,key1=value1")...) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) helper.CmdShouldPass("odo", append(args, "delete", "--context", context, "-f", "--all", "--app", appName)...) componentList := helper.CmdShouldPass("odo", append(args, "list", "--context", context, "--app", appName, "--project", project)...) Expect(componentList).NotTo(ContainSubstring(componentName)) files := helper.ListFilesInDir(context) Expect(files).NotTo(ContainElement(".odo")) }) It("creates a local python component, pushes it and then deletes it using --all flag", func() { helper.CopyExample(filepath.Join("source", "python"), context) helper.CmdShouldPass("odo", append(args, "create", "python", componentName, "--app", appName, "--project", project, "--context", context)...) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) helper.CmdShouldPass("odo", append(args, "delete", "--context", context, "-f")...) helper.CmdShouldPass("odo", append(args, "delete", "--all", "-f", "--context", context)...) componentList := helper.CmdShouldPass("odo", append(args, "list", "--context", context, "--app", appName, "--project", project)...) Expect(componentList).NotTo(ContainSubstring(componentName)) files := helper.ListFilesInDir(context) Expect(files).NotTo(ContainElement(".odo")) }) It("creates a local python component, pushes it and then deletes it using --all flag in local directory", func() { helper.CopyExample(filepath.Join("source", "python"), context) helper.CmdShouldPass("odo", append(args, "create", "python", componentName, "--app", appName, "--project", project)...) helper.CmdShouldPass("odo", append(args, "push")...) helper.CmdShouldPass("odo", append(args, "delete", "--all", "-f")...) componentList := helper.CmdShouldPass("odo", append(args, "list", "--app", appName, "--project", project)...) Expect(componentList).NotTo(ContainSubstring(componentName)) files := helper.ListFilesInDir(context) fmt.Println(files) Expect(files).NotTo(ContainElement(".odo")) }) It("creates a local python component and check for unsupported warning", func() { helper.CopyExample(filepath.Join("source", "python"), context) output := helper.CmdShouldPass("odo", append(args, "create", "python", componentName, "--app", appName, "--project", project, "--context", context)...) Expect(output).To(ContainSubstring("Warning: python is not fully supported by odo, and it is not guaranteed to work")) }) It("creates a local nodejs component and check unsupported warning hasn't occured", func() { helper.CopyExample(filepath.Join("source", "nodejs"), context) output := helper.CmdShouldPass("odo", append(args, "create", "nodejs:8", componentName, "--app", appName, "--project", project, "--context", context)...) Expect(output).NotTo(ContainSubstring("Warning")) }) }) /* Enable once #1782 and #1778 are fixed Context("odo component updating", func() { JustBeforeEach(func() { project = helper.CreateRandProject() }) JustAfterEach(func() { helper.DeleteProject(project) }) It("should be able to create a git component and update it from local to git", func() { helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git", "--project", project, "--min-cpu", "0.1", "--max-cpu", "2", "--context", context, "--app", "testing")...) helper.CmdShouldPass("odo", append(args, "push", "--context", context, "-v", "4")...) getCPULimit := oc.MaxCPU("cmp-git", "testing", project) Expect(getCPULimit).To(ContainSubstring("2")) getCPURequest := oc.MinCPU("cmp-git", "testing", project) Expect(getCPURequest).To(ContainSubstring("100m")) // update the component config according to the git component helper.CmdShouldPass("odo", "config", "set", "sourcelocation", "https://github.com/openshift/nodejs-ex", "--context", context, "-f") helper.CmdShouldPass("odo", "config", "set", "sourcetype", "git", "--context", context, "-f") // check if the earlier resource requests are still valid helper.CmdShouldPass("odo", append(args, "push", "--context", context, "-v", "4")...) getCPULimit = oc.MaxCPU("cmp-git", "testing", project) Expect(getCPULimit).To(ContainSubstring("2")) getCPURequest = oc.MinCPU("cmp-git", "testing", project) Expect(getCPURequest).To(ContainSubstring("100m")) // check the source location and type in the deployment config getSourceLocation := oc.SourceLocationDC("cmp-git", "testing", project) Expect(getSourceLocation).To(ContainSubstring("https://github.com/openshift/nodejs-ex")) getSourceType := oc.SourceTypeDC("cmp-git", "testing", project) Expect(getSourceType).To(ContainSubstring("git")) // since the current component type is git // check the source location and type in the build config getSourceLocation = oc.SourceLocationBC("cmp-git", "testing", project) Expect(getSourceLocation).To(ContainSubstring("https://github.com/openshift/nodejs-ex")) getSourceType = oc.SourceTypeBC("cmp-git", "testing", project) Expect(getSourceType).To(ContainSubstring("Git")) }) It("should be able to update a component from git to local", func() { helper.CmdShouldPass("odo", append(args, "create", "nodejs", "cmp-git", "--project", project, "--git", "https://github.com/openshift/nodejs-ex", "--min-memory", "100Mi", "--max-memory", "300Mi", "--context", context, "--app", "testing")...) helper.CmdShouldPass("odo", append(args, "push", "--context", context, "-v", "4")...) getMemoryLimit := oc.MaxMemory("cmp-git", "testing", project) Expect(getMemoryLimit).To(ContainSubstring("300Mi")) getMemoryRequest := oc.MinMemory("cmp-git", "testing", project) Expect(getMemoryRequest).To(ContainSubstring("100Mi")) // update the component config according to the git component helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", "config", "set", "sourcelocation", "./", "--context", context, "-f") helper.CmdShouldPass("odo", "config", "set", "sourcetype", "local", "--context", context, "-f") // check if the earlier resource requests are still valid helper.CmdShouldPass("odo", append(args, "push", "--context", context, "-v", "4")...) getMemoryLimit = oc.MaxMemory("cmp-git", "testing", project) Expect(getMemoryLimit).To(ContainSubstring("300Mi")) getMemoryRequest = oc.MinMemory("cmp-git", "testing", project) Expect(getMemoryRequest).To(ContainSubstring("100Mi")) // check the source location and type in the deployment config getSourceLocation := oc.SourceLocationDC("cmp-git", "testing", project) var sourcePath string if runtime.GOOS == "windows" { sourcePath = "file:///./" } else { sourcePath = "file://./" } Expect(getSourceLocation).To(ContainSubstring(sourcePath)) getSourceType := oc.SourceTypeDC("cmp-git", "testing", project) Expect(getSourceType).To(ContainSubstring("local")) }) }) }) */ Context("odo component delete, list and describe", func() { appName := "app" cmpName := "nodejs" JustBeforeEach(func() { project = helper.CreateRandProject() originalDir = helper.Getwd() }) JustAfterEach(func() { helper.DeleteProject(project) helper.Chdir(originalDir) }) It("should pass inside a odo directory without component name as parameter", func() { helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", cmpName, "--app", appName, "--project", project, "--context", context)...) helper.CmdShouldPass("odo", "url", "create", "example", "--context", context) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) // changing directory to the context directory helper.Chdir(context) cmpListOutput := helper.CmdShouldPass("odo", append(args, "list")...) Expect(cmpListOutput).To(ContainSubstring(cmpName)) cmpDescribe := helper.CmdShouldPass("odo", append(args, "describe")...) Expect(cmpDescribe).To(ContainSubstring(cmpName)) Expect(cmpDescribe).To(ContainSubstring("nodejs")) if runtime.GOOS == "windows" { Expect(cmpDescribe).To(ContainSubstring("file:///./")) } else { Expect(cmpDescribe).To(ContainSubstring("file://./")) } url := helper.DetermineRouteURL(context) Expect(cmpDescribe).To(ContainSubstring(url)) helper.CmdShouldPass("odo", append(args, "delete", "-f")...) }) It("should fail outside a odo directory without component name as parameter", func() { helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", cmpName, "--app", appName, "--project", project, "--context", context)...) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) // list command should fail as no app flag is given helper.CmdShouldFail("odo", append(args, "list", "--project", project)...) // commands should fail as the component name is missing helper.CmdShouldFail("odo", append(args, "describe", "--app", appName, "--project", project)...) helper.CmdShouldFail("odo", append(args, "delete", "-f", "--app", appName, "--project", project)...) }) It("should pass outside a odo directory with component name as parameter", func() { helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", cmpName, "--app", appName, "--project", project, "--context", context)...) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) cmpListOutput := helper.CmdShouldPass("odo", append(args, "list", "--app", appName, "--project", project)...) Expect(cmpListOutput).To(ContainSubstring(cmpName)) actualDesCompJSON := helper.CmdShouldPass("odo", append(args, "describe", cmpName, "--app", appName, "--project", project, "-o", "json")...) var sourcePath string if runtime.GOOS == "windows" { sourcePath = "file:///./" } else { sourcePath = "file://./" } desiredDesCompJSON := fmt.Sprintf(`{"kind":"Component","apiVersion":"odo.openshift.io/v1alpha1","metadata":{"name":"nodejs","namespace":"%s","creationTimestamp":null},"spec":{"app":"app","type":"nodejs","source":"%s","env":[{"name":"DEBUG_PORT","value":"5858"}]},"status":{"state":"Pushed"}}`, project, sourcePath) Expect(desiredDesCompJSON).Should(MatchJSON(actualDesCompJSON)) helper.CmdShouldPass("odo", append(args, "delete", cmpName, "--app", appName, "--project", project, "-f")...) }) }) Context("when running odo push multiple times, check for existence of environment variables", func() { JustBeforeEach(func() { project = helper.CreateRandProject() originalDir = helper.Getwd() }) JustAfterEach(func() { helper.DeleteProject(project) helper.Chdir(originalDir) }) It("should should retain the same environment variable on multiple push", func() { componentName := helper.RandString(6) appName := helper.RandString(6) helper.CopyExample(filepath.Join("source", "nodejs"), context) helper.CmdShouldPass("odo", append(args, "create", "nodejs", componentName, "--app", appName, "--project", project, "--context", context)...) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) helper.Chdir(context) helper.CmdShouldPass("odo", "config", "set", "--env", "FOO=BAR") helper.CmdShouldPass("odo", append(args, "push")...) dcName := oc.GetDcName(componentName, project) stdOut := helper.CmdShouldPass("oc", "get", "dc/"+dcName, "-n", project, "-o", "go-template={{ .spec.template.spec }}{{.env}}") Expect(stdOut).To(ContainSubstring("FOO")) helper.CmdShouldPass("odo", append(args, "push")...) stdOut = oc.DescribeDc(dcName, project) Expect(stdOut).To(ContainSubstring("FOO")) }) }) Context("Creating component with numeric named context", func() { var contextNumeric string JustBeforeEach(func() { var err error ts := time.Now().UnixNano() contextNumeric, err = ioutil.TempDir("", fmt.Sprint(ts)) Expect(err).ToNot(HaveOccurred()) project = helper.CreateRandProject() }) JustAfterEach(func() { helper.DeleteProject(project) helper.DeleteDir(contextNumeric) }) It("should create default named component in a directory with numeric name", func() { helper.CopyExample(filepath.Join("source", "nodejs"), contextNumeric) helper.CmdShouldPass("odo", append(args, "create", "nodejs", "--project", project, "--context", contextNumeric, "--app", "testing")...) helper.CmdShouldPass("odo", append(args, "push", "--context", contextNumeric, "-v4")...) }) }) Context("when creating component with improper memory quantities", func() { JustBeforeEach(func() { project = helper.CreateRandProject() }) JustAfterEach(func() { helper.DeleteProject(project) }) It("should fail gracefully with proper error message", func() { stdError := helper.CmdShouldFail("odo", append(args, "create", "java:8", "backend", "--memory", "1GB", "--project", project, "--context", context)...) Expect(stdError).ToNot(ContainSubstring("panic: cannot parse")) Expect(stdError).To(ContainSubstring("quantities must match the regular expression")) }) }) Context("Creating component using symlink", func() { var symLinkPath string JustBeforeEach(func() { if runtime.GOOS == "windows" { Skip("Skipping test because for symlink creation on platform like Windows, go library needs elevated privileges.") } // create a symlink symLinkName := helper.RandString(10) helper.CreateSymLink(context, filepath.Join(filepath.Dir(context), symLinkName)) symLinkPath = filepath.Join(filepath.Dir(context), symLinkName) project = helper.CreateRandProject() originalDir = helper.Getwd() }) JustAfterEach(func() { // remove the symlink err := os.Remove(symLinkPath) Expect(err).NotTo(HaveOccurred()) helper.DeleteProject(project) helper.Chdir(originalDir) }) It("Should be able to deploy a spring boot uberjar file using symlinks in all odo commands", func() { oc.ImportJavaIS(project) helper.CopyExample(filepath.Join("binary", "java", "openjdk"), context) // create the component using symlink helper.CmdShouldPass("odo", append(args, "create", "java:8", "sb-jar-test", "--project", project, "--binary", filepath.Join(symLinkPath, "sb.jar"), "--context", symLinkPath)...) // Create a URL and push without using the symlink helper.CmdShouldPass("odo", "url", "create", "uberjaropenjdk", "--port", "8080", "--context", symLinkPath) helper.CmdShouldPass("odo", append(args, "push", "--context", symLinkPath)...) routeURL := helper.DetermineRouteURL(symLinkPath) // Ping said URL helper.HttpWaitFor(routeURL, "HTTP Booster", 90, 1) // Delete the component helper.CmdShouldPass("odo", append(args, "delete", "sb-jar-test", "-f", "--context", symLinkPath)...) }) It("Should be able to deploy a wildfly war file using symlinks in some odo commands", func() { helper.CopyExample(filepath.Join("binary", "java", "wildfly"), context) helper.CmdShouldPass("odo", append(args, "create", "wildfly", "javaee-war-test", "--project", project, "--binary", filepath.Join(symLinkPath, "ROOT.war"), "--context", symLinkPath)...) // Create a URL helper.CmdShouldPass("odo", "url", "create", "warfile", "--port", "8080", "--context", context) helper.CmdShouldPass("odo", append(args, "push", "--context", context)...) routeURL := helper.DetermineRouteURL(context) // Ping said URL helper.HttpWaitFor(routeURL, "Sample", 90, 1) // Delete the component helper.CmdShouldPass("odo", append(args, "delete", "javaee-war-test", "-f", "--context", context)...) }) }) }