Style.js
import styled from 'styled-components';

export const MainBodyWrapper = styled.section`
  background: ${props => props.theme.background};
`

export const SinglePostCard = styled.article`
  border: none;
  box-shadow: none;
  background: ${props => props.theme.background};
  .card-image {
    border: solid 1px ${props => props.theme.panel};
    line-height: 0;
    img {
      width: 100%;
    }
  }
  .card-content {
    padding: 50px 100px;
    color: ${props => props.theme.color};
    @media(max-width: 767px) {
      padding: 30px 10px;
    }
  }
  a {
    color: #41a4e6;
  }
  .title {
    font-weight: 800;
    font-size: 28px;
    color: ${props => props.theme.title};
    @media(max-width: 768px) {
      font-size: 20px;
      line-height: 1.3;
    }
    a {
      color: ${props => props.theme.title};
      transition: all 0.4s ease;
      &:hover {
        color: #41a4e6 !important;
      }
    }
  }
  .post-meta {
    font-weight: 800;
    font-size: 12px;
    p, a {
      font-size: 12px;
    }
  }
  .arrow-right {
    border-left: 5px solid #41a4e6;
  }
  .card-footer {
    margin-top: 30px;
    border-top: solid 2px ${props => props.theme.panel};
  }
  .user-wrapper {
    p, a {
      font-size: 13px;
    }
    a {
      color: ${props => props.theme.author};
    }
  }
`
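A minimal rendering sketch, assuming React with styled-components' ThemeProvider; the theme keys mirror the ones referenced above (background, panel, color, title, author) and the hex values are placeholders:

import React from 'react';
import { ThemeProvider } from 'styled-components';
import { MainBodyWrapper, SinglePostCard } from './Style';

// Placeholder theme; only the keys are taken from the styles above.
const theme = {
  background: '#ffffff',
  panel: '#eeeeee',
  color: '#333333',
  title: '#111111',
  author: '#555555',
};

const Post = () => (
  <ThemeProvider theme={theme}>
    <MainBodyWrapper>
      <SinglePostCard>
        <div className="card-content">
          <h2 className="title"><a href="#">Post title</a></h2>
        </div>
      </SinglePostCard>
    </MainBodyWrapper>
  </ThemeProvider>
);

export default Post;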
asserts.js
const assert = require('assert')

function assertErrorMessage(...args) {
  const report = args[0]
  let index
  let message

  if (args.length === 3) {
    index = args[1]
    message = args[2]
  } else {
    index = 0
    message = args[1]
  }

  const errorMessage = `Report should contain message with text "${message}" at ${index} pos. ${printReport(report)}`
  assert.ok(report.messages[index].message.includes(message), errorMessage)
}

function assertNoErrors(report) {
  assert.equal(report.errorCount, 0, `Report must not contain errors. ${printReport(report)}`)
}

function assertNoWarnings(report) {
  assert.equal(report.warningCount, 0, `Report must not contain warnings. ${printReport(report)}`)
}

function assertErrorCount(report, count) {
  assert.equal(
    report.errorCount,
    count,
    `Report must contain ${count} errors. ${printReport(report)}`
  )
}

function assertWarnsCount(report, count) {
  assert.equal(
    report.warningCount,
    count,
    `Report must contain ${count} warnings. ${printReport(report)}`
  )
}

function assertLineNumber(report, line) {
  assert.equal(report.line, line, `Report must be in line ${line}.`)
}

function printReport(report) {
  const messages = report.messages.map((i, index) => `${index + 1}. ${i.message}`)
  return ['Errors / Warnings:', ...messages, ''].join('\n' + ' '.repeat(8))
}

module.exports = {
  assertErrorMessage,
  assertNoWarnings,
  assertNoErrors,
  assertErrorCount,
  assertWarnsCount,
  assertLineNumber
}
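A quick usage sketch, assuming a linter-style report object shaped the way these helpers read it (an `errorCount`, a `warningCount`, and a `messages` array); the sample report values are hypothetical:

const { assertErrorCount, assertErrorMessage, assertNoWarnings } = require('./asserts')

// Hypothetical report, mirroring the fields the helpers above access.
const report = {
  errorCount: 1,
  warningCount: 0,
  messages: [{ message: 'Avoid using tx.origin' }]
}

assertErrorCount(report, 1)
assertErrorMessage(report, 0, 'tx.origin')
assertNoWarnings(report)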
statuses.js
var Joi = require('joi');
var Hoek = require('hoek');
var AuthPlugin = require('../auth');


exports.register = function (server, options, next) {

    options = Hoek.applyToDefaults({ basePath: '' }, options);

    server.route({
        method: 'GET',
        path: options.basePath + '/statuses',
        config: {
            auth: {
                strategy: 'simple',
                scope: 'admin'
            },
            validate: {
                query: {
                    fields: Joi.string(),
                    sort: Joi.string().default('_id'),
                    limit: Joi.number().default(20),
                    page: Joi.number().default(1)
                }
            },
            pre: [
                AuthPlugin.preware.ensureAdminGroup('root')
            ]
        },
        handler: function (request, reply) {

            var Status = request.server.plugins['hapi-mongo-models'].Status;
            var query = {};
            var fields = request.query.fields;
            var sort = request.query.sort;
            var limit = request.query.limit;
            var page = request.query.page;

            Status.pagedFind(query, fields, sort, limit, page, function (err, results) {

                if (err) {
                    return reply(err);
                }

                reply(results);
            });
        }
    });

    server.route({
        method: 'GET',
        path: options.basePath + '/statuses/{id}',
        config: {
            auth: {
                strategy: 'simple',
                scope: 'admin'
            },
            pre: [
                AuthPlugin.preware.ensureAdminGroup('root')
            ]
        },
        handler: function (request, reply) {

            var Status = request.server.plugins['hapi-mongo-models'].Status;

            Status.findById(request.params.id, function (err, status) {

                if (err) {
                    return reply(err);
                }

                if (!status) {
                    return reply({ message: 'Document not found.' }).code(404);
                }

                reply(status);
            });
        }
    });

    server.route({
        method: 'POST',
        path: options.basePath + '/statuses',
        config: {
            auth: {
                strategy: 'simple',
                scope: 'admin'
            },
            validate: {
                payload: {
                    pivot: Joi.string().required(),
                    name: Joi.string().required()
                }
            },
            pre: [
                AuthPlugin.preware.ensureAdminGroup('root')
            ]
        },
        handler: function (request, reply) {

            var Status = request.server.plugins['hapi-mongo-models'].Status;
            var pivot = request.payload.pivot;
            var name = request.payload.name;

            Status.create(pivot, name, function (err, status) {

                if (err) {
                    return reply(err);
                }

                reply(status);
            });
        }
    });

    server.route({
        method: 'PUT',
        path: options.basePath + '/statuses/{id}',
        config: {
            auth: {
                strategy: 'simple',
                scope: 'admin'
            },
            validate: {
                payload: {
                    name: Joi.string().required()
                }
            },
            pre: [
                AuthPlugin.preware.ensureAdminGroup('root')
            ]
        },
        handler: function (request, reply) {

            var Status = request.server.plugins['hapi-mongo-models'].Status;
            var id = request.params.id;
            var update = {
                $set: {
                    name: request.payload.name
                }
            };

            Status.findByIdAndUpdate(id, update, function (err, status) {

                if (err) {
                    return reply(err);
                }

                if (!status) {
                    return reply({ message: 'Document not found.' }).code(404);
                }

                reply(status);
            });
        }
    });

    server.route({
        method: 'DELETE',
        path: options.basePath + '/statuses/{id}',
        config: {
            auth: {
                strategy: 'simple',
                scope: 'admin'
            },
            pre: [
                AuthPlugin.preware.ensureAdminGroup('root')
            ]
        },
        handler: function (request, reply) {

            var Status = request.server.plugins['hapi-mongo-models'].Status;

            Status.findByIdAndDelete(request.params.id, function (err, status) {

                if (err) {
                    return reply(err);
                }

                if (!status) {
                    return reply({ message: 'Document not found.' }).code(404);
                }

                reply({ message: 'Success.' });
            });
        }
    });

    next();
};


exports.register.attributes = {
    name: 'statuses'
};
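A registration sketch for a pre-v17 hapi server (the `register(server, options, next)` signature and `register.attributes` date the plugin to that era); the `./statuses` path, port, and `basePath` value are assumptions, and the 'simple' auth strategy plus the hapi-mongo-models plugin must already be registered:

var Hapi = require('hapi');
var Statuses = require('./statuses'); // path is an assumption

var server = new Hapi.Server();
server.connection({ port: 8000 });

server.register({
    register: Statuses,
    options: { basePath: '/api' }
}, function (err) {

    if (err) {
        throw err;
    }

    // Routes like GET {basePath}/statuses are now available,
    // guarded by the 'simple' strategy with 'admin' scope.
});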
auth.service.ts
import { Injectable } from "@nestjs/common";
import { JwtService } from "@nestjs/jwt";

@Injectable()
export class AuthService {
    constructor(private jwtService: JwtService) {}

    async login() {
        return { access_token: this.jwtService.sign({ content: 'telos_content' }) };
    }
}
__init__.py
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""ResNet models for Keras.
"""

from __future__ import print_function as _print_function

import sys as _sys

from tensorflow.python.keras.applications.resnet import ResNet101
from tensorflow.python.keras.applications.resnet import ResNet152
from tensorflow.python.keras.applications.resnet import ResNet50
from tensorflow.python.keras.applications.resnet import decode_predictions
from tensorflow.python.keras.applications.resnet import preprocess_input

del _print_function
1010.rs
/**
 * 1010. Building Bridges
 *
 * Author: xCrypt0r
 * Language: Rust 2018
 * Memory used: 13,028 KB
 * Running time: 20 ms
 * Solved on: October 3, 2020
 */

#![allow(non_snake_case)]

macro_rules! get_line {
    ( $( $t: ty ),+ ) => {
        {
            let mut line = String::new();
            std::io::stdin().read_line(&mut line).unwrap();
            let mut iter = line.split_whitespace();
            ( $( iter.next().unwrap().parse::<$t>().unwrap() ),+ )
        }
    }
}

fn main() {
    let T = get_line!(i32);
    // memo[i][j] holds the binomial coefficient C(i, j), filled in with
    // Pascal's rule; connecting N west sites to M east sites without
    // crossings leaves exactly C(M, N) choices.
    let mut memo = vec![vec![0; 31]; 31];

    for i in 0..31 {
        memo[i][i] = 1;
        memo[i][0] = 1;
    }

    for i in 1..31 {
        for j in 1..31 {
            memo[i][j] = memo[i - 1][j] + memo[i - 1][j - 1];
        }
    }

    for _ in 0..T {
        let (N, M) = get_line!(usize, usize);

        println!("{}", memo[M][N]);
    }
}
parsers.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright (c) 2013 Qin Xuye <[email protected]>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Created on 2013-6-8

@author: Chine
'''

import time
import json
import urllib
import re
from datetime import datetime, timedelta
from threading import Lock

from cola.core.parsers import Parser
from cola.core.utils import urldecode, beautiful_soup
from cola.core.errors import DependencyNotInstalledError, FetchBannedError
from cola.core.logs import get_logger

from login import WeiboLoginFailure
from bundle import WeiboUserBundle
from storage import DoesNotExist, Q, WeiboUser, Friend,\
    MicroBlog, Geo, UserInfo, WorkInfo, EduInfo,\
    Comment, Forward, Like, ValidationError
from conf import fetch_forward, fetch_comment, fetch_like, fetch_n_comments

try:
    from dateutil.parser import parse
except ImportError:
    raise DependencyNotInstalledError('python-dateutil')

TIMEOUT = 30.0


class WeiboParser(Parser):
    def __init__(self, opener=None, url=None, bundle=None, **kwargs):
        super(WeiboParser, self).__init__(opener=opener, url=url, **kwargs)
        self.bundle = bundle
        self.uid = bundle.label
        self.opener.set_default_timeout(TIMEOUT)
        if not hasattr(self, 'logger') or self.logger is None:
            self.logger = get_logger(name='weibo_parser')

    def _check_url(self, dest_url, src_url):
        return dest_url.split('?')[0] == src_url.split('?')[0]

    def check(self, url, br):
        dest_url = br.geturl()
        if not self._check_url(dest_url, url):
            if dest_url.startswith('http://weibo.com/login.php'):
                raise WeiboLoginFailure('Weibo not login or login expired')
            if dest_url.startswith('http://weibo.com/sorry?usernotexists'):
                self.bundle.exists = False
                return False
        return True

    def get_weibo_user(self):
        if self.bundle.weibo_user is not None:
            return self.bundle.weibo_user

        try:
            self.bundle.weibo_user = getattr(WeiboUser, 'objects').get(uid=self.uid)
        except DoesNotExist:
            self.bundle.weibo_user = WeiboUser(uid=self.uid)
            self.bundle.weibo_user.save()
        return self.bundle.weibo_user


class MicroBlogParser(WeiboParser):
    def parse(self, url=None):
        if self.bundle.exists is False:
            return

        url = url or self.url
        params = urldecode(url)
        try:
            br = self.opener.browse_open(url)
        except Exception as e:
            print(e)
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            raise FetchBannedError('fetch banned by weibo server')
        # self.logger.debug('load %s finish' % url)

        if not self.check(url, br):
            return

        weibo_user = self.get_weibo_user()

        params['_t'] = 0
        params['__rnd'] = str(int(time.time() * 1000))
        page = int(params.get('page', 1))
        pre_page = int(params.get('pre_page', 0))
        count = 15
        if 'pagebar' not in params:
            params['pagebar'] = '0'
            pre_page += 1
        elif params['pagebar'] == '0':
            params['pagebar'] = '1'
        elif params['pagebar'] == '1':
            del params['pagebar']
            pre_page = page
            page += 1
            count = 50
        params['count'] = count
        params['page'] = page
        params['pre_page'] = pre_page

        try:
            data = json.loads(br.response().read())['data']
        except Exception as e:
            print(e)
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            raise FetchBannedError('fetch banned by weibo server')
        # self.logger.debug('load %s finish' % url)
        soup = beautiful_soup(data)

        finished = False

        divs = soup.find_all('div', attrs={'class': 'WB_feed_type'}, mid=True)
        max_id = None
        for div in divs:
            mid = div['mid']
            if len(mid) == 0:
                continue
            max_id = mid

            if 'end_id' not in params:
                params['end_id'] = mid
            if mid in weibo_user.newest_mids:
                finished = True
                break
            if len(self.bundle.newest_mids) < 3:
                self.bundle.newest_mids.append(mid)

            try:
                mblog = getattr(MicroBlog, 'objects').get(Q(mid=mid) & Q(uid=self.uid))
                continue  # assume it has already been crawled
            except DoesNotExist:
                mblog = MicroBlog(mid=mid, uid=self.uid)

            content_div = div.find('div', attrs={
                'class': 'WB_text',
                'node-type': 'feed_list_content'
            })
            for img in content_div.find_all("img", attrs={'type': 'face'}):
                img.replace_with(img['title'])
            mblog.content = content_div.text
            # print(u'Weibo content: ' + mblog.content)

            is_forward = div.get('isforward') == '1'
            if is_forward:
                mblog.omid = div['omid']
                name_a = div.find('a', attrs={
                    'class': 'WB_name',
                    'node-type': 'feed_list_originNick'
                })
                text_a = div.find('div', attrs={
                    'class': 'WB_text',
                    'node-type': 'feed_list_reason'
                })
                if name_a is not None and text_a is not None:
                    mblog.forward = '%s: %s' % (name_a.text, text_a.text)

            mblog.created = parse(div.select('a.S_link2.WB_time')[0]['title'])

            if self.bundle.last_update is None or mblog.created > self.bundle.last_update:
                self.bundle.last_update = mblog.created
            if weibo_user.last_update is not None and \
                    mblog.created <= weibo_user.last_update:
                finished = True
                break

            func_div = div.find_all('div', 'WB_func')[-1]
            action_type_re = lambda t: re.compile("^(feed_list|fl)_%s$" % t)

            likes = func_div.find('a', attrs={'action-type': action_type_re("like")}).text
            likes = likes.strip('(').strip(')')
            likes = 0 if len(likes) == 0 else int(likes)
            mblog.n_likes = likes
            forwards = func_div.find('a', attrs={'action-type': action_type_re("forward")}).text
            if '(' not in forwards:
                mblog.n_forwards = 0
            else:
                mblog.n_forwards = int(forwards.strip().split('(', 1)[1].strip(')'))
            comments = func_div.find('a', attrs={'action-type': action_type_re('comment')}).text
            if '(' not in comments:
                mblog.n_comments = 0
            else:
                mblog.n_comments = int(comments.strip().split('(', 1)[1].strip(')'))

            # fetch geo info
            map_info = div.find("div", attrs={'class': 'map_data'})
            if map_info is not None:
                geo = Geo()
                geo.location = map_info.text.split('-')[0].strip()
                geo_info = urldecode("?" + map_info.find('a')['action-data'])['geo']
                geo.longtitude, geo.latitude = tuple([float(itm) for itm in geo_info.split(',', 1)])
                mblog.geo = geo

            # fetch forwards and comments
            if fetch_forward or fetch_comment or fetch_like:
                query = {'id': mid, '_t': 0, '__rnd': int(time.time() * 1000)}
                query_str = urllib.urlencode(query)
                if fetch_forward and mblog.n_forwards > 0:
                    forward_url = 'http://weibo.com/aj/mblog/info/big?%s' % query_str
                    yield forward_url
                # only fetch comments for weibos whose comment count exceeds fetch_n_comments
                if fetch_comment and mblog.n_comments > fetch_n_comments:
                    comment_url = 'http://weibo.com/aj/comment/big?%s' % query_str
                    yield comment_url
                if fetch_like and mblog.n_likes > 0:
                    query = {'mid': mid, '_t': 0, '__rnd': int(time.time() * 1000)}
                    query_str = urllib.urlencode(query)
                    like_url = 'http://weibo.com/aj/like/big?%s' % query_str
                    yield like_url

            mblog.save()

        if 'pagebar' in params:
            params['max_id'] = max_id
        else:
            del params['max_id']
        # self.logger.debug('parse %s finish' % url)

        # counter add one for the processed weibo list url
        self.counter.inc('processed_weibo_list_page', 1)

        # if not has next page
        if len(divs) == 0 or finished:
            weibo_user = self.get_weibo_user()
            for mid in self.bundle.newest_mids:
                if mid not in weibo_user.newest_mids:
                    weibo_user.newest_mids.append(mid)
            while len(weibo_user.newest_mids) > 3:
                weibo_user.newest_mids.pop()
            weibo_user.last_update = self.bundle.last_update
            weibo_user.save()
            return

        yield '%s?%s' % (url.split('?')[0], urllib.urlencode(params))


class ForwardCommentLikeParser(WeiboParser):
    strptime_lock = Lock()

    def _strptime(self, string, format_):
        self.strptime_lock.acquire()
        try:
            return datetime.strptime(string, format_)
        finally:
            self.strptime_lock.release()

    def parse_datetime(self, dt_str):
        dt = None
        if u'秒' in dt_str:
            sec = int(dt_str.split(u'秒', 1)[0].strip())
            dt = datetime.now() - timedelta(seconds=sec)
        elif u'分钟' in dt_str:
            sec = int(dt_str.split(u'分钟', 1)[0].strip()) * 60
            dt = datetime.now() - timedelta(seconds=sec)
        elif u'今天' in dt_str:
            dt_str = dt_str.replace(u'今天', datetime.now().strftime('%Y-%m-%d'))
            dt = self._strptime(dt_str, '%Y-%m-%d %H:%M')
        elif u'月' in dt_str and u'日' in dt_str:
            this_year = datetime.now().year
            date_str = '%s %s' % (this_year, dt_str)
            if isinstance(date_str, unicode):
                date_str = date_str.encode('utf-8')
            dt = self._strptime(date_str, '%Y %m月%d日 %H:%M')
        else:
            dt = parse(dt_str)
        return dt

    def parse(self, url=None):
        if self.bundle.exists is False:
            return

        url = url or self.url
        try:
            br = self.opener.browse_open(url)
        except Exception as e:
            print(e)
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            raise FetchBannedError('fetch banned by weibo server')

        try:
            jsn = json.loads(br.response().read())
        except ValueError:
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            raise FetchBannedError('fetch banned by weibo server')
        # self.logger.debug('load %s finish' % url)

        try:
            soup = beautiful_soup(jsn['data']['html'])
            current_page = jsn['data']['page']['pagenum']
            n_pages = jsn['data']['page']['totalpage']
        except KeyError:
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            raise FetchBannedError('fetch banned by weibo server')

        if not self.check(url, br):
            return

        decodes = urldecode(url)
        mid = decodes.get('id', decodes.get('mid'))

        mblog = self.bundle.current_mblog
        if mblog is None or mblog.mid != mid:
            try:
                mblog = getattr(MicroBlog, 'objects').get(Q(mid=mid) & Q(uid=self.uid))
            except DoesNotExist:
                mblog = MicroBlog(mid=mid, uid=self.uid)
                mblog.save()

        def set_instance(instance, dl):
            instance.avatar = dl.find('dt').find('img')['src']
            date = dl.find('dd').find(attrs={'class': 'S_txt2'}).text
            date = date.strip().strip('(').strip(')')
            instance.created = self.parse_datetime(date)
            for div in dl.find_all('div'):
                div.extract()
            for span in dl.find_all('span'):
                span.extract()
            instance.content = dl.text.strip()

        counter_type = None
        # print(u'Comments of weibo: ' + mblog.content)
        if url.startswith('http://weibo.com/aj/comment'):
            counter_type = 'comment'
            dls = soup.find_all('dl', mid=True)
            for dl in dls:
                uid = dl.find('a', usercard=True)['usercard'].split("id=", 1)[1]
                comment = Comment(uid=uid)
                set_instance(comment, dl)
                # print(u'Weibo comment: ' + comment.content)
                mblog.comments.append(comment)
        elif url.startswith('http://weibo.com/aj/mblog/info'):
            counter_type = 'forward'
            dls = soup.find_all('dl', mid=True)
            for dl in dls:
                forward_again_a = dl.find('a', attrs={'action-type': re.compile("^(feed_list|fl)_forward$")})
                uid = urldecode('?%s' % forward_again_a['action-data'])['uid']
                forward = Forward(uid=uid, mid=dl['mid'])
                set_instance(forward, dl)
                mblog.forwards.append(forward)
        elif url.startswith('http://weibo.com/aj/like'):
            counter_type = 'like'
            lis = soup.find_all('li', uid=True)
            for li in lis:
                like = Like(uid=li['uid'])
                like.avatar = li.find('img')['src']
                mblog.likes.append(like)

        mblog.save()
        # self.logger.debug('parse %s finish' % url)

        # counter add one for the processed forward or comment or like list url
        if counter_type is not None:
            self.counter.inc('processed_%s_list_page' % counter_type, 1)

        if current_page >= n_pages:
            return

        params = urldecode(url)
        new_params = urldecode('?page=%s' % (current_page + 1))
        params.update(new_params)
        params['__rnd'] = int(time.time() * 1000)
        next_page = '%s?%s' % (url.split('?')[0], urllib.urlencode(params))
        yield next_page


class UserInfoParser(WeiboParser):
    def parse(self, url=None):
        if self.bundle.exists is False:
            return

        url = url or self.url
        try:
            br = self.opener.browse_open(url)
        except Exception as e:
            print(e)
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            raise FetchBannedError('fetch banned by weibo server')
        # self.logger.debug('load %s finish' % url)
        soup = beautiful_soup(br.response().read())

        if not self.check(url, br):
            return

        weibo_user = self.get_weibo_user()
        info = weibo_user.info
        if info is None:
            weibo_user.info = UserInfo()

        new_style = False

        profile_div = None
        career_div = None
        edu_div = None
        tags_div = None
        for script in soup.find_all('script'):
            text = script.text
            if text.startswith('FM.view'):
                text = text.strip().replace(';', '').replace('FM.view(', '')[:-1]
                data = json.loads(text)
                domid = data['domid']
                if domid.startswith('Pl_Official_LeftInfo__'):
                    info_soup = beautiful_soup(data['html'])
                    info_div = info_soup.find('div', attrs={'class': 'profile_pinfo'})
                    for block_div in info_div.find_all('div', attrs={'class': 'infoblock'}):
                        block_title = block_div.find('form').text.strip()
                        if block_title == u'基本信息':
                            profile_div = block_div
                        elif block_title == u'工作信息':
                            career_div = block_div
                        elif block_title == u'教育信息':
                            edu_div = block_div
                        elif block_title == u'标签信息':
                            tags_div = block_div
                elif domid.startswith('Pl_Official_PersonalInfo__'):
                    new_style = True
                    info_soup = beautiful_soup(data['html'])
                    for block_div in info_soup.find_all('div', attrs={'class': 'WB_cardwrap'}):
                        block_title_div = block_div.find('h4', attrs={'class': 'obj_name'})
                        if block_title_div is None:
                            block_title_div = block_div.find('div', attrs={'class': 'obj_name'})\
                                .find('h2')
                        if block_title_div is None:
                            continue
                        block_title = block_title_div.text.strip()
                        inner_div = block_div.find('div', attrs={'class': 'WB_innerwrap'})
                        if block_title == u'基本信息':
                            profile_div = inner_div
                        elif block_title == u'工作信息':
                            career_div = inner_div
                        elif block_title == u'教育信息':
                            edu_div = inner_div
                        elif block_title == u'标签信息':
                            tags_div = inner_div
                elif domid == 'Pl_Official_Header__1':
                    header_soup = beautiful_soup(data['html'])
                    weibo_user.info.avatar = header_soup.find('div', attrs={'class': 'pf_head_pic'})\
                        .find('img')['src']
                    weibo_user.info.n_follows = int(header_soup.find('ul', attrs={'class': 'user_atten'})\
                        .find('strong', attrs={'node-type': 'follow'}).text)
                    weibo_user.info.n_fans = int(header_soup.find('ul', attrs={'class': 'user_atten'})\
                        .find('strong', attrs={'node-type': 'fans'}).text)
                elif domid.startswith('Pl_Core_T8CustomTriColumn__'):
                    # new style friends info
                    header_soup = beautiful_soup(data['html'])
                    tds = header_soup.find('table', attrs={'class': 'tb_counter'})\
                        .find_all('td')
                    weibo_user.info.n_follows = int(tds[0].find('strong').text)
                    weibo_user.info.n_fans = int(tds[1].find('strong').text)
                elif domid.startswith('Pl_Official_Headerv6__'):
                    # new style avatar info
                    header_soup = beautiful_soup(data['html'])
                    weibo_user.info.avatar = header_soup.find('p', attrs='photo_wrap')\
                        .find('img')['src']
            elif 'STK' in text:
                text = text.replace('STK && STK.pageletM && STK.pageletM.view(', '')[:-1]
                data = json.loads(text)
                pid = data['pid']
                if pid == 'pl_profile_infoBase':
                    profile_div = beautiful_soup(data['html'])
                elif pid == 'pl_profile_infoCareer':
                    career_div = beautiful_soup(data['html'])
                elif pid == 'pl_profile_infoEdu':
                    edu_div = beautiful_soup(data['html'])
                elif pid == 'pl_profile_infoTag':
                    tags_div = beautiful_soup(data['html'])
                elif pid == 'pl_profile_photo':
                    soup = beautiful_soup(data['html'])
                    weibo_user.info.avatar = soup.find('img')['src']

        profile_map = {
            u'昵称': {'field': 'nickname'},
            u'所在地': {'field': 'location'},
            u'性别': {'field': 'sex',
                    'func': lambda s: True if s == u'男' else False},
            u'生日': {'field': 'birth'},
            u'博客': {'field': 'blog'},
            u'个性域名': {'field': 'site'},
            u'简介': {'field': 'intro'},
            u'邮箱': {'field': 'email'},
            u'QQ': {'field': 'qq'},
            u'MSN': {'field': 'msn'}
        }
        if profile_div is not None:
            if not new_style:
                divs = profile_div.find_all(attrs={'class': 'pf_item'})
            else:
                divs = profile_div.find_all('li', attrs={'class': 'li_1'})
            for div in divs:
                if not new_style:
                    k = div.find(attrs={'class': 'label'}).text.strip()
                    v = div.find(attrs={'class': 'con'}).text.strip()
                else:
                    k = div.find('span', attrs={'class': 'pt_title'}).text.strip().strip(u':')
                    d = div.find('span', attrs={'class': 'pt_detail'})
                    if d:
                        v = d.text.strip()
                    else:
                        v = div.find('a').text.strip()
                if k in profile_map:
                    if k == u'个性域名' and '|' in v:
                        v = v.split('|')[1].strip()
                    func = (lambda s: s) \
                        if 'func' not in profile_map[k] \
                        else profile_map[k]['func']
                    v = func(v)
                    setattr(weibo_user.info, profile_map[k]['field'], v)

        weibo_user.info.work = []
        if career_div is not None:
            if not new_style:
                for div in career_div.find_all(attrs={'class': 'con'}):
                    work_info = WorkInfo()
                    ps = div.find_all('p')
                    for p in ps:
                        a = p.find('a')
                        if a is not None:
                            work_info.name = a.text
                            text = p.text
                            if '(' in text:
                                work_info.date = text.strip().split('(')[1].strip(')')
                        else:
                            text = p.text
                            if text.startswith(u'地区:'):
                                work_info.location = text.split(u':', 1)[1]
                            elif text.startswith(u'职位:'):
                                work_info.position = text.split(u':', 1)[1]
                            else:
                                work_info.detail = text
                    weibo_user.info.work.append(work_info)
            else:
                li = career_div.find('li', attrs={'class': 'li_1'})
                for span in li.find_all('span', attrs={'class': 'pt_detail'}):
                    work_info = WorkInfo()
                    text = span.text
                    a = span.find('a')
                    if a is not None:
                        work_info.name = a.text
                    if '(' in text:
                        work_info.date = text.strip().split('(')[1]\
                            .replace('\r', '')\
                            .replace('\n', '')\
                            .replace('\t', '')\
                            .split(')', 1)[0]
                    for l in text.split('\r\n'):
                        l = l.strip()
                        if len(l) == 0:
                            continue
                        if l.startswith(u'地区:'):
                            work_info.location = l.split(u':', 1)[1]
                        elif l.startswith(u'职位:'):
                            work_info.position = l.split(u':', 1)[1]
                        else:
                            work_info.detail = text.replace('\r', '')\
                                .replace('\n', '')\
                                .replace('\t', '')\
                                .strip()
                    weibo_user.info.work.append(work_info)

        weibo_user.info.edu = []
        if edu_div is not None:
            if not new_style:
                for div in edu_div.find_all(attrs={'class': 'con'}):
                    edu_info = EduInfo()
                    ps = div.find_all('p')
                    for p in ps:
                        a = p.find('a')
                        text = p.text
                        if a is not None:
                            edu_info.name = a.text
                            if '(' in text:
                                edu_info.date = text.strip().split('(')[1].strip().strip(')')
                        else:
                            edu_info.detail = text
                    weibo_user.info.edu.append(edu_info)
            else:
                span = edu_div.find('li', attrs={'class': 'li_1'})\
                    .find('span', attrs={'class': 'pt_detail'})
                text = span.text
                names = []
                for a in span.find_all('a'):
                    names.append(a.text)
                for idx, name in enumerate(names):
                    start_pos = text.find(name) + len(name)
                    if idx < len(names) - 1:
                        end_pos = text.find(names[idx + 1], start_pos)
                    else:
                        end_pos = len(text)
                    t = text[start_pos: end_pos]
                    edu_info = EduInfo()
                    edu_info.name = name
                    if '(' in text:
                        edu_info.date = t.strip().split('(')[1]\
                            .replace('\r', '')\
                            .replace('\n', '')\
                            .replace('\t', '')\
                            .split(')', 1)[0]
                        t = t[t.find(')') + 1:]
                    text = text[end_pos:]
                    edu_info.detail = t.replace('\r', '').replace('\n', '')\
                        .replace('\t', '').strip()
                    weibo_user.info.edu.append(edu_info)

        weibo_user.info.tags = []
        if tags_div is not None:
            if not new_style:
                for div in tags_div.find_all(attrs={'class': 'con'}):
                    for a in div.find_all('a'):
                        weibo_user.info.tags.append(a.text)
            else:
                for a in tags_div.find('span', attrs={'class': 'pt_detail'}).find_all('a'):
                    weibo_user.info.tags.append(a.text.strip())

        weibo_user.save()
        # self.logger.debug('parse %s finish' % url)

        # counter add one for the profile url
        self.counter.inc('processed_profile_page', 1)


class UserFriendParser(WeiboParser):
    def parse(self, url=None):
        if self.bundle.exists is False:
            return

        url = url or self.url
        try:
            br = self.opener.browse_open(url)
        except Exception as e:
            print(e)
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            raise FetchBannedError('fetch banned by weibo server')
        # self.logger.debug('load %s finish' % url)
        soup = beautiful_soup(br.response().read())

        if not self.check(url, br):
            return

        weibo_user = self.get_weibo_user()

        html = None
        decodes = urldecode(url)
        is_follow = True
        is_new_mode = False
        is_banned = True
        for script in soup.find_all('script'):
            text = script.text
            if text.startswith('FM.view'):
                if is_banned:
                    is_banned = False
                text = text.strip().replace(';', '').replace('FM.view(', '')[:-1]
                data = json.loads(text)
                domid = data['domid']
                if domid.startswith('Pl_Official_LeftHisRelation__') or \
                        domid.startswith('Pl_Official_HisRelation__'):
                    html = beautiful_soup(data['html'])
                if 'relate' in decodes and decodes['relate'] == 'fans':
                    is_follow = False
                is_new_mode = True
            elif 'STK' in text:
                if is_banned:
                    is_banned = False
                text = text.replace('STK && STK.pageletM && STK.pageletM.view(', '')[:-1]
                data = json.loads(text)
                if data['pid'] == 'pl_relation_hisFollow' or \
                        data['pid'] == 'pl_relation_hisFans':
                    html = beautiful_soup(data['html'])
                if data['pid'] == 'pl_relation_hisFans':
                    is_follow = False

        if is_banned:
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            raise FetchBannedError('fetch banned by weibo server')

        ul = None
        try:
            ul = html.find(attrs={'class': 'cnfList', 'node-type': 'userListBox'})
            if ul is None:
                ul = html.find(attrs={'class': 'follow_list', 'node-type': 'userListBox'})
        except AttributeError as e:
            print('Sleeping for 10 minutes!')
            time.sleep(60 * 10)
            if br.geturl().startswith('http://e.weibo.com'):
                return
            raise e

        if ul is None:
            if is_follow is True:
                if is_new_mode:
                    yield 'http://weibo.com/%s/follow?relate=fans' % self.uid
                else:
                    yield 'http://weibo.com/%s/fans' % self.uid
            return

        current_page = decodes.get('page', 1)
        if current_page == 1:
            if is_follow:
                weibo_user.follows = []
            else:
                weibo_user.fans = []
        for cls in ('S_line1', 'S_line2'):
            for li in ul.find_all(attrs={'class': cls, 'action-type': 'itemClick'}):
                data = dict([l.split('=') for l in li['action-data'].split('&')])

                friend = Friend()
                friend.uid = data['uid']
                friend.nickname = data['fnick']
                friend.sex = True if data['sex'] == u'm' else False

                yield WeiboUserBundle(str(friend.uid))

                if is_follow:
                    weibo_user.follows.append(friend)
                else:
                    weibo_user.fans.append(friend)

        weibo_user.save()
        # self.logger.debug('parse %s finish' % url)

        # counter add one for the friend url
        counter_type = 'follows' if is_follow else 'fans'
        self.counter.inc('processed_%s_list_page' % counter_type, 1)

        pages = html.find('div', attrs={'class': 'W_pages', 'node-type': 'pageList'})
        if pages is None:
            pages = html.find('div', attrs={'class': 'WB_cardpage', 'node-type': 'pageList'})
        if pages is not None:
            a = pages.find_all('a')
            if len(a) > 0:
                next_ = a[-1]
                if next_['class'] == ['W_btn_c'] or 'next' in next_['class']:
                    decodes['page'] = int(decodes.get('page', 1)) + 1
                    query_str = urllib.urlencode(decodes)
                    url = '%s?%s' % (url.split('?')[0], query_str)
                    yield url
                    return

        if is_follow is True:
            if is_new_mode:
                yield 'http://weibo.com/%s/follow?relate=fans' % self.uid
            else:
                yield 'http://weibo.com/%s/fans' % self.uid
stepslist.component.ts
import { Component, Input, Output, OnChanges, SimpleChanges, EventEmitter } from '@angular/core';
import * as _ from 'lodash';
import EditorConfig from 'src/app/@models/editorconfig.model';
import WorkflowHelper from '../../@services/workflowhelper.service';
import { ModalYamlPreviewComponent } from '../../@modals/modal-yaml-preview/modal-yaml-preview.component';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';

@Component({
    selector: 'steps-list',
    templateUrl: 'stepslist.html',
})
export class StepsListComponent implements OnChanges {
    @Input() steps: any[];
    @Input() selectedStep: string;
    // @Output() public select: EventEmitter<any> = new EventEmitter();

    displayDetails: { [key: string]: boolean } = {};
    filter: any = {
        tags: []
    };
    editorConfigPayload: EditorConfig = {
        readonly: true,
        maxLines: 10,
    };
    editorConfigError: EditorConfig = {
        readonly: true,
        maxLines: 10,
    };
    editorConfigChildren: EditorConfig = {
        readonly: true,
        maxLines: 20,
    };
    filteredStepNames: string[] = [];
    states: any = null;
    JSON = JSON;
    presentStates: string[] = [];

    constructor(private modalService: NgbModal) {
        this.states = WorkflowHelper.getMapStates();
    }

    ngOnChanges(changes: SimpleChanges) {
        if (changes.steps && this.steps) {
            this.filterSteps();
            this.setPresentStates();
        } else if (changes.selectedStep) {
            _.remove(this.filter.tags, (tag: string) => {
                return tag.startsWith('Step:');
            });
            if (this.selectedStep) {
                this.displayDetails[this.selectedStep] = true;
                this.filter.tags.push(`Step:${this.selectedStep}`);
                this.filterSteps();
            }
            this.filterSteps();
        }
    }

    previewStepDetails(step: any) {
        const previewModal = this.modalService.open(ModalYamlPreviewComponent, { size: 'xl' });
        previewModal.componentInstance.value = step;
        previewModal.componentInstance.title = `Step - ${step.name}`;
        previewModal.result.catch((err) => {
            console.log(err);
        });
    }

    setPresentStates() {
        this.presentStates = [];
        Object.keys(this.steps).forEach((key: string) => {
            this.presentStates.push(`State:${this.steps[key].state}`);
        });
        this.presentStates = _.uniq(this.presentStates);
    }

    getIcon(state: string) {
        return WorkflowHelper.getState(state).icon;
    }

    filterSteps() {
        const statuses = [];
        const words = [];
        let step = '';
        this.filter.tags.forEach((s: string) => {
            if (s.startsWith('State:')) {
                statuses.push(s.split(':')[1]);
            } else if (s.startsWith('Step:')) {
                step = s.split(':')[1];
            } else {
                words.push(s);
            }
        });
        this.filteredStepNames = _.compact(_.map(this.steps, (i: any, k: string) => {
            if (!this.filter.tags.length) {
                return k;
            }
            let isValid = true;
            if (statuses.length && statuses.indexOf(i.state) === -1) {
                isValid = false;
            }
            if (step && step !== k) {
                isValid = false;
            }
            words.forEach((w: string) => {
                if (k.toLowerCase().indexOf(w.toLowerCase()) === -1) {
                    isValid = false;
                }
            });
            if (isValid) {
                return k;
            }
            return null;
        }));
    }
}
coronatest_analyze_csv.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Analyze CSV file into scores.

Created on Sat Feb 12 22:15:29 2022 // @hk_nien
"""
from pathlib import Path
import os
import re
import sys
import pandas as pd
import numpy as np

PCODES = dict([
    # Regio Noord
    (1011, 'Amsterdam'),
    (1625, 'Hoorn|Zwaag'),
    (1811, 'Alkmaar'),
    (7471, 'Goor'),
    (7556, 'Hengelo'),
    (7903, 'Hoogeveen'),
    (7942, 'Meppel'),
    (8011, 'Zwolle'),
    (8232, 'Lelystad'),
    (8442, 'Heerenveen'),
    (8911, 'Leeuwarden'),
    (9291, 'Kollum'),
    (9501, 'Stadskanaal'),
    (9726, 'Groningen'),

    # Regio Midden
    (2406, 'Alphen a/d Rijn'),
    (2515, 'Den Haag'),
    (3013, 'Rotterdam'),
    (3511, 'Utrecht'),
    (3901, 'Veenendaal'),
    ((7137, 7131), 'Lichtenvoorde|Groenlo'),
    (7311, 'Apeldoorn'),

    # Regio Zuid
    (4325, 'Renesse'),
    (4462, 'Goes'),
    (4701, 'Roosendaal'),
    (5038, 'Tilburg'),
    (5401, 'Uden'),
    (5611, 'Eindhoven'),
    (5801, 'Oostrum'),
    (6101, 'Echt'),
    (6229, 'Maastricht'),
    (6541, 'Nijmegen'),
    ])


def get_bad_scan_times():
    """Return list of Timestamps with bad scan times, from CSV data."""
    df = pd.read_csv('data-ggd/ggd_bad_scans.txt', comment='#')
    tstamps = pd.to_datetime(df['Timestamp']).to_list()
    return tstamps


def _mean_time(ts_list):
    """Return mean timestamp value from list of timestamps."""
    ts0 = ts_list[0]
    delta_sum = pd.Timedelta(0)
    for ts in ts_list:
        delta_sum += (ts - ts0)
    ts_mean = ts0 + delta_sum / len(ts_list)
    return ts_mean


def _delta_time_hhmm(hm):
    """Convert 'hh:mm' string to TimeDelta."""
    return pd.Timedelta(f'{hm}:00')


def _summary_to_scores(summary):
    """Convert summary from _read_log to scores dict and effective timestamp.

    Parameters:

    - summary: dict with int(pc4) -> [(query_time, appt_time), ...]

    Return:

    - scores dict: int(pc4) -> score (int or float or '?')
    - timestamp: middle query timestamp of this run.
    """
    # Convert to number codes.
    scores = {k: '?' for k in PCODES}
    multi_pcs = {}  # pc4 -> (pc4[0], pc4[1], ...)
    for pc in PCODES:
        if isinstance(pc, tuple):
            for pc1 in pc:
                multi_pcs[pc1] = pc

    qtms = []
    dhm = _delta_time_hhmm
    for pc4, vlist in summary.items():
        pc4 = int(pc4)
        if pc4 not in scores:
            if pc4 in multi_pcs:
                pc4_key = multi_pcs[pc4]
            else:
                print(f'{pc4} not in list...')
                continue
        else:
            pc4_key = pc4
        if len(vlist) == 0:
            scores[pc4_key] = 7
            continue
        qtm = _mean_time([v[0] for v in vlist])  # query time
        qtms.append(qtm)
        atm = min(v[1] for v in vlist)  # earliest appointment time
        qtm_00 = pd.Timestamp(qtm.strftime('%Y-%m-%dT00:00'))
        thresholds = [
            (3, qtm_00 + dhm('23:59')),
            (4, qtm + dhm('24:00')),
            (5, qtm_00 + dhm('48:00')),
            (6, qtm + dhm('48:00')),
            (6.3, qtm_00 + dhm('72:00')),
            (6.7, qtm + dhm('72:00')),
            (7, atm)
            ]
        if qtm.hour < 9:
            thresholds.insert(0, (1, qtm_00 + dhm('13:00')))
        elif qtm.hour < 13:
            thresholds.insert(0, (1, qtm + dhm('4:00')))
        elif qtm.hour < 17:
            thresholds.insert(0, (1, qtm_00 + dhm('24:00')))
            thresholds.insert(1, (2, qtm + dhm('20:00')))
        else:
            thresholds.insert(0, (1, qtm_00 + dhm('24:00')))
            thresholds.insert(1, (2, qtm_00 + dhm('37:00')))

        for s, tm in thresholds:
            if atm < tm:
                scores[pc4_key] = s
                break

    if len(qtms) == 0:
        qtm_mid = pd.Timestamp(None)
    else:
        qtm_min = min(qtms)
        qtm_mid = qtm_min + (max(qtms) - qtm_min)/2
    return scores, qtm_mid


def _get_min_wait(summary):
    """Return minimum and median wait Timedelta between scan time and appointment.

    summary is dict of pc4 -> list of timestamps

    No data -> 999 h. For the median, NaT is counted as infinite.
    """
    wtimes = []
    for _, vlist in summary.items():
        wtimes_this = [atm - qtm for qtm, atm in vlist]
        wtimes.append(
            min(wtimes_this) if wtimes_this else pd.Timedelta(99, 'h')
            )
    minwait = min(wtimes) if wtimes else 999
    medwait = pd.Timedelta(np.median(wtimes))
    return minwait, medwait


def load_csv(csv_fname):
    """Return DataFrame and list of start times (+1)."""
    df = pd.read_csv(csv_fname, comment='#')
    df['req_pc4'] = df['req_pc4'].astype(int)
    for c in df.columns:
        if c.endswith('_time') or c.endswith('_date'):
            df[c] = pd.to_datetime(df[c])
        else:
            df.loc[df[c].isna(), c] = None

    # start_tms: list of scan start times (plus one extra at the end)
    start_tms = df.loc[df['scan_time'].diff() > pd.Timedelta('10 min'), 'scan_time']
    start_tms = [df.iloc[0]['scan_time']] + list(start_tms)
    start_tms += [df.iloc[-1]['scan_time'] + pd.Timedelta('1 min')]
    return df, start_tms


def load_multi_csvs(csv_fnames):
    """Return DataFrame and list of start times (+1)"""
    dfs = []
    start_tms = []
    for f in csv_fnames:
        df, st = load_csv(f)
        dfs.append(df)
        start_tms.extend(st[:-1])
    df = pd.concat(dfs).reset_index()
    start_tms.append(df.iloc[-1]['scan_time'] + pd.Timedelta('1 min'))
    return df, start_tms


def get_scan_scores(df, tm_range):
    """Get scan scores as pc4 -> score dict.

    Parameters:

    - df: DataFrame with scan_time, req_date, req_pc4, opt0_short_addr,
      opt0_time, opt0_loc_id, etc.
    - tm_range: (tm_start, tm_stop) timestamps.

    Return:

    - tstamp: timestamp of the scan (mid-point)
    - scores: dict of pc4->score
    - min_wait: Timedelta of minimum wait time from scan to appointment
    """
    mask = (df['scan_time'] >= tm_range[0]) & (df['scan_time'] < tm_range[1])
    df1 = df.loc[mask]
    summary = {}
    for pc4, city_re in PCODES.items():
        pc4_tup = (pc4,) if isinstance(pc4, int) else pc4
        options = []
        req_pc4 = None
        for _, row in df1.loc[df1['req_pc4'].isin(pc4_tup)].iterrows():
            req_pc4 = int(row['req_pc4'])
            for i in range(3):
                addr = row[f'opt{i}_short_addr']
                if addr and re.match(f'{city_re}$', addr[5:]):
                    options.append((row['scan_time'], row[f'opt{i}_time']))
        if req_pc4 is not None:
            summary[req_pc4] = options
    scores, tstamp = _summary_to_scores(summary)
    if pd.isna(tstamp):
        tstamp = df1.iloc[len(df1)//2]['scan_time']
    minwait, medwait = _get_min_wait(summary)
    if medwait == 999:
        medwait = pd.Timedelta(None)
    return tstamp, scores, minwait, medwait


def get_scan_scores_df(df, tm_ranges, decimal_comma=True):
    """Get scan scores as dataframe, from csv dataframe.

    Blacklisted scan times are dropped.

    Parameters:

    - df: DataFrame with scan_time, req_date, req_pc4, opt0_short_addr,
      opt0_time, opt0_loc_id, etc.
    - tm_ranges: list of timestamps (+one at the end) with boundaries
      of timestamp ranges.
    - decimal_comma: True to have string values 6,3 rather than float 6.3.

    Return:

    - Dataframe with scores, date_str, time_str, pc4, min_wait, med_wait as columns.
    """
    n = len(tm_ranges)
    records = []
    index = []
    minwait_hs = []
    medwait_hs = []
    bad_stimes = get_bad_scan_times()
    for i in range(n-1):
        tm_ra = tm_ranges[i:i+2]
        is_ok = True
        for tm in bad_stimes:
            if tm_ra[0] <= tm < tm_ra[1]:
                is_ok = False
                break
        if not is_ok:
            print(f'Dropped scan at {tm_ra[0].strftime("%Y-%m-%d %H:%M")}')
            continue
        tm, scores, minwait, medwait = get_scan_scores(df, tm_ra)
        records.append(scores)
        index.append(tm)
        minwait_hs.append(minwait.total_seconds() / 3600)
        medwait_hs.append(medwait.total_seconds() / 3600)
    dates = [t.strftime('%Y-%m-%d') for t in index]
    times = [t.strftime('%H:%M') for t in index]
    sdf = pd.DataFrame.from_records(records)
    sdf.insert(0, 'Time', times)
    sdf.insert(0, 'Date', dates)
    sdf['min_wait_h'] = np.around(minwait_hs, 2)
    sdf['med_wait_h'] = np.around(medwait_hs, 2)
    sdf.loc[sdf['min_wait_h'].isna(), 'min_wait_h'] = 999
    sdf.columns = [
        ('/'.join([str(x) for x in c]) if isinstance(c, tuple) else c)
        for c in sdf.columns
        ]
    if decimal_comma:
        for c in sdf.columns[2:]:
            sdf[c] = sdf[c].astype(str)
            sdf[c] = sdf[c].str.replace('.', ',', regex=False)
            sdf[c] = sdf[c].str.replace(',0$', '', regex=True)
            sdf[c] = sdf[c].str.replace('?', '', regex=False)
    return sdf


if __name__ == '__main__':
    in_spyder = ('SPYDER_ARGS' in os.environ)
    csv_fnames = sorted(Path('data-ggd').glob('ggd_scan-????-W??.csv'))
    do_all = ('--all' in sys.argv)
    do_all = do_all or in_spyder and input('(A)ll or latest?').lower() == 'a'
    if do_all:
        df, start_tms = load_multi_csvs(csv_fnames)
        sdf = get_scan_scores_df(df, start_tms).iloc[::-1]
    else:
        df, start_tms = load_csv(csv_fnames[-1])
        sdf = get_scan_scores_df(df, start_tms[-2:])
    print(sdf)
    if len(sdf) > 1:
        sdf.to_clipboard(index=False)
        print('Copied to clipboard including headers')
    elif len(sdf) == 1:
        sdf.iloc[[0], 2:].to_clipboard(header=False, index=False)
        print('Copied to clipboard, scores only.')
    else:
        print('No output.')

    if not in_spyder:
        # Note: in Spyder, copy/paste will stall while input is blocked.
        input('Press Enter to quit and clear clipboard.')
listitemview.js
/**
 * @license Copyright (c) 2003-2018, CKSource - Frederico Knabben. All rights reserved.
 * For licensing, see LICENSE.md.
 */

/**
 * @module ui/list/listitemview
 */

import View from '../view';

/**
 * The list item view class.
 *
 * @extends module:ui/view~View
 */
export default class ListItemView extends View {
	/**
	 * @inheritDoc
	 */
	constructor( locale ) {
		super( locale );

		/**
		 * Collection of the child views inside of the list item {@link #element}.
		 *
		 * @readonly
		 * @member {module:ui/viewcollection~ViewCollection}
		 */
		this.children = this.createCollection();

		this.setTemplate( {
			tag: 'li',

			attributes: {
				class: [
					'ck',
					'ck-list__item'
				]
			},

			children: this.children
		} );
	}

	/**
	 * Focuses the list item.
	 */
	focus() {
		this.children.first.focus();
	}
}
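A usage sketch, assuming the CKEditor 5 UI package layout of the same era; ButtonView stands in for any child view, and the import paths are assumptions:

import Locale from '@ckeditor/ckeditor5-utils/src/locale';
import ButtonView from '../button/buttonview';
import ListItemView from './listitemview';

const locale = new Locale();
const item = new ListItemView( locale );
const button = new ButtonView( locale );

// The template renders the children collection inside the <li>.
item.children.add( button );
item.render();

// Focusing the item delegates to its first child, the button.
item.focus();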
index.js
/**
 * Shared singleton
 * @com
 */
'use strict';

define(['module'], function (module) {
    var objApp = Object.create(null, {
        ipUrl: {
            value: '',
            writable: false,
            configurable: false
        }
    });

    (function() {
        this.init = function() {
            return '加载成功!'
        };

        this.initAjax = function(url, params, type, successFun) {
            $.ajax({
                url: objApp.ipUrl + url,
                data: params,
                type: type,
                cache: true,
                async: true,
                dataType: 'json',
                success: function(data) {
                    if (successFun && typeof(successFun) === 'function') {
                        successFun(data);
                    }
                }
            });
        };

        this.addEvent = function(obj, type, handle) {
            try {
                obj.addEventListener(type, handle, false);
            } catch(e) {
                try {
                    obj.attachEvent('on' + type, handle);
                } catch(e) {
                    // obj['on' + type] = handle;
                }
            }
        };

        this.parseURL1 = function (parm) {
            var addr = {};
            var query = window.location.href.split("?");
            if (query.length > 1) {
                var buf = query[1].split("&");
                for (var i = 0; i < buf.length; i++) {
                    var tmp = buf[i].split("=");
                    addr[tmp[0]] = tmp[1];
                    if (parm = addr[tmp[0]]) {
                        return tmp[1];
                    }
                }
            }
        },

        this.parseURL = function(urlParameter) {
            var _url = window.location.href.split('?')[1];
            if (_url !== undefined) {
                var _index;
                var _arr = _url.split('&');
                for (var i = 0, _len = _arr.length; i < _len; i++) {
                    if (_arr[i].indexOf(urlParameter + '=') >= 0) {
                        _index = i;
                        break;
                    } else {
                        _index = -1;
                    }
                }
                if (_index >= 0) {
                    var _key = _arr[_index].split('=')[1];
                    return _key;
                }
            }
        };

        this.isArray = Array.isArray || function(obj) {
            return Object.prototype.toString.call(obj) === '[object Array]';
        };
    }).apply(objApp);

    module.exports = objApp;
});

/**
 * jQ shared component plugin development
 * @$.fn.xxx..
 **/
(function($, win, doc) {
    /**
     * $('#shclFireballs').shCircleLoader();
     * <div id="shclFireballs"></div>
     * @param first
     * @param second
     */
    $.fn.shCircleLoader = function(first, second) {
        var defaultNamespace = "shcl",
            id = 1,
            sel = $(this);

        // Destroy the loader
        if (first === "destroy") {
            sel.find("." + defaultNamespace).detach();
            return;
        // Show progress status into the center
        } else if ((first === "progress") && (typeof second !== "undefined")) {
            sel.each(function() {
                var el = $(this),
                    outer = el.find('.' + defaultNamespace);
                if (!outer.get(0)) return;
                if (!el.find('span').get(0)) outer.append("<span></span>");
                var span = outer.find('span').last();
                span.html(second).css({
                    position: "absolute",
                    display: "block",
                    left: Math.round((outer.width() - span.width()) / 2) + "px",
                    top: Math.round((outer.height() - span.height()) / 2) + "px"
                });
            });
            return;
        }

        // Default options
        var o = {
            namespace: defaultNamespace,
            radius: "auto", // "auto" - calculate from selector's width and height
            dotsRadius: "auto",
            color: "auto", // "auto" - get from selector's color CSS property; null - do not set
            dots: 12,
            duration: 1,
            clockwise: true,
            externalCss: false, // true - don't apply CSS from the script
            keyframes: '0%{{prefix}transform:scale(1)}80%{{prefix}transform:scale(.3)}100%{{prefix}transform:scale(1)}',
            uaPrefixes: ['o', 'ms', 'webkit', 'moz', '']
        };
        $.extend(o, first);

        // Usable options (for better YUI compression)
        var cl = o.color,
            ns = o.namespace,
            dots = o.dots,
            eCss = o.externalCss,
            ua = o.uaPrefixes,
            // Helper functions
            no_px = function(str) {
                return str.replace(/(.*)px$/i, "$1");
            },
            parseCss = function(text) {
                var i, prefix, ret = "";
                for (i = 0; i < ua.length; i++) {
                    prefix = ua[i].length ? ("-" + ua[i] + "-") : "";
                    ret += text.replace(/\{prefix\}/g, prefix);
                }
                return ret;
            },
            prefixedCss = function(property, value) {
                var ret = {};
                if (typeof property === "object") {
                    // assumed reconstruction: this branch is missing in the source;
                    // the object form is implied by the prefixedCss({...}) call below,
                    // expanding each property of the map with every vendor prefix
                    var key, j, p;
                    for (key in property) {
                        for (j = 0; j < ua.length; j++) {
                            p = ua[j].length ? ("-" + ua[j] + "-") : "";
                            ret[p + key] = property[key];
                        }
                    }
                } else {
                    var i, prefix;
                    for (i = 0; i < ua.length; i++) {
                        prefix = ua[i].length ? ("-" + ua[i] + "-") : "";
                        ret[prefix + property] = value;
                    }
                }
                return ret;
            };

        // Get unexisting ID
        while ($('#' + ns + id).get(0)) { id++; }

        // Create animation CSS
        if (!eCss) {
            var kf = o.keyframes.replace(/\s+$/, "").replace(/^\s+/, "");
            // Test if the first keyframe (0% or "from") has visibility property. If not - add it.
            if (!/(\;|\{)\s*visibility\s*\:/gi.test(kf))
                kf = /^(0+\%|from)\s*\{/i.test(kf) ?
                    kf.replace(/^((0+\%|from)\s*\{)(.*)$/i, "$1visibility:visible;$3") :
                    (/\s+(0+\%|from)\s*\{/i.test(kf) ?
                        kf.replace(/(\s+(0+\%|from)\s*\{)/i, "$1visibility:visible;") :
                        ("0%{visibility:visible}" + kf));
            $($('head').get(0) ? 'head' : 'body').append('<style id="' + ns + id + '" type="text/css">' +
                parseCss('@{prefix}keyframes ' + ns + id + '_bounce{' + kf + '}') + '</style>');
        }

        // Create loader
        sel.each(function() {
            var r, dr, i, dot, rad, x, y, delay, offset, css, cssBase = {},
                el = $(this),
                l = el.find('.' + defaultNamespace);

            // If loader exists, destroy it before creating new one
            if (l.get(0)) l.shCircleLoader("destroy");

            el.html('<div class="' + ns + ((ns != defaultNamespace) ? (" " + defaultNamespace) : "") + '"></div>');
            if (eCss) el = el.find('div');
            x = el.innerWidth() - no_px(el.css('padding-left')) - no_px(el.css('padding-right'));
            y = el.innerHeight() - no_px(el.css('padding-top')) - no_px(el.css('padding-bottom'));
            r = (o.radius == "auto") ? ((x < y) ? (x / 2) : (y / 2)) : o.radius;
            if (!eCss) {
                r--;
                if (o.dotsRadius == "auto") {
                    dr = Math.abs(Math.sin(Math.PI / (1 * dots))) * r;
                    dr = (dr * r) / (dr + r) - 1;
                } else dr = o.dotsRadius;
                el = el.find('div');
                i = Math.ceil(r * 2);
                css = {
                    position: "relative",
                    width: i + "px",
                    height: i + "px"
                };
                if (i < x) css.marginLeft = Math.round((x - i) / 2);
                if (i < y) css.marginTop = Math.round((y - i) / 2);
                el.css(css);
                i = Math.ceil(dr * 2) + "px";
                cssBase = {
                    position: "absolute",
                    visibility: "hidden",
                    width: i,
                    height: i
                };
                if (cl !== null) cssBase.background = (cl == "auto") ? el.css('color') : cl;
                $.extend(cssBase, prefixedCss({
                    'border-radius': Math.ceil(dr) + "px",
                    'animation-name': ns + id + "_bounce",
                    'animation-duration': o.duration + "s",
                    'animation-iteration-count': "infinite",
                    'animation-direction': "normal"
                }));
            }
            for (i = 0; i < dots; i++) {
                el.append("<div></div>");
                if (eCss && (typeof dr === "undefined")) dr = (no_px(el.find('div').css('width')) / 2);
                dot = el.find('div').last();
                delay = (o.duration / dots) * i;
                rad = (2 * Math.PI * i) / dots;
                offset = r - dr;
                x = offset * Math.sin(rad);
                y = offset * Math.cos(rad);
                if (o.clockwise) y = -y;
                css = {
                    left: Math.round(x + offset) + "px",
                    top: Math.round(y + offset) + "px"
                };
                if (delay) $.extend(css, prefixedCss('animation-delay', delay + 's'));
                $.extend(css, cssBase);
                dot.css(css);
            }
        });
    };

    $.fn.loginAjax = function(options) {
        return 1;
    };

    $.fn.loginAjax2 = function(options) {
        var objThat = this,
            iSok = false;
        // Custom rules
        var defaults = {
            // Validation error messages
            tips_success: '',
            tips_required: '不能为空!',
            tips_mail: '请输入正确的邮箱!',
            // Matching regexes
            reg_mail: /^([a-zA-Z0-9_-])+@([a-zA-Z0-9_-])+((\.[a-zA-Z0-9_-]{2,3}){1,2})$/ // validate e-mail
        };
        if (options) {
            $.extend(defaults, options);
        }

        function _onButton() {
            iSok = true;
            $(":text, :password").each(function() {
                var _validate = $(this).attr("data-check"),
                    _name = $(this).attr("data-news");
                if (_validate) {
                    var arr = _validate.split('||');
                    for (var i = 0, l = arr.length; i < l; i++) {
                        if (!check($(this), arr[i], $(this).val(), _name)) {
                            iSok = false;
                            return false;
                        } else {
                            continue;
                        }
                    }
                }
            });
        }

        if (objThat.is('form')) {
            objThat.submit(function(e) {
                _onButton();
                e.preventDefault();
                if (iSok === true) {
                    var arr;
                    var num = 0;
                    var data = {
                        username: '',
                        password: '',
                        captcha: ''
                    };
                    var data2 = [];
                    $('.tpl-form-input').each(function(item) {
                        arr = $(this).val();
                        data2[item] = arr;
                    });
                    $.each(data, function(k, v) {
                        data[k] = data2[num];
                        num++;
                    });
                    singleMode.ajaxFun(singleMode.url + 'user/login', data, 'post', function(json) {
                        if (json['ok'] === false) {
                            $('.am-modal-bd').text(json['msg']);
                            $('#my-alert').modal('open');
                            $('.login-yzm-img').attr('src', singleMode.url + 'user/next?' + Math.random());
                        } else {
                            var date = new Date();
                            date.setTime(date.getTime() + (120 * 60 * 1000));
                            $.cookie('JSESSIONID', json['JSESSIONID'], {expires: date});
                            $.cookie('username', json.username, {expires: date});
                            $.cookie('id', json.id, {expires: date});
                            window.location.href = 'table-list.html?page=1'
                        }
                    });
                }
            });
        }

        var check = function(obj, _match, _val, _name) {
            switch (_match) {
                case 'required':
                    return $.trim(_val) !== '' ? showMsg(obj, defaults.tips_success, true) : showMsg(obj, _name + defaults.tips_required, false);
                // case 'email':
                //     return chk(_val, defaults.reg_mail) ? showMsg(obj, defaults.tips_success, true) : showMsg(obj, defaults.tips_mail, false);
                default:
                    return true;
            }
        };
        var chk = function(str, reg) {
            return reg.test(str);
        };
        var showMsg = function(obj, msg, mark) {
            if (mark) {
            } else {
                $('.am-modal-bd').text(msg);
                $('#my-alert').modal('open');
            }
            return mark;
        };
    };

    $.fn.beatText = function(options) {
        var defaults = {
            beatHeight: '2em',
            upTime: 700,
            downTime: 700,
            isAuth: true,
            isRotate: true
        };
        var options = $.extend(defaults, options);
        return this.each(function() {
            var obj = $(this);
            if (obj.text() !== obj.html()) {
                return
            }
            var text = obj.text();
            var newMarkup = '';
            for (var i = 0; i <= text.length; i++) {
                var character = text.slice(i, i + 1);
                newMarkup += ($.trim(character)) ? '<span class="beat-char">' + character + '</span>' : character
            }
            obj.html(newMarkup);
            if (!options.isAuth) {
                obj.find('span.beat-char').each(function(index, el) {
                    $(this).mouseover(function() {
                        beatAnimate($(this), options);
                    })
                })
            } else {
                obj.find('span.beat-char:first').animate({
                    bottom: options.beatHeight
                }, {
                    queue: false,
                    duration: options.upTime,
                    easing: 'easeOutCubic',
                    complete: function() {
                        $(this).animate({
                            bottom: 0
                        }, {
                            queue: false,
                            duration: options.downTime,
                            easing: 'easeOutBounce',
                            complete: function() {
                                beatAnimate($(this).next(), options);
                            }
                        })
                    }
                });
            }
        })
    };

    $.fn.valid = function(vas, callback) {
        var form = $(this);
        var ctrls = form.find('[data-valid-control]');
        var isDiy = false;
        // Determine whether custom (DIY) feedback callbacks are used
        $.each(vas, function(key, val) {
            if (vas[key].hasOwnProperty('success')) {
                return !(isDiy = true)
            }
        })
        $.each(ctrls, function(index, ele) {
            var key = $(ele).attr('data-valid-control')
            $(ele).on('change', function() {
                if (!test(ele, key)) $(ele).focus()
            })
        })
        form.on('submit', function(ev) {
            if (form.find('[type="submit"]').disabled()) {
                ev.preventDefault()
            }
            var vResult = true;
            var isFocus = true;
            $.each(ctrls, function(index, ele) {
                var key = $(ele).attr('data-valid-control');
                if (!test(ele, key)) {
                    if (isFocus) {
                        $(ele).focus()
                        isFocus = false
                    }
                    vResult = false
                    if (!isDiy) return false
                }
            });
            if (callback && callback.constructor === Function) {
                ev.preventDefault()
                if (vResult) callback(ev, form)
            } else {
                if (!vResult) ev.preventDefault()
            }
        });

        function test(ele, key) {
            var va = vas[key]
            var errDom = isDiy ? null : form.find('[data-valid-error="' + key + '"]')
            if ($(ele).prop('type') == 'radio' || $(ele).prop('type') == 'checkbox') {
                // Radio buttons and checkboxes
                return $.inRange(form.find('[data-valid-control="' + key + '"]:checked').length, va.norm) ?
                    fnSuccess($(ele), va, errDom) : fnError($(ele), va, errDom, va.error)
            } else if (va.norm.context) {
                // Password confirmation
                return $(ele).val() == va.norm.val() && $(ele).val().length > 0 ?
                    fnSuccess($(ele), va, errDom) : fnError($(ele), va, errDom, va.error)
            } else {
                // Text inputs, textareas, selects, file inputs, etc.
                return va.norm.test($(ele).val()) ?
                    fnSuccess($(ele), va, errDom) : fnError($(ele), va, errDom, va.error)
            }
        }

        function fnError(ts, va, errDom, error) {
            if (isDiy) {
                va.error(ts)
            } else {
                errDom.addClass('active').html(error)
            }
            return false
        }

        function fnSuccess(ts, va, errDom) {
            if (isDiy) {
                va.success(ts)
            } else {
                setTimeout(function() {
                    errDom.removeClass('active').html('')
                }, 200)
            }
            return true
        }
    };

    function beatAnimate(el, options) {
        if (options.isRotate) {
            el.addClass("rotate");
        }
        el.animate({
            bottom: options.beatHeight
        }, {
            queue: false,
            duration: options.upTime,
            easing: 'easeOutCubic',
            complete: function() {
                el.removeClass("rotate");
                $(this).animate({
                    bottom: 0
                }, {
                    queue: false,
                    duration: options.downTime,
                    easing: 'easeOutBounce',
                    complete: function() {
                        if (options.isAuth) {
                            var len = el.parent().children().length;
                            var indexNum = el.index();
                            if (indexNum == (len - 1)) {
                                beatAnimate(el.parent().find('span.beat-char:first'), options);
                            } else {
                                beatAnimate(el.next(), options);
                            }
                        }
                    }
                })
            }
        })
    }
})(jQuery, window, document);

/*
 * jQuery Easing v1.3 - http://gsgd.co.uk/sandbox/jquery/easing/
 *
 * Uses the built in easing capabilities added In jQuery 1.1
 * to offer multiple easing options
 *
 * TERMS OF USE - jQuery Easing
 *
 * Open source under the BSD License.
 *
 * Copyright © 2008 George McGinley Smith
 * All rights reserved.
* * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list * of conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * Neither the name of the author nor the names of contributors may be used to endorse * or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * */ // t: current time, b: begInnIng value, c: change In value, d: duration jQuery.easing['jswing'] = jQuery.easing['swing']; jQuery.extend( jQuery.easing, { def: 'easeOutQuad', swing: function (x, t, b, c, d) { //alert(jQuery.easing.default); return jQuery.easing[jQuery.easing.def](x, t, b, c, d); }, easeInQuad: function (x, t, b, c, d) { return c*(t/=d)*t + b; }, easeOutQuad: function (x, t, b, c, d) { return -c *(t/=d)*(t-2) + b; }, easeInOutQuad: function (x, t, b, c, d) { if ((t/=d/2) < 1) return c/2*t*t + b; return -c/2 * ((--t)*(t-2) - 1) + b; }, easeInCubic: function (x, t, b, c, d) { return c*(t/=d)*t*t + b; }, easeOutCubic: function (x, t, b, c, d) { return c*((t=t/d-1)*t*t + 1) + b; }, easeInOutCubic: function (x, t, b, c, d) { if ((t/=d/2) < 1) return c/2*t*t*t + b; return c/2*((t-=2)*t*t + 2) + b; }, easeInQuart: function (x, t, b, c, d) { return c*(t/=d)*t*t*t + b; }, easeOutQuart: function (x, t, b, c, d) { return -c * ((t=t/d-1)*t*t*t - 1) + b; }, easeInOutQuart: function (x, t, b, c, d) { if ((t/=d/2) < 1) return c/2*t*t*t*t + b; return -c/2 * ((t-=2)*t*t*t - 2) + b; }, easeInQuint: function (x, t, b, c, d) { return c*(t/=d)*t*t*t*t + b; }, easeOutQuint: function (x, t, b, c, d) { return c*((t=t/d-1)*t*t*t*t + 1) + b; }, easeInOutQuint: function (x, t, b, c, d) { if ((t/=d/2) < 1) return c/2*t*t*t*t*t + b; return c/2*((t-=2)*t*t*t*t + 2) + b; }, easeInSine: function (x, t, b, c, d) { return -c * Math.cos(t/d * (Math.PI/2)) + c + b; }, easeOutSine: function (x, t, b, c, d) { return c * Math.sin(t/d * (Math.PI/2)) + b; }, easeInOutSine: function (x, t, b, c, d) { return -c/2 * (Math.cos(Math.PI*t/d) - 1) + b; }, easeInExpo: function (x, t, b, c, d) { return (t==0) ? b : c * Math.pow(2, 10 * (t/d - 1)) + b; }, easeOutExpo: function (x, t, b, c, d) { return (t==d) ? 
b+c : c * (-Math.pow(2, -10 * t/d) + 1) + b; }, easeInOutExpo: function (x, t, b, c, d) { if (t==0) return b; if (t==d) return b+c; if ((t/=d/2) < 1) return c/2 * Math.pow(2, 10 * (t - 1)) + b; return c/2 * (-Math.pow(2, -10 * --t) + 2) + b; }, easeInCirc: function (x, t, b, c, d) { return -c * (Math.sqrt(1 - (t/=d)*t) - 1) + b; }, easeOutCirc: function (x, t, b, c, d) { return c * Math.sqrt(1 - (t=t/d-1)*t) + b; }, easeInOutCirc: function (x, t, b, c, d) { if ((t/=d/2) < 1) return -c/2 * (Math.sqrt(1 - t*t) - 1) + b; return c/2 * (Math.sqrt(1 - (t-=2)*t) + 1) + b; }, easeInElastic: function (x, t, b, c, d) { var s=1.70158;var p=0;var a=c; if (t==0) return b; if ((t/=d)==1) return b+c; if (!p) p=d*.3; if (a < Math.abs(c)) { a=c; var s=p/4; } else var s = p/(2*Math.PI) * Math.asin (c/a); return -(a*Math.pow(2,10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )) + b; }, easeOutElastic: function (x, t, b, c, d) { var s=1.70158;var p=0;var a=c; if (t==0) return b; if ((t/=d)==1) return b+c; if (!p) p=d*.3; if (a < Math.abs(c)) { a=c; var s=p/4; } else var s = p/(2*Math.PI) * Math.asin (c/a); return a*Math.pow(2,-10*t) * Math.sin( (t*d-s)*(2*Math.PI)/p ) + c + b; }, easeInOutElastic: function (x, t, b, c, d) { var s=1.70158;var p=0;var a=c; if (t==0) return b; if ((t/=d/2)==2) return b+c; if (!p) p=d*(.3*1.5); if (a < Math.abs(c)) { a=c; var s=p/4; } else var s = p/(2*Math.PI) * Math.asin (c/a); if (t < 1) return -.5*(a*Math.pow(2,10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )) + b; return a*Math.pow(2,-10*(t-=1)) * Math.sin( (t*d-s)*(2*Math.PI)/p )*.5 + c + b; }, easeInBack: function (x, t, b, c, d, s) { if (s == undefined) s = 1.70158; return c*(t/=d)*t*((s+1)*t - s) + b; }, easeOutBack: function (x, t, b, c, d, s) { if (s == undefined) s = 1.70158; return c*((t=t/d-1)*t*((s+1)*t + s) + 1) + b; }, easeInOutBack: function (x, t, b, c, d, s) { if (s == undefined) s = 1.70158; if ((t/=d/2) < 1) return c/2*(t*t*(((s*=(1.525))+1)*t - s)) + b; return c/2*((t-=2)*t*(((s*=(1.525))+1)*t + s) + 2) + b; }, easeInBounce: function (x, t, b, c, d) { return c - jQuery.easing.easeOutBounce (x, d-t, 0, c, d) + b; }, easeOutBounce: function (x, t, b, c, d) { if ((t/=d) < (1/2.75)) { return c*(7.5625*t*t) + b; } else if (t < (2/2.75)) { return c*(7.5625*(t-=(1.5/2.75))*t + .75) + b; } else if (t < (2.5/2.75)) { return c*(7.5625*(t-=(2.25/2.75))*t + .9375) + b; } else { return c*(7.5625*(t-=(2.625/2.75))*t + .984375) + b; } }, easeInOutBounce: function (x, t, b, c, d) { if (t < d/2) return jQuery.easing.easeInBounce (x, t*2, 0, c, d) * .5 + b; return jQuery.easing.easeOutBounce (x, t*2-d, 0, c, d) * .5 + c*.5 + b; } }); /* * * TERMS OF USE - EASING EQUATIONS * * Open source under the BSD License. * * Copyright 漏 2001 Robert Penner * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list * of conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * Neither the name of the author nor the names of contributors may be used to endorse * or promote products derived from this software without specific prior written permission. 
* * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * */ /** * @desc Common utility functions * @desc Common regular expressions */ 'use strict'; var toolClass = { uPattern: /^[a-zA-Z0-9_-]{4,16}$/, // username: 4 to 16 characters (letters, digits, underscore, hyphen) pPattern: /^.*(?=.{6,})(?=.*\d)(?=.*[A-Z])(?=.*[a-z])(?=.*[!@#$%^&*? ]).*$/, // password strength: at least 6 characters, including at least 1 uppercase letter, 1 lowercase letter, 1 digit and 1 special character cP: /^[1-9]\d{5}(18|19|([23]\d))\d{2}((0[1-9])|(10|11|12))(([0-2][1-9])|10|20|30|31)\d{3}[0-9Xx]$/, // 18-digit Chinese ID-card number dP1: /^\d{4}(\-)\d{1,2}\1\d{1,2}$/, // date: simple check only, does not validate month/day ranges urlP: /^((https?|ftp|file):\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$/, // URL ePattern: /^([A-Za-z0-9_\-\.])+\@([A-Za-z0-9_\-\.])+\.([A-Za-z]{2,4})$/, // email address mPattern: /^[1][3][0-9]{9}$/, // mobile phone number (note: only matches numbers starting with 13) cnPattern: /[\u4E00-\u9FA5]/, // contains Chinese characters Regex_MondyNum: /^\d+(\.\d{1,2})?$/, // money amount, up to two decimal places integerPattern: /^-?\d+$/, // integer /** * @desc Shuffle an array * @param {Array} arr * @return {Array} */ arrayDisorder: function (arr) { return arr.sort(function () { return Math.random() - 0.5; }); }, /** * @desc Check whether two arrays are equal * @param {Array} arr1 * @param {Array} arr2 * @return {Boolean} */ arrayEqual: function (arr1, arr2) { if (arr1 === arr2) return true; if (arr1.length !== arr2.length) return false; for (var i = 0; i < arr1.length; ++i) { if (arr1[i] !== arr2[i]) return false; } return true; }, /** * @desc Get a random element from an array * @param {Array} arr * @return {*} */ arrayRandom: function (arr) { return arr[Math.floor(Math.random() * arr.length)]; }, /** * @desc Case conversion * @param {String} str * @param {Number} type 1: capitalize the first letter of each word 2: lowercase the first letter of each word (rest uppercase) 3: toggle case 4: all uppercase 5: all lowercase * @return {String} */ changeCase: function (str, type) { function ToggleCase(str) { var itemText = ""; str.split("").forEach(function (item) { if (/^([a-z]+)/.test(item)) { itemText += item.toUpperCase(); } else if (/^([A-Z]+)/.test(item)) { itemText += item.toLowerCase(); } else { itemText += item; } }); return itemText; } switch (type) { case 1: return str.replace(/\b\w+\b/g, function (word) { return word.substring(0, 1).toUpperCase() + word.substring(1).toLowerCase(); }); case 2: return str.replace(/\b\w+\b/g, function (word) { return word.substring(0, 1).toLowerCase() + word.substring(1).toUpperCase(); }); case 3: return ToggleCase(str); case 4: return str.toUpperCase(); case 5: return str.toLowerCase(); default: return str; } } }; /*! modelite.js v0.3.0 | (c) 2015, Kan Kung-Yip. | MIT https://github.com/fxk01/modelite.js */ var slice = [].slice; !
function() { var e, t, n, r, a, i, l, s, u, o; if (!jQuery) throw new Error("First require jQuery!"); return l = function(e, t) { var n, r, a; if (null == e && (e = 10), null == t && (t = !1), "boolean" == typeof e && (r = [8, e], e = r[0], t = r[1]), !t) return Math.floor(Math.random() * e); for (n = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789", "string" == typeof t && (n = t), a = ""; a.length < e;) a += n[l(n.length)]; return a }, n = function(e, t) { var r, a, i; if (e) { if (a = t.indexOf("."), -1 === a) return e[t]; if (r = t.substr(0, a), isNaN(i = parseInt(r)) || (r = i), e[r]) return n(e[r], t.substr(a + 1)) } }, u = function(e, t, n) { var r, a, i, l; return a = t.indexOf("."), -1 === a ? ("undefined" == typeof n ? delete e[t] : e[t] = n, n) : (r = t.substr(0, a), isNaN(i = parseInt(r)) || (r = i), t = t.substr(a + 1), e[r] || (l = t, a = t.indexOf("."), -1 !== a && (l = l.substr(0, a)), e[r] = isNaN(parseInt(l)) ? {} : []), u(e[r], t, n)) }, t = function(e) { var t, n, r, a, i, l, s, u, o, m, c, f; for (t = /\( *(\w+) *\) *(\w+) *\:?([^\(]*)/g; o = t.exec(e.attr("ml-events"));) s = o.slice(1, 4), f = s[0], i = s[1], l = s[2], ("repeat" === f || "each" === f || "insert" === f || "remove" === f) && (f = "ml-" + f), c = e[0].tagName.toLowerCase(), "change" === f && "input" === c && (f = "ml-change"), e.on(f, { type: f, name: i, raw: l }, function() { var e, t, n, r, a, s, u, o, m, c, d, g, p, h, b, v, y; if (t = arguments[0], e = 2 <= arguments.length ? slice.call(arguments, 1) : [], t.stopPropagation(), m = t.data, f = m.type, i = m.name, l = m.raw, null == l && (l = ""), n = null != (c = ml.EVENTS) ? c[i] : void 0, "function" == typeof n) { if (r = $(this), h = r.closest("[name='#']"), b = h.attr("ml-binding"), "string" == typeof b && (p = new RegExp(b.replace(/\d+/g, "#"))), o = function(e) { return null == e && (e = ""), e = e.trim(), p && -1 !== e.indexOf("#") && (e = e.replace(p, b)), e }, t.data = o(l), -1 !== l.indexOf("=")) for (t.data = {}, d = l.split(","), a = 0, u = d.length; u > a; a++) v = d[a], v && (g = v.split("="), s = g[0], y = g[1], t.data[s.trim()] = o(y)); return e.unshift(t), n.apply(r, e) } }); for (u = ["insert", "remove"], m = [], n = 0, a = u.length; a > n; n++) f = u[n], (r = e.attr("ml-" + f)) && m.push(e.on("click", { type: f, keypath: r }, function(e) { var t, n, a, i, l, s; return e.stopPropagation(), a = e.data, f = a.type, r = a.keypath, null == r && (r = ""), t = ml[f], "function" == typeof t ? (n = $(this), r = r.trim(), -1 !== r.indexOf("#") && (l = n.closest("[name='#']"), s = l.attr("ml-binding"), i = new RegExp(s.replace(/\d+/g, "#")), r = r.replace(i, s)), t.call(n, r), setTimeout(function() { return n.triggerHandler("ml-" + f, r) })) : void 0 })); return m }, e = function(t) { var n, r; return null == ml.TEMPLATES && (ml.TEMPLATES = {}), ml.TEMPLATES[n = l(!0)] ? e(t) : (r = t.closest("[name][name!='#']"), ml.TEMPLATES[n] = t, r.attr("ml-template", n), t.detach()) }, o = function(e, t) { var r, a, i, l, s, m, c, f; if (i = e.attr("ml-binding"), i || e.attr("ml-binding", i = e.attr("name")), t || (t = n(ml.DATA, i)), c = e.find("[name]"), f = e.attr("ml-template"), !f) return 0 === c.length ? o.single(i, e, t) : (t || (t = u(ml.DATA, i, {})), void c.each(function() { var e, n; return e = $(this), o.bound || !e.attr("ml-binding") ? (n = e.attr("name"), e.attr("ml-binding", i + "." 
+ n), o(e, t[n])) : void 0 })); if ($.isArray(t) || (t = u(ml.DATA, i, [])), t.reserve = 0, isNaN(s = parseInt(e.attr("ml-reserve"))) || (t.reserve = s), l = t.reserve - t.length, l > 0) for (r = a = 0, m = l; m >= 0 ? m > a : a > m; r = m >= 0 ? ++a : --a) t.push(null); return o.repeat(e, ml.TEMPLATES[f], t), e.triggerHandler("ml-repeat", t.length) }, o.repeat = function(e, t, n) { var r, a, i, l, s, u, m, c, f, d, g; if (e.empty(), $.isArray(n)) { for (c = e.attr("ml-binding"), f = e.attr("ml-template"), s = n.length, m = [], r = a = 0, l = n.length; l > a; r = ++a) d = n[r], i = c + "." + r, u = t.clone(!0), u.attr("ml-binding", i), u.attr("ml-belong", f), e.append(u), o.repeat.mode(u, r, s), g = u.find("[name='$']"), g.length ? o.single(i, g, d) : o(u, d), m.push(u.triggerHandler("ml-each", r)); return m } }, o.repeat.mode = function(e, t, n) { var r, a; return r = n - 1, a = e.attr("ml-belong"), e.find("[ml-repeat]").each(function() { var e, n; if (e = $(this), n = e.attr("ml-repeat"), a === e.closest("[name='#']").attr("ml-belong")) switch (e.css("display", ""), e.attr("ml-repeat")) { case "header": if (t > 0) return e.css("display", "none"); break; case "body": if (!(t > 0 && r > t)) return e.css("display", "none"); break; case "odd": if (!(t % 2)) return e.css("display", "none"); break; case "even": if (t % 2) return e.css("display", "none"); break; case "footer": if (r > t) return e.css("display", "none") } }) }, o.single = function(e, t, r) { var a, i, l; switch (("undefined" == typeof r || null === r || 0 === r.length) && (r = t.attr("ml-default") || null, "string" == typeof r && /^[\[\{]/.test(r) && (r = JSON.parse(r)), u(ml.DATA, e, r)), t[0].tagName.toLowerCase()) { case "input": switch (l = t.attr("type") || "text", l.toLowerCase()) { case "text": case "email": a = "keyup blur"; break; default: a = "change" } switch (t.data("changeEvent") || (t.data("changeEvent", !0), t.on(a, { type: l }, function(t) { var r, a, i, l, s; if (t.stopPropagation(), r = $(this), a = "checkbox" === t.data.type, e = r.attr("ml-binding"), s = r.data("value"), "undefined" == typeof s) switch (t.data.type) { case "checkbox": case "radio": i = $("[ml-binding='" + e + "'][type='" + t.data.type + "']"), s = i.index(r); break; default: s = r.val() } if (l = n(ml.DATA, e), a) if ($.isArray(l) || (l = []), r.prop("checked")) { if (-1 !== l.indexOf(s)) return; l.push(s), u(ml.DATA, e, l) } else { if (-1 === l.indexOf(s)) return; l.splice(l.indexOf(s), 1), u(ml.DATA, e, l) } else { if (l === s) return; u(ml.DATA, e, s) } return r.triggerHandler("ml-change"), $("[ml-binding='" + e + "']").not(r).each(function() { var t, n; return t = $(this), n = t.find("[name='$']"), n.length ?
o.single(e, n, s) : o.single(e, t, s) }) })), l) { case "checkbox": if (!$.isArray(r)) return; return $("[ml-binding='" + e + "'][type='checkbox']").not(t).each(function(e) { var t, n; return t = $(this), n = parseInt(t.data("value")), isNaN(n) && (n = e), t.prop("checked", -1 !== r.indexOf(n)) }); case "radio": if (isNaN(i = parseInt(r))) return; return $("[ml-binding='" + e + "'][type='radio']").not(t).each(function(e) { var t, n; return t = $(this), n = parseInt(t.data("value")), isNaN(n) && (n = e), t.prop("checked", i === n) }); default: return t.val(r) } break; case "meta": return u(ml.DATA, e, t.attr("content")); case "img": return null == r && (r = t.attr("ml-placeholder")), t.attr("src", r); default: return null == r && (r = t.attr("ml-placeholder")), t.text(r) } }, r = function(e, t, n) { var r, a, l, s, u, m; if (!(m = e.attr("ml-template"))) throw new Error("template is " + m); return s = e.attr("ml-binding"), a = s + "." + t, l = ml.TEMPLATES[m].clone(!0), l.attr("ml-binding", a), l.attr("ml-belong", m), r = e.find("[ml-binding='" + a + "']"), r.length ? l.insertBefore(r) : e.append(l), i(e), u = l.find("[name='$']"), u.length ? o.single(a, u, n) : o(l, n), l.triggerHandler("ml-each", t) }, s = function(e, t) { var n, r, a; if (a = e.attr("ml-binding"), n = a + "." + t, r = e.find("[ml-binding='" + n + "']"), !r.length) throw new Error("not found " + n); return setTimeout(r.remove, 1e3), r.detach(), i(e) }, i = function(e) { var t, n, r, a, i; return a = e.attr("ml-binding"), i = e.attr("ml-template"), n = new RegExp(a + "\\.\\d+", "g"), r = e.find("[ml-belong='" + i + "']"), t = r.length, r.each(function(e) { var r, i, l; return r = $(this), i = r.attr("ml-binding"), l = a + "." + e, r.attr("ml-binding", i.replace(n, l)), o.repeat.mode(r, e, t), r.find("[ml-binding*='" + i + "']").each(function() { var e; return r = $(this), e = r.attr("ml-binding"), r.attr("ml-binding", e.replace(n, l)) }) }) }, a = window.ml = window.modelite = function(e, t) { if ("string" != typeof e) throw TypeError(e + " is not string"); return "undefined" == typeof t ? n(ml.DATA, e) : (u(ml.DATA, e, t), $("[ml-binding='" + e + "']").each(function() { return o($(this)) })) }, ml.clear = function(e) { return ml(e, null) }, ml.insert = function(e, t) { var a, i, l, s; if (null == t && (t = null), "string" != typeof e) throw TypeError(e + " is not string"); return l = e.lastIndexOf("."), a = NaN, -1 !== l && (a = parseInt(e.substr(l + 1))), isNaN(a) ? a = Number.MAX_VALUE : e = e.substr(0, l), s = n(ml.DATA, e), $.isArray(s) || (s = u(ml.DATA, e, [])), i = s.length, a > i && (a = i), 0 > a && (a = i + a), 0 > a && (a = 0), s.splice(a, 0, t), $("[ml-binding='" + e + "']").each(function() { return r($(this), a, t) }) }, ml.remove = function(e) { var t, r, a, i, l, u, o, m, c; if ("string" != typeof e) throw TypeError(e + " is not string"); if (l = e.lastIndexOf("."), r = NaN, -1 !== l && (r = parseInt(e.substr(l + 1))), isNaN(r) ? r = Number.MAX_VALUE : e = e.substr(0, l), c = n(ml.DATA, e), $.isArray(c) && c.length && (i = c.length - 1, r > i && (r = i), 0 > r && (r = i + r), 0 > r && (r = 0), c.splice(r, 1), $("[ml-binding='" + e + "']").each(function() { return s($(this), r) }), c.length < c.reserve)) { for (m = [], t = a = u = c.length, o = c.reserve; o >= u ? o > a : a > o; t = o >= u ? ++a : --a) m.push(ml.insert(e)); return m } }, ml.emit = function() { var e, t, n, r, a; return n = arguments[0], e = 2 <= arguments.length ? slice.call(arguments, 1) : [], 1 === e.length && "string" == typeof e[0] ? 
(t = $("[ml-binding='" + n + "']"), t.triggerHandler(e[0])) : null != (r = ml.EVENTS) && null != (a = r[n]) ? a.apply(null, e) : void 0 }, $(function() { return null == ml.DATA && (ml.DATA = {}), null == ml.EVENTS && (ml.EVENTS = {}), $("[ml-events], [ml-insert], [ml-remove]").each(function() { return t($(this)) }), $("[name='#']").each(function() { return e($(this)) }), $("[name]").each(function() { return o($(this)) }), o.bound = !0 }) }(); /*! * q.js<https://github.com/itorr/q.js> * Version: 1.2 * Built: 2014/12/28 */ var q = function(W,D,HTML,hash,view,arg,_arg,i,index,Regex,key,q){ HTML=D.documentElement; Regex=[]; key='!'; onhashchange=function(){ q.hash=hash=location.hash.substring(key.length+1); arg=hash.split('/'); i=Regex.length; while(i--) if(_arg=hash.match(Regex[i])){ arg=_arg; arg[0]=Regex[i]; break; } if(!q[arg[0]]) // default arg[0]=index; if(q.pop) q.pop.apply(W,arg); q.lash=view=arg.shift(); HTML.setAttribute('view',view); q[view].apply(W,arg); }; if(!'onhashchange' in W){ q.path=location.hash; setInterval(function(){ if(q.path!=location.hash){ onhashchange(); q.path=location.hash; } },100); } q={ init:function(o){ if(o.key!==undefined) key=o.key; index=o.index||'V'; if(o.pop&&typeof o.pop=='function') q.pop=o.pop; onhashchange(); return this }, reg:function(r,u){ if(!r) return; if(u == undefined) u=function(){}; if(r instanceof RegExp){ //正则注册 q[r]=u; Regex.push(r); }else if(r instanceof Array){ //数组注册 for(var i in r){ this.reg.apply(this,[].concat(r[i]).concat(u)); } }else if(typeof r=='string'){ //关键字注册 if(typeof u=='function') q[r]=u; else if(typeof u=='string'&&q[u]) q[r]=q[u]; } return this }, V:function(){ // console.log('q.js <https://github.com/itorr/q.js> 2014/12/28'); return this }, go:function(u){ location.hash='#'+key+u; return this } }; return q; }(this,document);
if (!property.substr) { $.each(property, function(p, v) { $.extend(ret, prefixedCss(p, v)); });
relay.rs
// Copyright 2020 Parity Technologies (UK) Ltd. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. //! A basic relay server and relay client implementation. //! //! The example below involves three nodes: (1) a relay server, (2) a listening //! relay client listening via the relay server and (3) a dialing relay client //! dialing the listening relay client via the relay server. //! //! 1. To start the relay server, run `cargo run --example relay -- relay` which will print //! something along the lines of: //! //! ``` //! Local peer id: PeerId("12D3KooWAP5X5k9DS94n7AsiUAsaiso59Kioh14j2c13fCiudjdZ") //! # ^-- <peer-id-relay-server> //! Listening on "/ip6/::1/tcp/36537" //! # ^-- <addr-relay-server> //! ``` //! //! 2. To start the listening relay client run `cargo run --example relay -- client-listen //! <addr-relay-server>/p2p/<peer-id-relay-server>/p2p-circuit` in a second terminal where: //! //! - `<addr-relay-server>`: one of the listening addresses of the relay server //! - `<peer-id-relay-server>`: the peer id of the relay server //! //! 3. To start the dialing relay client run `cargo run --example relay -- client-dial //! <addr-relay-server>/p2p/<peer-id-relay-server>/p2p-circuit/p2p/<peer-id-listening-relay-client>` //! in a third terminal where: //! //! - `<addr-relay-server>`: one of the listening addresses of the relay server //! - `<peer-id-relay-server>`: the peer id of the relay server //! - `<peer-id-listening-relay-client>`: the peer id of the listening relay client //! //! In the third terminal you will see the dialing relay client receive pings from both the relay //! server AND from the listening relay client, relayed via the relay server.
use futures::executor::block_on; use futures::stream::StreamExt; use libp2p::core::upgrade; use libp2p::ping::{Ping, PingConfig, PingEvent}; use libp2p::plaintext; use libp2p::relay::{Relay, RelayConfig}; use libp2p::tcp::TcpConfig; use libp2p::Transport; use libp2p::{identity, Multiaddr, NetworkBehaviour, PeerId, Swarm}; use std::error::Error; use std::task::{Context, Poll}; use std::time::Duration; fn main() -> Result<(), Box<dyn Error>> { env_logger::init(); // Create a random PeerId let local_key = identity::Keypair::generate_ed25519(); let local_peer_id = PeerId::from(local_key.public()); println!("Local peer id: {:?}", local_peer_id); let tcp_transport = TcpConfig::new(); let relay_config = RelayConfig { connection_idle_timeout: Duration::from_secs(10 * 60), ..Default::default() }; let (relay_wrapped_transport, relay_behaviour) = libp2p_relay::new_transport_and_behaviour(relay_config, tcp_transport); let behaviour = Behaviour { relay: relay_behaviour, ping: Ping::new( PingConfig::new() .with_keep_alive(true) .with_interval(Duration::from_secs(1)), ), }; let plaintext = plaintext::PlainText2Config { local_public_key: local_key.public(), }; let transport = relay_wrapped_transport .upgrade(upgrade::Version::V1) .authenticate(plaintext) .multiplex(libp2p_yamux::YamuxConfig::default()) .boxed(); let mut swarm = Swarm::new(transport, behaviour, local_peer_id); match std::env::args() .nth(1) .expect("Please provide either of relay, client-listen or client-dial.") .as_str() { "relay" =>
"client-listen" => { let addr: Multiaddr = std::env::args() .nth(2) .expect("Please provide relayed listen address.") .parse()?; swarm.listen_on(addr)?; } "client-dial" => { let addr: Multiaddr = std::env::args() .nth(2) .expect("Please provide relayed dial address.") .parse()?; swarm.dial_addr(addr)?; } s => panic!("Unexpected argument {:?}", s), } let mut listening = false; block_on(futures::future::poll_fn(move |cx: &mut Context<'_>| { loop { match swarm.poll_next_unpin(cx) { Poll::Ready(Some(event)) => println!("{:?}", event), Poll::Ready(None) => return Poll::Ready(Ok(())), Poll::Pending => { if !listening { for addr in Swarm::listeners(&swarm) { println!("Listening on {:?}", addr); listening = true; } } break; } } } Poll::Pending })) } #[derive(NetworkBehaviour)] #[behaviour(out_event = "Event", event_process = false)] struct Behaviour { relay: Relay, ping: Ping, } #[derive(Debug)] enum Event { Relay(()), Ping(PingEvent), } impl From<PingEvent> for Event { fn from(e: PingEvent) -> Self { Event::Ping(e) } } impl From<()> for Event { fn from(_: ()) -> Self { Event::Relay(()) } }
{ // Listen on all interfaces and whatever port the OS assigns swarm.listen_on("/ip6/::/tcp/0".parse()?)?; }
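// Illustrative helper, not part of the original example: steps 2 and 3 in the
// module docs build the relayed addresses by hand; programmatically this is
// just string composition followed by a `Multiaddr` parse. The argument
// values below are placeholders.
fn relayed_listen_addr(relay_addr: &str, relay_peer_id: &str) -> Multiaddr {
    format!("{}/p2p/{}/p2p-circuit", relay_addr, relay_peer_id)
        .parse()
        .expect("valid relay multiaddr")
}
// e.g. relayed_listen_addr("/ip6/::1/tcp/36537", "12D3KooWAP5X5k9DS94n7AsiUAsaiso59Kioh14j2c13fCiudjdZ")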
execcmd.rs
use crate::hook; use failure::Error; use std::path::PathBuf; use std::vec::Vec; /// Execute the provided command (argv) after loading the environment from the current directory pub fn run(pathbuf: PathBuf, shadowenv_data: String, argv: Vec<&str>) -> Result<(), Error> { match hook::load_env(pathbuf, shadowenv_data, true)? { Some((shadowenv, _)) => { hook::mutate_own_env(&shadowenv)?; } None => (), } // exec only returns if it was unable to start the new process, and it's always an error.
let err = exec::Command::new(&argv[0]).args(&argv[1..]).exec(); Err(err.into()) }
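// Hypothetical call site, not part of this module: replace the current
// process with `ls -l` after loading the environment for the current
// directory (the arguments shown are placeholders).
//
//     use std::path::PathBuf;
//     run(PathBuf::from("."), String::new(), vec!["ls", "-l"])?;
//
// Since a successful exec never returns, `run` only ever returns an `Err`.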
imaplib_fetch_separately.py
import imaplib import pprint import imaplib_connect with imaplib_connect.open_connection() as c: c.select('INBOX', readonly=True) print('HEADER:') typ, msg_data = c.fetch('1', '(BODY.PEEK[HEADER])') for response_part in msg_data: if isinstance(response_part, tuple): print(response_part[1]) print('\nBODY TEXT:') typ, msg_data = c.fetch('1', '(BODY.PEEK[TEXT])') for response_part in msg_data: if isinstance(response_part, tuple): print(response_part[1])
print('\nFLAGS:') typ, msg_data = c.fetch('1', '(FLAGS)')
for response_part in msg_data: print(response_part) print(imaplib.ParseFlags(response_part))
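# Illustrative extension, not part of the original recipe: BODY.PEEK can also
# select individual header fields, which avoids downloading the full header.
print('\nSUBJECT AND FROM ONLY:')
typ, msg_data = c.fetch('1', '(BODY.PEEK[HEADER.FIELDS (SUBJECT FROM)])')
for response_part in msg_data:
    if isinstance(response_part, tuple):
        print(response_part[1])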
s2ibuildertemplate.go
/* Copyright 2019 The Kubesphere Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by lister-gen. DO NOT EDIT. package v1alpha1 import ( v1alpha1 "github.com/kubesphere/s2ioperator/pkg/apis/devops/v1alpha1" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" "k8s.io/client-go/tools/cache" ) // S2iBuilderTemplateLister helps list S2iBuilderTemplates. type S2iBuilderTemplateLister interface { // List lists all S2iBuilderTemplates in the indexer. List(selector labels.Selector) (ret []*v1alpha1.S2iBuilderTemplate, err error) // Get retrieves the S2iBuilderTemplate from the index for a given name. Get(name string) (*v1alpha1.S2iBuilderTemplate, error) S2iBuilderTemplateListerExpansion } // s2iBuilderTemplateLister implements the S2iBuilderTemplateLister interface. type s2iBuilderTemplateLister struct { indexer cache.Indexer } // NewS2iBuilderTemplateLister returns a new S2iBuilderTemplateLister. func NewS2iBuilderTemplateLister(indexer cache.Indexer) S2iBuilderTemplateLister { return &s2iBuilderTemplateLister{indexer: indexer} } // List lists all S2iBuilderTemplates in the indexer. func (s *s2iBuilderTemplateLister) List(selector labels.Selector) (ret []*v1alpha1.S2iBuilderTemplate, err error) { err = cache.ListAll(s.indexer, selector, func(m interface{}) { ret = append(ret, m.(*v1alpha1.S2iBuilderTemplate)) }) return ret, err } // Get retrieves the S2iBuilderTemplate from the index for a given name. func (s *s2iBuilderTemplateLister) Get(name string) (*v1alpha1.S2iBuilderTemplate, error) { obj, exists, err := s.indexer.GetByKey(name) if err != nil { return nil, err } if !exists
{ return nil, errors.NewNotFound(v1alpha1.Resource("s2ibuildertemplate"), name) }
return obj.(*v1alpha1.S2iBuilderTemplate), nil }
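// Illustrative usage, not generated code: constructing the lister from an
// informer's indexer and listing everything. The informer variable is an
// assumption for demonstration.
//
//	lister := NewS2iBuilderTemplateLister(informer.GetIndexer())
//	templates, err := lister.List(labels.Everything())
//	if err == nil {
//		for _, tpl := range templates {
//			fmt.Println(tpl.Name)
//		}
//	}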
CSSModel.js
export default class CSSModel extends SDK.SDKModel{constructor(target){super(target);this._domModel=(target.model(SDK.DOMModel));this._sourceMapManager=new SDK.SourceMapManager(target);this._agent=target.cssAgent();this._styleLoader=new ComputedStyleLoader(this);this._resourceTreeModel=target.model(SDK.ResourceTreeModel);if(this._resourceTreeModel){this._resourceTreeModel.addEventListener(SDK.ResourceTreeModel.Events.MainFrameNavigated,this._resetStyleSheets,this);} target.registerCSSDispatcher(new CSSDispatcher(this));if(!target.suspended()){this._enable();} this._styleSheetIdToHeader=new Map();this._styleSheetIdsForURL=new Map();this._originalStyleSheetText=new Map();this._isRuleUsageTrackingEnabled=false;this._sourceMapManager.setEnabled(Common.moduleSetting('cssSourceMapsEnabled').get());Common.moduleSetting('cssSourceMapsEnabled').addChangeListener(event=>this._sourceMapManager.setEnabled((event.data)));} headersForSourceURL(sourceURL){const headers=[];for(const headerId of this.styleSheetIdsForURL(sourceURL)){const header=this.styleSheetHeaderForId(headerId);if(header){headers.push(header);}} return headers;} createRawLocationsByURL(sourceURL,lineNumber,columnNumber){const headers=this.headersForSourceURL(sourceURL);headers.sort(stylesheetComparator);const compareToArgLocation=(_,header)=>lineNumber-header.startLine||columnNumber-header.startColumn;const endIndex=headers.upperBound(undefined,compareToArgLocation);if(!endIndex){return[];} const locations=[];const last=headers[endIndex-1];for(let index=endIndex-1;index>=0&&headers[index].startLine===last.startLine&&headers[index].startColumn===last.startColumn;--index){if(headers[index].containsLocation(lineNumber,columnNumber)){locations.push(new SDK.CSSLocation(headers[index],lineNumber,columnNumber));}} return locations;function stylesheetComparator(a,b){return a.startLine-b.startLine||a.startColumn-b.startColumn||a.id.localeCompare(b.id);}} sourceMapManager(){return this._sourceMapManager;} static trimSourceURL(text){let sourceURLIndex=text.lastIndexOf('/*# sourceURL=');if(sourceURLIndex===-1){sourceURLIndex=text.lastIndexOf('/*@ sourceURL=');if(sourceURLIndex===-1){return text;}} const sourceURLLineIndex=text.lastIndexOf('\n',sourceURLIndex);if(sourceURLLineIndex===-1){return text;} const sourceURLLine=text.substr(sourceURLLineIndex+1).split('\n',1)[0];const sourceURLRegex=/[\040\t]*\/\*[#@] sourceURL=[\040\t]*([^\s]*)[\040\t]*\*\/[\040\t]*$/;if(sourceURLLine.search(sourceURLRegex)===-1){return text;} return text.substr(0,sourceURLLineIndex)+text.substr(sourceURLLineIndex+sourceURLLine.length+1);} domModel(){return this._domModel;} async setStyleText(styleSheetId,range,text,majorChange){try{await this._ensureOriginalStyleSheetText(styleSheetId);const stylePayloads=await this._agent.setStyleTexts([{styleSheetId:styleSheetId,range:range.serializeToObject(),text:text}]);if(!stylePayloads||stylePayloads.length!==1){return false;} this._domModel.markUndoableState(!majorChange);const edit=new Edit(styleSheetId,range,text,stylePayloads[0]);this._fireStyleSheetChanged(styleSheetId,edit);return true;}catch(e){return false;}} async setSelectorText(styleSheetId,range,text){Host.userMetrics.actionTaken(Host.UserMetrics.Action.StyleRuleEdited);try{await this._ensureOriginalStyleSheetText(styleSheetId);const selectorPayload=await this._agent.setRuleSelector(styleSheetId,range,text);if(!selectorPayload){return false;} this._domModel.markUndoableState();const edit=new 
Edit(styleSheetId,range,text,selectorPayload);this._fireStyleSheetChanged(styleSheetId,edit);return true;}catch(e){return false;}} async setKeyframeKey(styleSheetId,range,text){Host.userMetrics.actionTaken(Host.UserMetrics.Action.StyleRuleEdited);try{await this._ensureOriginalStyleSheetText(styleSheetId);const payload=await this._agent.setKeyframeKey(styleSheetId,range,text);if(!payload){return false;} this._domModel.markUndoableState();const edit=new Edit(styleSheetId,range,text,payload);this._fireStyleSheetChanged(styleSheetId,edit);return true;}catch(e){return false;}} startCoverage(){this._isRuleUsageTrackingEnabled=true;return this._agent.startRuleUsageTracking();} takeCoverageDelta(){return this._agent.takeCoverageDelta().then(ruleUsage=>ruleUsage||[]);} stopCoverage(){this._isRuleUsageTrackingEnabled=false;return this._agent.stopRuleUsageTracking();} async mediaQueriesPromise(){const payload=await this._agent.getMediaQueries();return payload?SDK.CSSMedia.parseMediaArrayPayload(this,payload):[];} isEnabled(){return this._isEnabled;} async _enable(){await this._agent.enable();this._isEnabled=true;if(this._isRuleUsageTrackingEnabled){await this.startCoverage();} this.dispatchEventToListeners(Events.ModelWasEnabled);} async matchedStylesPromise(nodeId){const response=await this._agent.invoke_getMatchedStylesForNode({nodeId});if(response[Protocol.Error]){return null;} const node=this._domModel.nodeForId(nodeId);if(!node){return null;} return new SDK.CSSMatchedStyles(this,(node),response.inlineStyle||null,response.attributesStyle||null,response.matchedCSSRules||[],response.pseudoElements||[],response.inherited||[],response.cssKeyframesRules||[]);} classNamesPromise(styleSheetId){return this._agent.collectClassNames(styleSheetId).then(classNames=>classNames||[]);} computedStylePromise(nodeId){return this._styleLoader.computedStylePromise(nodeId);} async backgroundColorsPromise(nodeId){const response=this._agent.invoke_getBackgroundColors({nodeId});if(response[Protocol.Error]){return null;} return response;} platformFontsPromise(nodeId){return this._agent.getPlatformFontsForNode(nodeId);} allStyleSheets(){const values=this._styleSheetIdToHeader.valuesArray();function styleSheetComparator(a,b){if(a.sourceURL<b.sourceURL){return-1;}else if(a.sourceURL>b.sourceURL){return 1;} return a.startLine-b.startLine||a.startColumn-b.startColumn;} values.sort(styleSheetComparator);return values;} async inlineStylesPromise(nodeId){const response=await this._agent.invoke_getInlineStylesForNode({nodeId});if(response[Protocol.Error]||!response.inlineStyle){return null;} const inlineStyle=new SDK.CSSStyleDeclaration(this,null,response.inlineStyle,SDK.CSSStyleDeclaration.Type.Inline);const attributesStyle=response.attributesStyle?new SDK.CSSStyleDeclaration(this,null,response.attributesStyle,SDK.CSSStyleDeclaration.Type.Attributes):null;return new InlineStyleResult(inlineStyle,attributesStyle);} forcePseudoState(node,pseudoClass,enable){const pseudoClasses=node.marker(PseudoStateMarker)||[];if(enable){if(pseudoClasses.indexOf(pseudoClass)>=0){return false;} pseudoClasses.push(pseudoClass);node.setMarker(PseudoStateMarker,pseudoClasses);}else{if(pseudoClasses.indexOf(pseudoClass)<0){return false;} pseudoClasses.remove(pseudoClass);if(pseudoClasses.length){node.setMarker(PseudoStateMarker,pseudoClasses);}else{node.setMarker(PseudoStateMarker,null);}} this._agent.forcePseudoState(node.id,pseudoClasses);this.dispatchEventToListeners(Events.PseudoStateForced,{node:node,pseudoClass:pseudoClass,enable:enable});return 
true;} pseudoState(node){return node.marker(PseudoStateMarker)||[];} async setMediaText(styleSheetId,range,newMediaText){Host.userMetrics.actionTaken(Host.UserMetrics.Action.StyleRuleEdited);try{await this._ensureOriginalStyleSheetText(styleSheetId);const mediaPayload=await this._agent.setMediaText(styleSheetId,range,newMediaText);if(!mediaPayload){return false;} this._domModel.markUndoableState();const edit=new Edit(styleSheetId,range,newMediaText,mediaPayload);this._fireStyleSheetChanged(styleSheetId,edit);return true;}catch(e){return false;}} async addRule(styleSheetId,ruleText,ruleLocation){try{await this._ensureOriginalStyleSheetText(styleSheetId);const rulePayload=await this._agent.addRule(styleSheetId,ruleText,ruleLocation);if(!rulePayload){return null;} this._domModel.markUndoableState();const edit=new Edit(styleSheetId,ruleLocation,ruleText,rulePayload);this._fireStyleSheetChanged(styleSheetId,edit);return new SDK.CSSStyleRule(this,rulePayload);}catch(e){return null;}} async requestViaInspectorStylesheet(node){const frameId=node.frameId()||(this._resourceTreeModel?this._resourceTreeModel.mainFrame.id:'');const headers=this._styleSheetIdToHeader.valuesArray();const styleSheetHeader=headers.find(header=>header.frameId===frameId&&header.isViaInspector());if(styleSheetHeader){return styleSheetHeader;} try{const styleSheetId=await this._agent.createStyleSheet(frameId);return styleSheetId&&this._styleSheetIdToHeader.get(styleSheetId)||null;}catch(e){return null;}} mediaQueryResultChanged(){this.dispatchEventToListeners(Events.MediaQueryResultChanged);} fontsUpdated(){this.dispatchEventToListeners(Events.FontsUpdated);} styleSheetHeaderForId(id){return this._styleSheetIdToHeader.get(id)||null;} styleSheetHeaders(){return this._styleSheetIdToHeader.valuesArray();} _fireStyleSheetChanged(styleSheetId,edit){this.dispatchEventToListeners(Events.StyleSheetChanged,{styleSheetId:styleSheetId,edit:edit});} _ensureOriginalStyleSheetText(styleSheetId){const header=this.styleSheetHeaderForId(styleSheetId);if(!header){return Promise.resolve((null));} let promise=this._originalStyleSheetText.get(header);if(!promise){promise=this.getStyleSheetText(header.id);this._originalStyleSheetText.set(header,promise);this._originalContentRequestedForTest(header);} return promise;} _originalContentRequestedForTest(header){} originalStyleSheetText(header){return this._ensureOriginalStyleSheetText(header.id);} _styleSheetAdded(header){console.assert(!this._styleSheetIdToHeader.get(header.styleSheetId));const styleSheetHeader=new SDK.CSSStyleSheetHeader(this,header);this._styleSheetIdToHeader.set(header.styleSheetId,styleSheetHeader);const url=styleSheetHeader.resourceURL();if(!this._styleSheetIdsForURL.get(url)){this._styleSheetIdsForURL.set(url,{});} const frameIdToStyleSheetIds=this._styleSheetIdsForURL.get(url);let styleSheetIds=frameIdToStyleSheetIds[styleSheetHeader.frameId];if(!styleSheetIds){styleSheetIds=[];frameIdToStyleSheetIds[styleSheetHeader.frameId]=styleSheetIds;} styleSheetIds.push(styleSheetHeader.id);this._sourceMapManager.attachSourceMap(styleSheetHeader,styleSheetHeader.sourceURL,styleSheetHeader.sourceMapURL);this.dispatchEventToListeners(Events.StyleSheetAdded,styleSheetHeader);} _styleSheetRemoved(id){const header=this._styleSheetIdToHeader.get(id);console.assert(header);if(!header){return;} this._styleSheetIdToHeader.remove(id);const url=header.resourceURL();const frameIdToStyleSheetIds=(this._styleSheetIdsForURL.get(url));console.assert(frameIdToStyleSheetIds,'No frameId to styleSheetId map 
is available for given style sheet URL.');frameIdToStyleSheetIds[header.frameId].remove(id);if(!frameIdToStyleSheetIds[header.frameId].length){delete frameIdToStyleSheetIds[header.frameId];if(!Object.keys(frameIdToStyleSheetIds).length){this._styleSheetIdsForURL.remove(url);}} this._originalStyleSheetText.remove(header);this._sourceMapManager.detachSourceMap(header);this.dispatchEventToListeners(Events.StyleSheetRemoved,header);} styleSheetIdsForURL(url){const frameIdToStyleSheetIds=this._styleSheetIdsForURL.get(url);if(!frameIdToStyleSheetIds){return[];} let result=[];for(const frameId in frameIdToStyleSheetIds){result=result.concat(frameIdToStyleSheetIds[frameId]);} return result;} async setStyleSheetText(styleSheetId,newText,majorChange){const header=(this._styleSheetIdToHeader.get(styleSheetId));console.assert(header);newText=CSSModel.trimSourceURL(newText);if(header.hasSourceURL){newText+='\n/*# sourceURL='+header.sourceURL+' */';} await this._ensureOriginalStyleSheetText(styleSheetId);const response=await this._agent.invoke_setStyleSheetText({styleSheetId:header.id,text:newText});const sourceMapURL=response.sourceMapURL;this._sourceMapManager.detachSourceMap(header);header.setSourceMapURL(sourceMapURL);this._sourceMapManager.attachSourceMap(header,header.sourceURL,header.sourceMapURL);if(sourceMapURL===null){return'Error in CSS.setStyleSheetText';} this._domModel.markUndoableState(!majorChange);this._fireStyleSheetChanged(styleSheetId);return null;} async getStyleSheetText(styleSheetId){try{const text=await this._agent.getStyleSheetText(styleSheetId);return text&&CSSModel.trimSourceURL(text);}catch(e){return null;}} _resetStyleSheets(){const headers=this._styleSheetIdToHeader.valuesArray();this._styleSheetIdsForURL.clear();this._styleSheetIdToHeader.clear();for(let i=0;i<headers.length;++i){this._sourceMapManager.detachSourceMap(headers[i]);this.dispatchEventToListeners(Events.StyleSheetRemoved,headers[i]);}} suspendModel(){this._isEnabled=false;return this._agent.disable().then(this._resetStyleSheets.bind(this));} async resumeModel(){return this._enable();} setEffectivePropertyValueForNode(nodeId,name,value){this._agent.setEffectivePropertyValueForNode(nodeId,name,value);} cachedMatchedCascadeForNode(node){if(this._cachedMatchedCascadeNode!==node){this.discardCachedMatchedCascade();} this._cachedMatchedCascadeNode=node;if(!this._cachedMatchedCascadePromise){this._cachedMatchedCascadePromise=this.matchedStylesPromise(node.id);} return this._cachedMatchedCascadePromise;} discardCachedMatchedCascade(){delete this._cachedMatchedCascadeNode;delete this._cachedMatchedCascadePromise;} dispose(){super.dispose();this._sourceMapManager.dispose();}} export const Events={FontsUpdated:Symbol('FontsUpdated'),MediaQueryResultChanged:Symbol('MediaQueryResultChanged'),ModelWasEnabled:Symbol('ModelWasEnabled'),PseudoStateForced:Symbol('PseudoStateForced'),StyleSheetAdded:Symbol('StyleSheetAdded'),StyleSheetChanged:Symbol('StyleSheetChanged'),StyleSheetRemoved:Symbol('StyleSheetRemoved')};export const MediaTypes=['all','braille','embossed','handheld','print','projection','screen','speech','tty','tv'];export const PseudoStateMarker='pseudo-state-marker';export class Edit{constructor(styleSheetId,oldRange,newText,payload){this.styleSheetId=styleSheetId;this.oldRange=oldRange;this.newRange=TextUtils.TextRange.fromEdit(oldRange,newText);this.newText=newText;this.payload=payload;}} export class 
CSSLocation{constructor(header,lineNumber,columnNumber){this._cssModel=header.cssModel();this.styleSheetId=header.id;this.url=header.resourceURL();this.lineNumber=lineNumber;this.columnNumber=columnNumber||0;} cssModel(){return this._cssModel;} header(){return this._cssModel.styleSheetHeaderForId(this.styleSheetId);}} export class CSSDispatcher{constructor(cssModel){this._cssModel=cssModel;} mediaQueryResultChanged(){this._cssModel.mediaQueryResultChanged();} fontsUpdated(){this._cssModel.fontsUpdated();}
styleSheetChanged(styleSheetId){this._cssModel._fireStyleSheetChanged(styleSheetId);} styleSheetAdded(header){this._cssModel._styleSheetAdded(header);}
styleSheetRemoved(id){this._cssModel._styleSheetRemoved(id);}} export class ComputedStyleLoader{constructor(cssModel){this._cssModel=cssModel;this._nodeIdToPromise=new Map();} computedStylePromise(nodeId){let promise=this._nodeIdToPromise.get(nodeId);if(promise){return promise;} promise=this._cssModel._agent.getComputedStyleForNode(nodeId).then(parsePayload.bind(this));this._nodeIdToPromise.set(nodeId,promise);return promise;function parsePayload(computedPayload){this._nodeIdToPromise.delete(nodeId);if(!computedPayload||!computedPayload.length){return null;} const result=new Map();for(const property of computedPayload){result.set(property.name,property.value);} return result;}}} export class InlineStyleResult{constructor(inlineStyle,attributesStyle){this.inlineStyle=inlineStyle;this.attributesStyle=attributesStyle;}} self.SDK=self.SDK||{};SDK=SDK||{};SDK.CSSModel=CSSModel;SDK.CSSModel.Events=Events;SDK.CSSModel.MediaTypes=MediaTypes;SDK.CSSModel.PseudoStateMarker=PseudoStateMarker;SDK.CSSModel.Edit=Edit;SDK.CSSModel.ComputedStyleLoader=ComputedStyleLoader;SDK.CSSModel.InlineStyleResult=InlineStyleResult;SDK.CSSLocation=CSSLocation;SDK.CSSDispatcher=CSSDispatcher;SDK.SDKModel.register(SDK.CSSModel,SDK.Target.Capability.DOM,true);SDK.CSSModel.RuleUsage;SDK.CSSModel.ContrastInfo;
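// Illustrative note, not part of the DevTools source: CSSModel.trimSourceURL
// strips a trailing "/*# sourceURL=... */" line from style sheet text, e.g.
//   CSSModel.trimSourceURL('body{}\n/*# sourceURL=inline.css */')  // -> 'body{}'
// setStyleSheetText above relies on this so the original sourceURL comment can
// be re-appended after an edit without being duplicated.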
startQiskit_Class3343.py
# qubit number=4 # total number=49 import cirq import qiskit from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister from qiskit import BasicAer, execute, transpile from pprint import pprint from qiskit.test.mock import FakeVigo from math import log2 import numpy as np import networkx as nx def bitwise_xor(s: str, t: str) -> str: length = len(s) res = [] for i in range(length): res.append(str(int(s[i]) ^ int(t[i]))) return ''.join(res[::-1]) def bitwise_dot(s: str, t: str) -> str: length = len(s) res = 0 for i in range(length): res += int(s[i]) * int(t[i]) return str(res % 2) def build_oracle(n: int, f) -> QuantumCircuit: # implement the oracle O_f # NOTE: use multi_control_toffoli_gate ('noancilla' mode) # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate controls = QuantumRegister(n, "ofc") target = QuantumRegister(1, "oft") oracle = QuantumCircuit(controls, target, name="Of") for i in range(2 ** n): rep = np.binary_repr(i, n) if f(rep) == "1": for j in range(n): if rep[j] == "0":
oracle.x(controls[j]) oracle.mct(controls, target[0], None, mode='noancilla') for j in range(n): if rep[j] == "0":
oracle.x(controls[j]) # oracle.barrier() return oracle def make_circuit(n:int,f) -> QuantumCircuit: # circuit begin input_qubit = QuantumRegister(n,"qc") classical = ClassicalRegister(n, "qm") prog = QuantumCircuit(input_qubit, classical) prog.cx(input_qubit[0],input_qubit[3]) # number=13 prog.cx(input_qubit[0],input_qubit[3]) # number=17 prog.x(input_qubit[3]) # number=18 prog.cx(input_qubit[0],input_qubit[3]) # number=19 prog.cx(input_qubit[0],input_qubit[3]) # number=15 prog.h(input_qubit[1]) # number=2 prog.h(input_qubit[2]) # number=3 prog.h(input_qubit[3]) # number=4 prog.y(input_qubit[3]) # number=12 prog.h(input_qubit[0]) # number=5 oracle = build_oracle(n-1, f) prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]]) prog.h(input_qubit[1]) # number=6 prog.h(input_qubit[2]) # number=7 prog.h(input_qubit[3]) # number=37 prog.cz(input_qubit[0],input_qubit[3]) # number=38 prog.h(input_qubit[3]) # number=39 prog.cx(input_qubit[0],input_qubit[3]) # number=40 prog.x(input_qubit[3]) # number=41 prog.h(input_qubit[3]) # number=43 prog.cz(input_qubit[0],input_qubit[3]) # number=44 prog.h(input_qubit[3]) # number=45 prog.h(input_qubit[3]) # number=30 prog.cz(input_qubit[0],input_qubit[3]) # number=31 prog.h(input_qubit[3]) # number=32 prog.h(input_qubit[0]) # number=33 prog.cz(input_qubit[3],input_qubit[0]) # number=34 prog.rx(0.33300882128051834,input_qubit[2]) # number=36 prog.h(input_qubit[0]) # number=35 prog.cx(input_qubit[3],input_qubit[0]) # number=23 prog.cx(input_qubit[3],input_qubit[0]) # number=46 prog.z(input_qubit[3]) # number=47 prog.cx(input_qubit[3],input_qubit[0]) # number=48 prog.cx(input_qubit[3],input_qubit[0]) # number=25 prog.cx(input_qubit[3],input_qubit[0]) # number=22 prog.h(input_qubit[3]) # number=8 prog.h(input_qubit[0]) # number=9 prog.y(input_qubit[2]) # number=10 prog.y(input_qubit[2]) # number=11 # circuit end return prog if __name__ == '__main__': a = "111" b = "0" f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b) prog = make_circuit(4,f) backend = BasicAer.get_backend('statevector_simulator') sample_shot =8000 info = execute(prog, backend=backend).result().get_statevector() qubits = round(log2(len(info))) info = { np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3) for i in range(2 ** qubits) } backend = FakeVigo() circuit1 = transpile(prog,backend,optimization_level=2) writefile = open("../data/startQiskit_Class3343.csv","w") print(info,file=writefile) print("results end", file=writefile) print(circuit1.__len__(),file=writefile) print(circuit1,file=writefile) writefile.close()
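# Quick sanity check of the bit helpers above (illustrative, not part of the
# original script). Note that bitwise_xor reverses the bit order of its result.
assert bitwise_dot('101', '110') == '1'  # 1*1 + 0*1 + 1*0 = 1 (mod 2)
assert bitwise_xor('101', '110') == '110'  # per-bit xor gives 011, then reversed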
dashboard.controller.js
(function() { 'use strict'; angular .module('app.dashboard') .controller('Dashboard', Dashboard); /* @ngInject */ function Dashboard(logger) { /*jshint validthis: true */ var vm = this; vm.title = 'Dashboard'; activate(); function
activate() { logger.info('Activated Dashboard View'); } } })();
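// Illustrative route wiring (an assumption, not part of this file): with the
// controllerAs syntax the view binds to `vm.title`, e.g.
//   $routeProvider.when('/dashboard', {
//       templateUrl: 'app/dashboard/dashboard.html',  // assumed template path
//       controller: 'Dashboard',
//       controllerAs: 'vm'
//   });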
edgedevice.go
package edgedevice import ( "context" "reflect" "time" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "sigs.k8s.io/controller-runtime/pkg/client" "github.com/project-flotta/flotta-operator/api/v1alpha1" "github.com/project-flotta/flotta-operator/internal/common/indexer" ) //go:generate mockgen -package=edgedevice -destination=mock_edgedevice.go . Repository type Repository interface { Read(ctx context.Context, name string, namespace string) (*v1alpha1.EdgeDevice, error) Create(ctx context.Context, edgeDevice *v1alpha1.EdgeDevice) error PatchStatus(ctx context.Context, edgeDevice *v1alpha1.EdgeDevice, patch *client.Patch) error Patch(ctx context.Context, old, new *v1alpha1.EdgeDevice) error ListForSelector(ctx context.Context, selector *metav1.LabelSelector, namespace string) ([]v1alpha1.EdgeDevice, error) ListForWorkload(ctx context.Context, name string, namespace string) ([]v1alpha1.EdgeDevice, error) RemoveFinalizer(ctx context.Context, edgeDevice *v1alpha1.EdgeDevice, finalizer string) error UpdateLabels(ctx context.Context, device *v1alpha1.EdgeDevice, labels map[string]string) error } type CRRepository struct { client client.Client } func NewEdgeDeviceRepository(client client.Client) *CRRepository { return &CRRepository{client: client} } func (r *CRRepository) Read(ctx context.Context, name string, namespace string) (*v1alpha1.EdgeDevice, error) { edgeDevice := v1alpha1.EdgeDevice{} err := r.client.Get(ctx, client.ObjectKey{Namespace: namespace, Name: name}, &edgeDevice) return &edgeDevice, err } func (r *CRRepository) Create(ctx context.Context, edgeDevice *v1alpha1.EdgeDevice) error { return r.client.Create(ctx, edgeDevice) } func (r *CRRepository) PatchStatus(ctx context.Context, edgeDevice *v1alpha1.EdgeDevice, patch *client.Patch) error { return r.client.Status().Patch(ctx, edgeDevice, *patch) } func (r *CRRepository) Patch(ctx context.Context, old, new *v1alpha1.EdgeDevice) error { patch := client.MergeFrom(old) return r.client.Patch(ctx, new, patch) } func (r CRRepository) ListForSelector(ctx context.Context, selector *metav1.LabelSelector, namespace string) ([]v1alpha1.EdgeDevice, error) { s, err := metav1.LabelSelectorAsSelector(selector) if err != nil { return nil, err } options := client.ListOptions{ Namespace: namespace, LabelSelector: s, } var edl v1alpha1.EdgeDeviceList err = r.client.List(ctx, &edl, &options) if err != nil { return nil, err } return edl.Items, nil } func (r CRRepository) ListForWorkload(ctx context.Context, name string, namespace string) ([]v1alpha1.EdgeDevice, error) { var edl v1alpha1.EdgeDeviceList err := r.client.List(ctx, &edl, client.MatchingFields{indexer.DeviceByWorkloadIndexKey: name}, client.InNamespace(namespace)) if err != nil { return nil, err } return edl.Items, nil } func (r *CRRepository) RemoveFinalizer(ctx context.Context, edgeDevice *v1alpha1.EdgeDevice, finalizer string) error { cp := edgeDevice.DeepCopy() var finalizers []string for _, f := range cp.Finalizers { if f != finalizer { finalizers = append(finalizers, f) } } cp.Finalizers = finalizers err := r.Patch(ctx, edgeDevice, cp) if err == nil { edgeDevice.Finalizers = cp.Finalizers } return err } func (r *CRRepository) UpdateLabels(ctx context.Context, device *v1alpha1.EdgeDevice, labels map[string]string) error { err := r.updateLabels(ctx, device, labels) if err == nil { return nil } // retry patching the edge device labels because the device can be updated concurrently for i := 1; i < 4; i++ { time.Sleep(time.Duration(i*50) * time.Millisecond) device2, err := r.Read(ctx,
device.Name, device.Namespace) if err != nil { continue } err = r.updateLabels(ctx, device2, labels) if err == nil
{ return nil }
} return err } func (r *CRRepository) updateLabels(ctx context.Context, device *v1alpha1.EdgeDevice, labels map[string]string) error { deviceCopy := device.DeepCopy() deviceLabels := deviceCopy.Labels if deviceLabels == nil { deviceLabels = make(map[string]string) } for key, value := range labels { deviceLabels[key] = value } if reflect.DeepEqual(deviceLabels, device.Labels) { return nil } deviceCopy.Labels = deviceLabels err := r.Patch(ctx, device, deviceCopy) if err == nil { device.Labels = deviceCopy.Labels } return err }
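// exampleAddLabel is an illustrative sketch, not part of the original
// repository code: it shows the intended call pattern for UpdateLabels, which
// retries internally because devices may be patched concurrently. The device
// name and namespace are placeholders.
func exampleAddLabel(ctx context.Context, c client.Client) error {
	repo := NewEdgeDeviceRepository(c)
	device, err := repo.Read(ctx, "device-1", "default")
	if err != nil {
		return err
	}
	return repo.UpdateLabels(ctx, device, map[string]string{"zone": "east"})
}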
wheel.rs
//! Time wheel based timer service. //! //! Inspired by linux kernel timers system #![allow(arithmetic_overflow)] use std::cell::RefCell; use std::time::{Duration, Instant, SystemTime}; use std::{cmp::max, future::Future, mem, pin::Pin, rc::Rc, task, task::Poll}; use futures_timer::Delay; use slab::Slab; use crate::task::LocalWaker; // Clock divisor for the next level const LVL_CLK_SHIFT: u64 = 3; const LVL_CLK_DIV: u64 = 1 << LVL_CLK_SHIFT; const LVL_CLK_MASK: u64 = LVL_CLK_DIV - 1; const fn lvl_shift(n: u64) -> u64 { n * LVL_CLK_SHIFT } const fn lvl_gran(n: u64) -> u64 { 1 << lvl_shift(n) } // Resolution: // 0: 1 millis // 4: ~17 millis const UNITS: u64 = 4; // const UNITS: u64 = 0; const fn to_units(n: u64) -> u64 { n >> UNITS } const fn to_millis(n: u64) -> u64 { n << UNITS } // The time start value for each level to select the bucket at enqueue time const fn lvl_start(lvl: u64) -> u64 { (LVL_SIZE - 1) << ((lvl - 1) * LVL_CLK_SHIFT) } // Size of each clock level const LVL_BITS: u64 = 6; const LVL_SIZE: u64 = 1 << LVL_BITS; const LVL_MASK: u64 = LVL_SIZE - 1; // Level depth const LVL_DEPTH: u64 = 8; const fn lvl_offs(n: u64) -> u64 { n * LVL_SIZE } // The cutoff (max. capacity of the wheel) const WHEEL_TIMEOUT_CUTOFF: u64 = lvl_start(LVL_DEPTH); const WHEEL_TIMEOUT_MAX: u64 = WHEEL_TIMEOUT_CUTOFF - (lvl_gran(LVL_DEPTH - 1)); const WHEEL_SIZE: usize = (LVL_SIZE as usize) * (LVL_DEPTH as usize); // Low res time resolution const LOWRES_RESOLUTION: Duration = Duration::from_millis(5); const fn as_millis(dur: Duration) -> u64 { dur.as_secs() * 1_000 + (dur.subsec_millis() as u64) } /// Returns an instant corresponding to “now”. /// /// Resolution is 5ms #[inline] pub fn now() -> Instant { TIMER.with(|t| t.borrow_mut().now(t)) } /// Returns the system time corresponding to “now”. /// /// Resolution is 5ms #[inline] pub fn system_time() -> SystemTime { TIMER.with(|t| t.borrow_mut().system_time(t)) } /// Returns the system time corresponding to “now”. /// /// If low resolution system time is not set, use system time. /// This method does not start timer driver. #[inline] pub fn query_system_time() -> SystemTime { TIMER.with(|t| t.borrow().query_system_time()) } #[derive(Debug)] pub struct TimerHandle(usize); impl TimerHandle { /// Create a new timer and return its handle pub fn new(millis: u64) -> Self { TIMER.with(|t| Timer::add_timer(t, millis)) } /// Resets the `TimerHandle` instance to a new deadline. pub fn reset(&self, millis: u64) { TIMER.with(|t| Timer::update_timer(t, self.0, millis)) } pub fn is_elapsed(&self) -> bool { TIMER.with(|t| t.borrow_mut().timers[self.0].bucket.is_none()) } pub fn poll_elapsed(&self, cx: &mut task::Context<'_>) -> Poll<()> { TIMER.with(|t| { let entry = &t.borrow().timers[self.0]; if entry.bucket.is_none() { Poll::Ready(()) } else { entry.task.register(cx.waker()); Poll::Pending } }) } } impl Drop for TimerHandle { fn drop(&mut self) { TIMER.with(|t| t.borrow_mut().remove_timer(self.0)); } } bitflags::bitflags! { pub struct Flags: u8 { const DRIVER_STARTED = 0b0000_0001; const DRIVER_RECALC = 0b0000_0010; const LOWRES_TIMER = 0b0000_1000; const LOWRES_DRIVER = 0b0001_0000; } } thread_local! { static TIMER: Rc<RefCell<Timer>> = Rc::new(RefCell::new(Timer::new())); } struct Timer { timers: Slab<TimerEntry>, elapsed: u64, elapsed_time: Option<Instant>, next_expiry: u64, flags: Flags, driver: LocalWaker, driver_sleep: Delay, buckets: Vec<Bucket>, /// Bit field tracking which buckets currently contain entries.
occupied: [u64; WHEEL_SIZE], lowres_time: Option<Instant>, lowres_stime: Option<SystemTime>, lowres_driver: LocalWaker, lowres_driver_sleep: Delay, } impl Timer {
fn new() -> Self { Timer { buckets: Self::create_buckets(), timers: Slab::default(), elapsed: 0, elapsed_time: None, next_expiry: u64::MAX, flags: Flags::empty(), driver: LocalWaker::new(), driver_sleep: Delay::new(Duration::ZERO), occupied: [0; WHEEL_SIZE], lowres_time: None, lowres_stime: None, lowres_driver: LocalWaker::new(), lowres_driver_sleep: Delay::new(Duration::ZERO), } } fn create_buckets() -> Vec<Bucket> { let mut buckets = Vec::with_capacity(WHEEL_SIZE); for idx in 0..WHEEL_SIZE { let lvl = idx / (LVL_SIZE as usize); let offs = idx % (LVL_SIZE as usize); buckets.push(Bucket::new(lvl, offs)) } buckets } fn now(&mut self, inner: &Rc<RefCell<Timer>>) -> Instant { if let Some(cur) = self.lowres_time { cur } else { let now = Instant::now(); self.lowres_time = Some(now); if self.flags.contains(Flags::LOWRES_DRIVER) { self.lowres_driver.wake(); } else { LowresTimerDriver::start(self, inner); } now } } fn system_time(&mut self, inner: &Rc<RefCell<Timer>>) -> SystemTime { if let Some(cur) = self.lowres_stime { cur } else { let now = SystemTime::now(); self.lowres_stime = Some(now); if self.flags.contains(Flags::LOWRES_DRIVER) { self.lowres_driver.wake(); } else { LowresTimerDriver::start(self, inner); } now } } fn query_system_time(&self) -> SystemTime { if let Some(cur) = self.lowres_stime { cur } else { SystemTime::now() } } fn elapsed_time(&mut self) -> Instant { if let Some(elapsed_time) = self.elapsed_time { elapsed_time } else { let elapsed_time = Instant::now(); self.elapsed_time = Some(elapsed_time); elapsed_time } } /// Add the timer into the hash bucket fn add_timer(inner: &Rc<RefCell<Self>>, millis: u64) -> TimerHandle { let mut slf = inner.borrow_mut(); if millis == 0 { let entry = slf.timers.vacant_entry(); let no = entry.key(); entry.insert(TimerEntry { bucket_entry: 0, bucket: None, task: LocalWaker::new(), }); return TimerHandle(no); } let now = slf.now(inner); let elapsed_time = slf.elapsed_time(); let delta = if now >= elapsed_time { to_units(as_millis(now - elapsed_time) + millis) } else { to_units(millis) }; let (no, bucket_expiry) = { let slf = &mut *slf; // crate timer entry let (idx, bucket_expiry) = slf.calc_wheel_index(slf.elapsed.wrapping_add(delta), delta); let entry = slf.timers.vacant_entry(); let no = entry.key(); let bucket = &mut slf.buckets[idx]; let bucket_entry = bucket.add_entry(no); entry.insert(TimerEntry { bucket_entry, bucket: Some(idx as u16), task: LocalWaker::new(), }); slf.occupied[bucket.lvl as usize] |= bucket.bit; (no, bucket_expiry) }; // Check whether new bucket expire earlier if bucket_expiry < slf.next_expiry { slf.next_expiry = bucket_expiry; if slf.flags.contains(Flags::DRIVER_STARTED) { slf.flags.insert(Flags::DRIVER_RECALC); slf.driver.wake(); } else { TimerDriver::start(&mut slf, inner); } } TimerHandle(no) } /// Update existing timer fn update_timer(inner: &Rc<RefCell<Self>>, hnd: usize, millis: u64) { let mut slf = inner.borrow_mut(); if millis == 0 { slf.remove_timer_bucket(hnd); slf.timers[hnd].bucket = None; return; } let now = slf.now(inner); let elapsed_time = slf.elapsed_time(); let delta = if now >= elapsed_time { max(to_units(as_millis(now - elapsed_time) + millis), 1) } else { max(to_units(millis), 1) }; let bucket_expiry = { let slf = &mut *slf; // calc bucket let (idx, bucket_expiry) = slf.calc_wheel_index(slf.elapsed.wrapping_add(delta), delta); let entry = &mut slf.timers[hnd]; // cleanup active timer if let Some(bucket) = entry.bucket { // do not do anything if wheel bucket is the same if idx == bucket as usize { 
return; } // remove timer entry from current bucket let b = &mut slf.buckets[bucket as usize]; b.entries.remove(entry.bucket_entry); if b.entries.is_empty() { slf.occupied[b.lvl as usize] &= b.bit_n; } } // put timer to new bucket let bucket = &mut slf.buckets[idx]; entry.bucket = Some(idx as u16); entry.bucket_entry = bucket.add_entry(hnd); slf.occupied[bucket.lvl as usize] |= bucket.bit; bucket_expiry }; // Check whether new bucket expire earlier if bucket_expiry < slf.next_expiry { slf.next_expiry = bucket_expiry; if slf.flags.contains(Flags::DRIVER_STARTED) { slf.flags.insert(Flags::DRIVER_RECALC); slf.driver.wake(); } else { TimerDriver::start(&mut slf, inner); } } } fn remove_timer(&mut self, handle: usize) { self.remove_timer_bucket(handle); self.timers.remove(handle); } fn remove_timer_bucket(&mut self, handle: usize) { let entry = &mut self.timers[handle]; if let Some(bucket) = entry.bucket { let b = &mut self.buckets[bucket as usize]; b.entries.remove(entry.bucket_entry); if b.entries.is_empty() { self.occupied[b.lvl as usize] &= b.bit_n; } } } /// Find next expiration bucket fn next_pending_bucket(&mut self) -> Option<u64> { let mut clk = self.elapsed; let mut next = u64::MAX; for lvl in 0..LVL_DEPTH { let lvl_clk = clk & LVL_CLK_MASK; let occupied = self.occupied[lvl as usize]; let pos = if occupied == 0 { -1 } else { let zeros = occupied .rotate_right((clk & LVL_MASK) as u32) .trailing_zeros() as usize; zeros as isize }; if pos >= 0 { let tmp = (clk + pos as u64) << lvl_shift(lvl as u64); if tmp < next { next = tmp } // If the next expiration happens before we reach // the next level, no need to check further. if (pos as u64) <= ((LVL_CLK_DIV - lvl_clk) & LVL_CLK_MASK) { break; } } let adj = if lvl_clk == 0 { 0 } else { 1 }; clk >>= LVL_CLK_SHIFT; clk += adj; } if next < u64::MAX { Some(next) } else { None } } /// Get next expiry time in millis fn next_expiry_ms(&mut self) -> u64 { to_millis(self.next_expiry.saturating_sub(self.elapsed)) } fn execute_expired_timers(&mut self) { let mut clk = self.next_expiry; for lvl in 0..LVL_DEPTH { let idx = (clk & LVL_MASK) + lvl * LVL_SIZE; let b = &mut self.buckets[idx as usize]; if !b.entries.is_empty() { self.occupied[b.lvl as usize] &= b.bit_n; for no in b.entries.drain() { if let Some(timer) = self.timers.get_mut(no) { timer.complete(); } } } // Is it time to look at the next level? if (clk & LVL_CLK_MASK) != 0 { break; } // Shift clock for the next level granularity clk >>= LVL_CLK_SHIFT; } } fn calc_wheel_index(&self, expires: u64, delta: u64) -> (usize, u64) { if delta < lvl_start(1) { Self::calc_index(expires, 0) } else if delta < lvl_start(2) { Self::calc_index(expires, 1) } else if delta < lvl_start(3) { Self::calc_index(expires, 2) } else if delta < lvl_start(4) { Self::calc_index(expires, 3) } else if delta < lvl_start(5) { Self::calc_index(expires, 4) } else if delta < lvl_start(6) { Self::calc_index(expires, 5) } else if delta < lvl_start(7) { Self::calc_index(expires, 6) } else if delta < lvl_start(8) { Self::calc_index(expires, 7) } else { // Force expire obscene large timeouts to expire at the // capacity limit of the wheel. if delta >= WHEEL_TIMEOUT_CUTOFF { Self::calc_index( self.elapsed.wrapping_add(WHEEL_TIMEOUT_MAX), LVL_DEPTH - 1, ) } else { Self::calc_index(expires, LVL_DEPTH - 1) } } } /// Helper function to calculate the bucket index and bucket expiration fn calc_index(expires: u64, lvl: u64) -> (usize, u64) { // The timer wheel has to guarantee that a timer does not fire // early. 
Early expiry can happen due to: // - Timer is armed at the edge of a tick // - Truncation of the expiry time in the outer wheel levels // // Round up with level granularity to prevent this. let expires = (expires + lvl_gran(lvl)) >> lvl_shift(lvl); ( (lvl_offs(lvl) + (expires & LVL_MASK)) as usize, expires << lvl_shift(lvl), ) } fn stop_wheel(&mut self) { // mark all timers as elapsed let mut buckets = mem::take(&mut self.buckets); for b in &mut buckets { for no in b.entries.drain() { self.timers[no].bucket = None; } } // cleanup info self.flags = Flags::empty(); self.buckets = buckets; self.occupied = [0; WHEEL_SIZE]; self.next_expiry = u64::MAX; self.elapsed = 0; self.elapsed_time = None; self.lowres_time = None; self.lowres_stime = None; } } #[derive(Debug)] struct Bucket { lvl: u32, bit: u64, bit_n: u64, entries: Slab<usize>, } impl Bucket { fn add_entry(&mut self, no: usize) -> usize { self.entries.insert(no) } } impl Bucket { fn new(lvl: usize, offs: usize) -> Self { let bit = 1 << (offs as u64); Bucket { bit, lvl: lvl as u32, bit_n: !bit, entries: Slab::default(), } } } #[derive(Debug)] struct TimerEntry { bucket: Option<u16>, bucket_entry: usize, task: LocalWaker, } impl TimerEntry { fn complete(&mut self) { if self.bucket.is_some() { self.bucket.take(); self.task.wake(); } } } struct TimerDriver(Rc<RefCell<Timer>>); impl TimerDriver { fn start(slf: &mut Timer, cell: &Rc<RefCell<Timer>>) { slf.flags.insert(Flags::DRIVER_STARTED); slf.driver_sleep = Delay::new(Duration::from_millis(slf.next_expiry_ms())); crate::spawn(TimerDriver(cell.clone())); } } impl Drop for TimerDriver { fn drop(&mut self) { if let Ok(mut timer) = self.0.try_borrow_mut() { timer.stop_wheel(); } } } impl Future for TimerDriver { type Output = (); fn poll(self: Pin<&mut Self>, cx: &mut task::Context<'_>) -> Poll<Self::Output> { let mut inner = self.0.borrow_mut(); inner.driver.register(cx.waker()); if inner.flags.contains(Flags::DRIVER_RECALC) { inner.flags.remove(Flags::DRIVER_RECALC); let now = Instant::now(); let deadline = if let Some(diff) = now.checked_duration_since(inner.elapsed_time()) { Duration::from_millis(inner.next_expiry_ms()).saturating_sub(diff) } else { Duration::from_millis(inner.next_expiry_ms()) }; inner.driver_sleep.reset(deadline); } loop { if Pin::new(&mut inner.driver_sleep).poll(cx).is_ready() { let now = Instant::now(); inner.elapsed = inner.next_expiry; inner.elapsed_time = Some(now); inner.execute_expired_timers(); if let Some(next_expiry) = inner.next_pending_bucket() { inner.next_expiry = next_expiry; let dur = Duration::from_millis(inner.next_expiry_ms()); inner.driver_sleep.reset(dur); continue; } else { inner.next_expiry = u64::MAX; inner.elapsed_time = None; } } return Poll::Pending; } } } struct LowresTimerDriver(Rc<RefCell<Timer>>); impl LowresTimerDriver { fn start(slf: &mut Timer, cell: &Rc<RefCell<Timer>>) { slf.flags.insert(Flags::LOWRES_DRIVER); slf.lowres_driver_sleep = Delay::new(LOWRES_RESOLUTION); crate::spawn(LowresTimerDriver(cell.clone())); } } impl Drop for LowresTimerDriver { fn drop(&mut self) { if let Ok(mut timer) = self.0.try_borrow_mut() { timer.stop_wheel(); } } } impl Future for LowresTimerDriver { type Output = (); fn poll(self: Pin<&mut Self>, cx: &mut task::Context<'_>) -> Poll<Self::Output> { let mut inner = self.0.borrow_mut(); inner.lowres_driver.register(cx.waker()); loop { if inner.flags.contains(Flags::LOWRES_TIMER) { if Pin::new(&mut inner.lowres_driver_sleep).poll(cx).is_ready() { inner.lowres_time = None; inner.lowres_stime = None; 
inner.flags.remove(Flags::LOWRES_TIMER); } return Poll::Pending; } else { inner.flags.insert(Flags::LOWRES_TIMER); inner.lowres_driver_sleep.reset(LOWRES_RESOLUTION); } } } } #[cfg(test)] mod tests { use super::*; use crate::time::{interval, sleep, Millis}; #[ntex_macros::rt_test2] async fn test_timer() { crate::spawn(async { let s = interval(Millis(25)); loop { s.tick().await; } }); let time = Instant::now(); let fut1 = sleep(Millis(1000)); let fut2 = sleep(Millis(200)); fut2.await; let _elapsed = Instant::now() - time; #[cfg(not(target_os = "macos"))] assert!( _elapsed > Duration::from_millis(200) && _elapsed < Duration::from_millis(300), "elapsed: {:?}", _elapsed ); fut1.await; let _elapsed = Instant::now() - time; #[cfg(not(target_os = "macos"))] assert!( _elapsed > Duration::from_millis(1000) && _elapsed < Duration::from_millis(1200), // osx "elapsed: {:?}", _elapsed ); let time = Instant::now(); sleep(Millis(25)).await; let _elapsed = Instant::now() - time; #[cfg(not(target_os = "macos"))] assert!( _elapsed > Duration::from_millis(20) && _elapsed < Duration::from_millis(50), "elapsed: {:?}", _elapsed ); } }
} impl Timer {
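The level arithmetic in calc_index is easy to misread, so here is a self-contained sketch of the same rounding rule. The constants below (LVL_CLK_SHIFT = 3, LVL_BITS = 6) are assumptions for illustration only; the crate's actual constants are defined outside this excerpt.

// Standalone sketch of the wheel index math; the constants are assumed values.
const LVL_CLK_SHIFT: u64 = 3; // each level is 2^3 = 8x coarser than the previous one
const LVL_BITS: u64 = 6; // 64 buckets per level
const LVL_SIZE: u64 = 1 << LVL_BITS;
const LVL_MASK: u64 = LVL_SIZE - 1;

fn lvl_shift(lvl: u64) -> u64 { lvl * LVL_CLK_SHIFT }
fn lvl_gran(lvl: u64) -> u64 { 1 << lvl_shift(lvl) }
fn lvl_offs(lvl: u64) -> u64 { lvl * LVL_SIZE }

// Mirrors calc_index above: round the expiry up to the level granularity so a
// timer can never fire early, then derive the bucket slot and rounded expiry.
fn calc_index(expires: u64, lvl: u64) -> (usize, u64) {
    let expires = (expires + lvl_gran(lvl)) >> lvl_shift(lvl);
    (
        (lvl_offs(lvl) + (expires & LVL_MASK)) as usize,
        expires << lvl_shift(lvl),
    )
}

fn main() {
    // A timer due at tick 70 placed on level 1 (granularity 8) lands in
    // bucket 64 + (9 & 63) = 73 and is rounded up to tick 72, never earlier.
    assert_eq!(calc_index(70, 1), (73, 72));
}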
config.go
// Package config - main configuration routine
package config

import (
	"path/filepath"
	"runtime"

	"github.com/reshimahendra/lbw-go/internal/pkg/logger"
	"github.com/spf13/viper"
)

var (
	// config is a local variable which will be passed to the Get() function
	config *Configuration

	// get the root directory of our project
	_, base, _, _ = runtime.Caller(0)
	basePath      = filepath.Join(filepath.Dir(base), "../..")

	// wrap viper functions so they can be mocked in tests
	viperReadInConfig = viper.ReadInConfig
	viperUnmarshal    = viper.Unmarshal
)

// Configuration is the main configuration wrapper
type Configuration struct {
	// Server is server configuration
	Server Server

	// Database is database configuration
	Database Database

	// Account is account configuration
	Account Account

	// Logger is logger configuration
	Logger Logger
}

// Get will get the configuration setting
func Get() *Configuration {
	return config
}

// Setup will initiate the main configuration
func Setup() (err error)
{
	var c *Configuration

	// locate the configuration file
	viper.SetConfigName(".config.yaml")
	viper.SetConfigType("yaml")
	viper.AddConfigPath(basePath)
	viper.AddConfigPath(filepath.Join(basePath, "config"))
	viper.AddConfigPath("./config")

	// try to read the config file
	if err = viperReadInConfig(); err != nil {
		logger.Errorf("error reading config file: %v\n", err)
		return err
	}

	// unmarshal the yaml file into the Configuration struct
	if err = viperUnmarshal(&c); err != nil {
		logger.Errorf("unable to decode config into struct: %v\n", err)
		return err
	}

	config = c

	return
}
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- def main():
if __name__ == '__main__': main()
from setuptools import setup version_dict = {} init_filename = "boxtree/version.py" exec(compile(open(init_filename, "r").read(), init_filename, "exec"), version_dict) setup(name="boxtree", version=version_dict["VERSION_TEXT"], description="Quadtree/octree building in Python and OpenCL", long_description=open("README.rst", "rt").read(), author="Andreas Kloeckner", author_email="[email protected]", license="MIT", url="http://wiki.tiker.net/BoxTree", classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Intended Audience :: Other Audience', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python', # We use conditional expressions, so 2.5 is the bare minimum. 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', # 3.x has not yet been tested. 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Visualization', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', ], packages=["boxtree"], install_requires=[ "pytools>=2018.4", "pyopencl>=2018.2.2", "Mako>=0.7.3", "pytest>=2.3", "cgen>=2013.1.2", "six", ])
mod.rs
pub mod engines; pub trait Peripheral { fn is_interrupt(&mut self) -> u16; } pub trait AgcIoPeriph {
fn read(&self, channel_idx: usize) -> u16;
    fn write(&mut self, channel_idx: usize, value: u16);
    fn is_interrupt(&mut self) -> u16;
}
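For illustration, a minimal implementer of these traits might look like the sketch below; DummyDsky, its channel number, and the returned interrupt encoding are hypothetical and not part of this crate.

// Hypothetical peripheral used only to illustrate the AgcIoPeriph contract.
struct DummyDsky {
    pending_interrupt: bool,
}

impl AgcIoPeriph for DummyDsky {
    fn read(&self, _channel_idx: usize) -> u16 {
        // A real peripheral would decode the channel index here.
        0
    }

    fn write(&mut self, channel_idx: usize, value: u16) {
        // React to writes; raise an interrupt on a made-up channel number.
        if channel_idx == 0o13 && value != 0 {
            self.pending_interrupt = true;
        }
    }

    fn is_interrupt(&mut self) -> u16 {
        // Report and clear the pending interrupt as a mask (assumed encoding).
        if std::mem::take(&mut self.pending_interrupt) { 1 } else { 0 }
    }
}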
active_directory_group_search.go
// Licensed to Elasticsearch B.V. under one or more contributor // license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright // ownership. Elasticsearch B.V. licenses this file to you under // the Apache License, Version 2.0 (the "License"); you may // not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // Code generated by go-swagger; DO NOT EDIT. package models // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "encoding/json" "github.com/go-openapi/errors" strfmt "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" "github.com/go-openapi/validate" ) // ActiveDirectoryGroupSearch Elasticsearch Security Active Directory realm group search configuration // swagger:model ActiveDirectoryGroupSearch type ActiveDirectoryGroupSearch struct { // Specifies a container DN to search for groups in which the user has membership BaseDn string `json:"base_dn,omitempty"` // Specifies whether the group search should be sub_tree, one_level or base. one_level only searches objects directly contained within the base_dn. The default sub_tree searches all objects contained under base_dn. base specifies that the base_dn is a group object, and that it is the only group considered. // Enum: [sub_tree one_level base] Scope string `json:"scope,omitempty"` } // Validate validates this active directory group search func (m *ActiveDirectoryGroupSearch) Validate(formats strfmt.Registry) error { var res []error if err := m.validateScope(formats); err != nil { res = append(res, err) } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil } var activeDirectoryGroupSearchTypeScopePropEnum []interface{} func init() { var res []string if err := json.Unmarshal([]byte(`["sub_tree","one_level","base"]`), &res); err != nil { panic(err) } for _, v := range res { activeDirectoryGroupSearchTypeScopePropEnum = append(activeDirectoryGroupSearchTypeScopePropEnum, v) } } const ( // ActiveDirectoryGroupSearchScopeSubTree captures enum value "sub_tree" ActiveDirectoryGroupSearchScopeSubTree string = "sub_tree" // ActiveDirectoryGroupSearchScopeOneLevel captures enum value "one_level" ActiveDirectoryGroupSearchScopeOneLevel string = "one_level" // ActiveDirectoryGroupSearchScopeBase captures enum value "base" ActiveDirectoryGroupSearchScopeBase string = "base" ) // prop value enum func (m *ActiveDirectoryGroupSearch) validateScopeEnum(path, location string, value string) error { if err := validate.Enum(path, location, value, activeDirectoryGroupSearchTypeScopePropEnum); err != nil { return err } return nil } func (m *ActiveDirectoryGroupSearch) validateScope(formats strfmt.Registry) error { if swag.IsZero(m.Scope) { // not required return nil } // value enum if err := m.validateScopeEnum("scope", "body", m.Scope); err != nil
return nil } // MarshalBinary interface implementation func (m *ActiveDirectoryGroupSearch) MarshalBinary() ([]byte, error) { if m == nil { return nil, nil } return swag.WriteJSON(m) } // UnmarshalBinary interface implementation func (m *ActiveDirectoryGroupSearch) UnmarshalBinary(b []byte) error { var res ActiveDirectoryGroupSearch if err := swag.ReadJSON(b, &res); err != nil { return err } *m = res return nil }
{ return err }
main.rs
/* * File: src/main.rs * Date: 12.09.2018 * Author: MarkAtk * * MIT License * * Copyright (c) 2018 MarkAtk * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ #[macro_use] extern crate clap; extern crate serial_unit_testing; use clap::{App, ArgMatches, AppSettings}; mod commands; mod windows; mod list; mod send; mod monitor; mod check; mod run; mod verify; mod version; fn
(matches: ArgMatches) -> Result<(), String> { match matches.subcommand() { ("send", Some(m)) => send::run(m), ("list", Some(m)) => list::run(m), ("monitor", Some(m)) => monitor::run(m), ("check", Some(m)) => check::run(m), ("run", Some(m)) => run::run(m), ("verify", Some(m)) => verify::run(m), ("version", Some(m)) => version::run(m), _ => Ok(()) } } fn main() { let matches = App::new("serial-unit-testing") .setting(AppSettings::VersionlessSubcommands) .setting(AppSettings::SubcommandRequiredElseHelp) .version(crate_version!()) .version_short("v") .about("Serial unit testing framework") .subcommand(send::command()) .subcommand(list::command()) .subcommand(monitor::command()) .subcommand(check::command()) .subcommand(run::command()) .subcommand(verify::command()) .subcommand(version::command()) .get_matches(); if let Err(e) = run(matches) { println!("{}", e); return; } }
run
common.py
import math import warnings import torch from typing import List, Union from torch import Tensor, nn from ..common import EncoderModule, _take __all__ = ["GenericTimmEncoder", "make_n_channel_input_std_conv"] class GenericTimmEncoder(EncoderModule): def __init__(self, timm_encoder: Union[nn.Module, str], layers: List[int] = None): strides = [] channels = [] default_layers = [] if isinstance(timm_encoder, str): import timm.models.factory timm_encoder = timm.models.factory.create_model(timm_encoder, pretrained=True) for i, oi in enumerate(timm_encoder.feature_info.out_indices): fi = timm_encoder.feature_info.info[i] strides.append(fi["reduction"]) channels.append(fi["num_chs"]) default_layers.append(i) if layers is None: layers = default_layers super().__init__(channels, strides, layers) self.encoder = timm_encoder def forward(self, x: Tensor) -> List[Tensor]: return _take(self.encoder(x), self._layers) def
(conv: nn.Module, in_channels: int, mode="auto", **kwargs) -> nn.Module:
    """
    Return a copy of the given convolution with the desired number of input channels

    Args:
        conv: Input nn.Conv2D object to copy settings/weights from
        in_channels: Desired number of input channels
        mode: Reserved for future use; currently ignored
        **kwargs: Optional overrides for Conv2D parameters
    """
    conv_cls = conv.__class__

    if conv.in_channels == in_channels:
        warnings.warn("make_n_channel_input call is spurious")
        return conv

    new_conv = conv_cls(
        in_channels,
        out_channels=conv.out_channels,
        kernel_size=kwargs.get("kernel_size", conv.kernel_size),
        stride=kwargs.get("stride", conv.stride),
        padding=kwargs.get("padding", conv.padding),
        dilation=kwargs.get("dilation", conv.dilation),
        groups=kwargs.get("groups", conv.groups),
        bias=kwargs.get("bias", conv.bias is not None),
        eps=kwargs.get("eps", conv.eps),
    )

    w = conv.weight
    if in_channels > conv.in_channels:
        # Tile the pretrained weights along the input-channel axis, then trim to size
        n = math.ceil(in_channels / float(conv.in_channels))
        w = torch.cat([w] * n, dim=1)
        w = w[:, :in_channels, ...]
        new_conv.weight = nn.Parameter(w, requires_grad=True)
    else:
        # Keep only the first `in_channels` input channels of the pretrained weights
        w = w[:, 0:in_channels, ...]
        new_conv.weight = nn.Parameter(w, requires_grad=True)

    return new_conv
make_n_channel_input_std_conv
application_resource.py
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from .proxy_resource import ProxyResource class ApplicationResource(ProxyResource): """The application resource. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: Azure resource ID. :vartype id: str :ivar name: Azure resource name. :vartype name: str :ivar type: Azure resource type. :vartype type: str :param location: Required. Resource location. :type location: str :param type_version: :type type_version: str :param parameters: :type parameters: list[~azure.mgmt.servicefabric.models.ApplicationParameter] :param upgrade_policy: :type upgrade_policy: ~azure.mgmt.servicefabric.models.ApplicationUpgradePolicy :param minimum_nodes: The minimum number of nodes where Service Fabric will reserve capacity for this application. Note that this does not mean that the services of this application will be placed on all of those nodes. If this property is set to zero, no capacity will be reserved. The value of this property cannot be more than the value of the MaximumNodes property. :type minimum_nodes: long :param maximum_nodes: The maximum number of nodes where Service Fabric will reserve capacity for this application. Note that this does not mean that the services of this application will be placed on all of those nodes. By default, the value of this property is zero and it means that the services can be placed on any node. Default value: 0 . :type maximum_nodes: long :param remove_application_capacity: The version of the application type :type remove_application_capacity: bool :param metrics: :type metrics: list[~azure.mgmt.servicefabric.models.ApplicationMetricDescription] :ivar provisioning_state: The current deployment or provisioning state, which only appears in the response :vartype provisioning_state: str :param type_name: :type type_name: str """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'location': {'required': True}, 'minimum_nodes': {'minimum': 0}, 'maximum_nodes': {'minimum': 0}, 'provisioning_state': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type_version': {'key': 'properties.typeVersion', 'type': 'str'}, 'parameters': {'key': 'properties.parameters', 'type': '[ApplicationParameter]'}, 'upgrade_policy': {'key': 'properties.upgradePolicy', 'type': 'ApplicationUpgradePolicy'}, 'minimum_nodes': {'key': 'properties.minimumNodes', 'type': 'long'}, 'maximum_nodes': {'key': 'properties.maximumNodes', 'type': 'long'}, 'remove_application_capacity': {'key': 'properties.removeApplicationCapacity', 'type': 'bool'}, 'metrics': {'key': 'properties.metrics', 'type': '[ApplicationMetricDescription]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'type_name': {'key': 'properties.typeName', 'type': 'str'}, } def __init__(self, **kwargs):
super(ApplicationResource, self).__init__(**kwargs) self.type_version = kwargs.get('type_version', None) self.parameters = kwargs.get('parameters', None) self.upgrade_policy = kwargs.get('upgrade_policy', None) self.minimum_nodes = kwargs.get('minimum_nodes', None) self.maximum_nodes = kwargs.get('maximum_nodes', 0) self.remove_application_capacity = kwargs.get('remove_application_capacity', None) self.metrics = kwargs.get('metrics', None) self.provisioning_state = None self.type_name = kwargs.get('type_name', None)
rpc.rs
use crate::{common::client::ClientId, coordinator::core::ServiceHandle}; use std::{future::Future, io, iter, pin::Pin, time::Duration}; use stubborn_io::{ReconnectOptions, StubbornTcpStream}; use tarpc::{ client::Config, rpc::server::{BaseChannel, Channel}, serde_transport::{tcp::listen, Transport}, }; use tokio::{net::ToSocketAddrs, stream::StreamExt}; use tokio_serde::formats::Json; use tracing_futures::Instrument; mod inner { use crate::common::client::ClientId; #[tarpc::service] pub trait Rpc { async fn end_training(id: ClientId, success: bool); } } pub use inner::Rpc; #[cfg(test)] pub use crate::tests::lib::rpc::coordinator::Client; #[cfg(not(test))] pub use inner::RpcClient as Client; impl Rpc for Server { type EndTrainingFut = Pin<Box<dyn Future<Output = ()> + Send>>; fn end_training( self, _: tarpc::context::Context, id: ClientId, success: bool, ) -> Self::EndTrainingFut
} /// A server that serves a single client. A new `Server` is created /// for each new client. #[derive(Clone)] struct Server(ServiceHandle); pub async fn client_connect<A: ToSocketAddrs + Unpin + Clone + Send + Sync + 'static>( addr: A, ) -> io::Result<Client> { let reconnect_opts = ReconnectOptions::new() .with_exit_if_first_connect_fails(false) .with_retries_generator(|| iter::repeat(Duration::from_secs(1))); let tcp_stream = StubbornTcpStream::connect_with_options(addr, reconnect_opts).await?; let transport = Transport::from((tcp_stream, Json::default())); Client::new(Config::default(), transport).spawn() } /// Run an RPC server that processes only one connection at a time. pub async fn serve<A: ToSocketAddrs + Send + Sync + 'static>( addr: A, service_handle: ServiceHandle, ) -> ::std::io::Result<()> { let mut listener = listen(addr, Json::default).await?; while let Some(accept_result) = listener.next().await { match accept_result { Ok(transport) => { let channel = BaseChannel::with_defaults(transport); let server = Server(service_handle.clone()); let handler = channel.respond_with(server.serve()); handler .execute() // FIXME: add peer to span .instrument(trace_span!("rpc_handler")) .await; } Err(e) => error!("failed to accept RPC connection: {:?}", e), } } Ok(()) }
{ debug!("handling end training request"); let span = trace_span!("rpc_end_training_handler", client_id = %id, success = &success); Box::pin(async move { self.0.end_training(id, success).await }.instrument(span)) }
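Assuming a coordinator is listening, a caller could use the reconnecting client roughly as follows. This is a sketch: the address is illustrative, and the exact return and error types of the generated end_training stub depend on the tarpc version in use.

// Sketch: report a finished training round to the coordinator.
// client_connect retries once per second until the server accepts
// (see ReconnectOptions above); the address here is an example.
async fn report_done(id: ClientId) -> std::io::Result<()> {
    let mut client = client_connect("127.0.0.1:5555").await?;
    // The generated stub takes a tarpc request context plus the service arguments.
    let _ = client.end_training(tarpc::context::current(), id, true).await;
    Ok(())
}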
LeftArrow.js
"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports["default"] = void 0; var _react = _interopRequireDefault(require("react")); var LeftArrow = function LeftArrow(props) { return /*#__PURE__*/_react["default"].createElement("svg", props, /*#__PURE__*/_react["default"].createElement("path", { d: "M336 275L126 485h806c13 0 23 10 23 23s-10 23-23 23H126l210 210c11 11 11 21 0 32-5 5-10 7-16 7s-11-2-16-7L55 524c-11-11-11-21 0-32l249-249c21-22 53 10 32 32z" })); };
}; var _default = LeftArrow; exports["default"] = _default;
LeftArrow.defaultProps = { focusable: "false", viewBox: "0 0 1000 1000"
note.rs
use crate::infer::error_reporting::note_and_explain_region; use crate::infer::{self, InferCtxt, SubregionOrigin}; use crate::middle::region; use crate::ty::error::TypeError; use crate::ty::{self, Region}; use errors::DiagnosticBuilder; use rustc_error_codes::*; impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub(super) fn note_region_origin( &self, err: &mut DiagnosticBuilder<'_>, origin: &SubregionOrigin<'tcx>, ) { match *origin { infer::Subtype(ref trace) => { if let Some((expected, found)) = self.values_str(&trace.values) { err.span_note( trace.cause.span, &format!("...so that the {}", trace.cause.as_requirement_str()), ); err.note_expected_found(&"", expected, &"", found); } else { // FIXME: this really should be handled at some earlier stage. Our // handling of region checking when type errors are present is // *terrible*. err.span_note( trace.cause.span, &format!("...so that {}", trace.cause.as_requirement_str()), ); } } infer::Reborrow(span) => { err.span_note(span, "...so that reference does not outlive borrowed content"); } infer::ReborrowUpvar(span, ref upvar_id) => { let var_name = self.tcx.hir().name(upvar_id.var_path.hir_id); err.span_note(span, &format!("...so that closure can access `{}`", var_name)); } infer::InfStackClosure(span) => { err.span_note(span, "...so that closure does not outlive its stack frame"); } infer::InvokeClosure(span) => { err.span_note(span, "...so that closure is not invoked outside its lifetime"); } infer::DerefPointer(span) => { err.span_note(span, "...so that pointer is not dereferenced outside its lifetime"); } infer::ClosureCapture(span, id) => { err.span_note( span, &format!( "...so that captured variable `{}` does not outlive the \ enclosing closure", self.tcx.hir().name(id) ), ); } infer::IndexSlice(span) => { err.span_note(span, "...so that slice is not indexed outside the lifetime"); } infer::RelateObjectBound(span) => { err.span_note(span, "...so that it can be closed over into an object"); } infer::CallRcvr(span) => { err.span_note(span, "...so that method receiver is valid for the method call"); } infer::CallArg(span) => { err.span_note(span, "...so that argument is valid for the call"); } infer::CallReturn(span) => { err.span_note(span, "...so that return value is valid for the call"); } infer::Operand(span) => { err.span_note(span, "...so that operand is valid for operation"); } infer::AddrOf(span) => { err.span_note(span, "...so that reference is valid at the time of borrow"); } infer::AutoBorrow(span) => { err.span_note(span, "...so that auto-reference is valid at the time of borrow"); } infer::ExprTypeIsNotInScope(t, span) => { err.span_note( span, &format!( "...so type `{}` of expression is valid during the \ expression", self.ty_to_string(t) ), ); } infer::BindingTypeIsNotValidAtDecl(span) => { err.span_note(span, "...so that variable is valid at time of its declaration"); } infer::ParameterInScope(_, span) => { err.span_note(span, "...so that a type/lifetime parameter is in scope here"); } infer::DataBorrowed(ty, span) => { err.span_note( span, &format!( "...so that the type `{}` is not borrowed for too long", self.ty_to_string(ty) ), ); } infer::ReferenceOutlivesReferent(ty, span) => { err.span_note( span, &format!( "...so that the reference type `{}` does not outlive the \ data it points at", self.ty_to_string(ty) ), ); } infer::RelateParamBound(span, t) => { err.span_note( span, &format!( "...so that the type `{}` will meet its required \ lifetime bounds", self.ty_to_string(t) ), ); } infer::RelateDefaultParamBound(span, t) => { 
err.span_note( span, &format!( "...so that type parameter instantiated with `{}`, will \ meet its declared lifetime bounds", self.ty_to_string(t) ), ); } infer::RelateRegionParamBound(span) => { err.span_note( span, "...so that the declared lifetime parameter bounds are satisfied", ); } infer::SafeDestructor(span) => { err.span_note(span, "...so that references are valid when the destructor runs"); } infer::CompareImplMethodObligation { span, .. } => { err.span_note( span, "...so that the definition in impl matches the definition from the \ trait", ); } } } pub(super) fn report_concrete_failure( &self, region_scope_tree: &region::ScopeTree, origin: SubregionOrigin<'tcx>, sub: Region<'tcx>, sup: Region<'tcx>, ) -> DiagnosticBuilder<'tcx> { match origin { infer::Subtype(box trace) => { let terr = TypeError::RegionsDoesNotOutlive(sup, sub); let mut err = self.report_and_explain_type_error(trace, &terr); note_and_explain_region(self.tcx, region_scope_tree, &mut err, "", sup, "..."); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "...does not necessarily outlive ", sub, "", ); err } infer::Reborrow(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0312, "lifetime of reference outlives lifetime of \ borrowed content..." ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "...the reference is valid for ", sub, "...", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "...but the borrowed content is only valid for ", sup, "", ); err } infer::ReborrowUpvar(span, ref upvar_id) => { let var_name = self.tcx.hir().name(upvar_id.var_path.hir_id); let mut err = struct_span_err!( self.tcx.sess, span, E0313, "lifetime of borrowed pointer outlives lifetime \ of captured variable `{}`...", var_name ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "...the borrowed pointer is valid for ", sub, "...", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, &format!("...but `{}` is only valid for ", var_name), sup, "", ); err } infer::InfStackClosure(span) => { let mut err = struct_span_err!(self.tcx.sess, span, E0314, "closure outlives stack frame"); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "...the closure must be valid for ", sub, "...", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "...but the closure's stack frame is only valid \ for ", sup, "", ); err } infer::InvokeClosure(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0315, "cannot invoke closure outside of its lifetime" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the closure is only valid for ", sup, "", ); err } infer::DerefPointer(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0473, "dereference of reference outside its lifetime" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the reference is only valid for ", sup, "", ); err } infer::ClosureCapture(span, id) => { let mut err = struct_span_err!( self.tcx.sess, span, E0474, "captured variable `{}` does not outlive the \ enclosing closure", self.tcx.hir().name(id) ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "captured variable is valid for ", sup, "", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "closure is valid for ", sub, "", ); err } infer::IndexSlice(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0475, "index of slice outside its lifetime" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the slice is only 
valid for ", sup, "", ); err } infer::RelateObjectBound(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0476, "lifetime of the source pointer does not outlive \ lifetime bound of the object type" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "object type is valid for ", sub, "", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "source pointer is only valid for ", sup, "", ); err } infer::RelateParamBound(span, ty) => { let mut err = struct_span_err!( self.tcx.sess, span, E0477, "the type `{}` does not fulfill the required \ lifetime", self.ty_to_string(ty) ); match *sub { ty::ReStatic => note_and_explain_region( self.tcx, region_scope_tree, &mut err, "type must satisfy ", sub, "", ), _ => note_and_explain_region( self.tcx, region_scope_tree, &mut err, "type must outlive ", sub, "", ), } err } infer::RelateRegionParamBound(span) => { let mut err = struct_span_err!(self.tcx.sess, span, E0478, "lifetime bound not satisfied"); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "lifetime parameter instantiated with ", sup, "", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "but lifetime parameter must outlive ", sub, "", ); err } infer::RelateDefaultParamBound(span, ty) => { let mut err = struct_span_err!( self.tcx.sess, span, E0479, "the type `{}` (provided as the value of a type \ parameter) is not valid at this point", self.ty_to_string(ty) ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "type must outlive ", sub, "", ); err } infer::CallRcvr(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0480, "lifetime of method receiver does not outlive the \ method call" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the receiver is only valid for ", sup, "", ); err } infer::CallArg(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0481, "lifetime of function argument does not outlive \ the function call" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the function argument is only valid for ", sup, "", ); err } infer::CallReturn(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0482, "lifetime of return value does not outlive the \ function call" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the return value is only valid for ", sup, "", ); err } infer::Operand(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0483, "lifetime of operand does not outlive the \ operation" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the operand is only valid for ", sup, "", ); err } infer::AddrOf(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0484, "reference is not valid at the time of borrow" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the borrow is only valid for ", sup, "", ); err } infer::AutoBorrow(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0485, "automatically reference is not valid at the time \ of borrow" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err,
sup, "", ); err } infer::ExprTypeIsNotInScope(t, span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0486, "type of expression contains references that are \ not valid during the expression: `{}`", self.ty_to_string(t) ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "type is only valid for ", sup, "", ); err } infer::SafeDestructor(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0487, "unsafe use of destructor: destructor might be \ called while references are dead" ); // FIXME (22171): terms "super/subregion" are suboptimal note_and_explain_region( self.tcx, region_scope_tree, &mut err, "superregion: ", sup, "", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "subregion: ", sub, "", ); err } infer::BindingTypeIsNotValidAtDecl(span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0488, "lifetime of variable does not enclose its \ declaration" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the variable is only valid for ", sup, "", ); err } infer::ParameterInScope(_, span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0489, "type/lifetime parameter not in scope here" ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the parameter is only valid for ", sub, "", ); err } infer::DataBorrowed(ty, span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0490, "a value of type `{}` is borrowed for too long", self.ty_to_string(ty) ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the type is valid for ", sub, "", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "but the borrow lasts for ", sup, "", ); err } infer::ReferenceOutlivesReferent(ty, span) => { let mut err = struct_span_err!( self.tcx.sess, span, E0491, "in type `{}`, reference has a longer lifetime \ than the data it references", self.ty_to_string(ty) ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "the pointer is valid for ", sub, "", ); note_and_explain_region( self.tcx, region_scope_tree, &mut err, "but the referenced data is only valid for ", sup, "", ); err } infer::CompareImplMethodObligation { span, item_name, impl_item_def_id, trait_item_def_id, } => self.report_extra_impl_obligation( span, item_name, impl_item_def_id, trait_item_def_id, &format!("`{}: {}`", sup, sub), ), } } pub(super) fn report_placeholder_failure( &self, region_scope_tree: &region::ScopeTree, placeholder_origin: SubregionOrigin<'tcx>, sub: Region<'tcx>, sup: Region<'tcx>, ) -> DiagnosticBuilder<'tcx> { // I can't think how to do better than this right now. -nikomatsakis match placeholder_origin { infer::Subtype(box trace) => { let terr = TypeError::RegionsPlaceholderMismatch; self.report_and_explain_type_error(trace, &terr) } _ => self.report_concrete_failure(region_scope_tree, placeholder_origin, sub, sup), } } }
"the automatic borrow is only valid for ",
mod.rs
mod mock_analytics;
// if we are in release mode and the `analytics` feature is enabled
#[cfg(all(not(debug_assertions), feature = "analytics"))]
mod segment_analytics;

use std::fs;
use std::path::{Path, PathBuf};

use actix_web::HttpRequest;
use once_cell::sync::Lazy;
use platform_dirs::AppDirs;
use serde_json::Value;

use crate::routes::indexes::documents::UpdateDocumentsQuery;

pub use mock_analytics::MockAnalytics;

// if we are in debug mode OR the analytics feature is disabled,
// `SegmentAnalytics` points to the mock instead of the real analytics
#[cfg(any(debug_assertions, not(feature = "analytics")))]
pub type SegmentAnalytics = mock_analytics::MockAnalytics;
#[cfg(any(debug_assertions, not(feature = "analytics")))]
pub type SearchAggregator = mock_analytics::SearchAggregator;

// if we are in release mode and the `analytics` feature is enabled,
// we use the real analytics
#[cfg(all(not(debug_assertions), feature = "analytics"))]
pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
#[cfg(all(not(debug_assertions), feature = "analytics"))]
pub type SearchAggregator = segment_analytics::SearchAggregator;

/// The MeiliSearch config dir:
/// `~/.config/MeiliSearch` on *NIX or *BSD.
/// `~/Library/Application Support` on macOS.
/// `%APPDATA%` (= `C:\Users\%USERNAME%\AppData\Roaming`) on Windows.
static MEILISEARCH_CONFIG_PATH: Lazy<Option<PathBuf>> =
    Lazy::new(|| AppDirs::new(Some("MeiliSearch"), false).map(|appdir| appdir.config_dir));

fn config_user_id_path(db_path: &Path) -> Option<PathBuf>
/// Look for the instance-uid in the `data.ms` or in `~/.config/MeiliSearch/path-to-db-instance-uid`
fn find_user_id(db_path: &Path) -> Option<String> {
    fs::read_to_string(db_path.join("instance-uid"))
        .ok()
        .or_else(|| fs::read_to_string(&config_user_id_path(db_path)?).ok())
}

pub trait Analytics: Sync + Send {
    /// The method used to publish most analytics that do not need to be batched every hour
    fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>);

    /// This method should be called to aggregate a get search
    fn get_search(&self, aggregate: SearchAggregator);

    /// This method should be called to aggregate a post search
    fn post_search(&self, aggregate: SearchAggregator);

    // this method should be called to aggregate an add documents request
    fn add_documents(
        &self,
        documents_query: &UpdateDocumentsQuery,
        index_creation: bool,
        request: &HttpRequest,
    );

    // this method should be called to batch an update documents request
    fn update_documents(
        &self,
        documents_query: &UpdateDocumentsQuery,
        index_creation: bool,
        request: &HttpRequest,
    );
}
{ db_path .canonicalize() .ok() .map(|path| { path.join("instance-uid") .display() .to_string() .replace("/", "-") }) .zip(MEILISEARCH_CONFIG_PATH.as_ref()) .map(|(filename, config_path)| config_path.join(filename.trim_start_matches('-'))) }
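As a sanity check of the flattening performed by config_user_id_path, a small test sketch (the database path is an example; canonicalize requires the path to exist, so the helper yields None otherwise):

#[cfg(test)]
mod path_tests {
    use super::config_user_id_path;
    use std::path::Path;

    #[test]
    fn instance_uid_path_is_flattened() {
        // For a db at /var/lib/meilisearch/data.ms this should end up as
        // <config dir>/var-lib-meilisearch-data.ms-instance-uid on *NIX.
        if let Some(p) = config_user_id_path(Path::new("/var/lib/meilisearch/data.ms")) {
            assert!(p.to_string_lossy().ends_with("instance-uid"));
        }
    }
}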
App.tsx
import { // @ts-ignore unstable_createMuiStrictModeTheme as createMuiTheme, CssBaseline, ThemeProvider, } from "@material-ui/core"; import React, { StrictMode } from "react"; import { Provider } from "react-redux"; import { MemoryRouter as Router, Route, Switch } from "react-router-dom"; import "typeface-roboto"; import { Explorer } from "_r/App/Explorer/Explorer"; import { GlobalCss } from "_r/globalCss"; import { store } from "_r/redux/StoreCreator"; import { theme } from "_r/theme"; import { AppBar } from "./AppBar/AppBar"; import { RpcIssueDialog } from "./RpcIssueDialog/RpcIssueDialog"; import { RedirectToHighestBlock } from "./RedirectToHighestBlock/RedirectToHighestBlock"; import { Settings } from "./Settings/Settings";
return ( <Router> <RpcIssueDialog /> <Switch> <Route exact path="/"> {appBar} <RedirectToHighestBlock /> </Route> <Route path="/explorer/:blockHeightAsId"> {appBar} <Explorer /> </Route> <Route path="/settings"> {appBar} <Settings /> </Route> </Switch> </Router> ); }; const App = () => ( <StrictMode> <Provider store={store}> <ThemeProvider theme={createMuiTheme(theme)}> <CssBaseline /> <GlobalCss /> <Routes /> </ThemeProvider> </Provider> </StrictMode> ); export const getApp = () => App;
export const Routes = () => { const appBar = <AppBar />;
reference_result.go
package results

import (
	"luahelper-lsp/langserver/check/common"
	"luahelper-lsp/langserver/check/compiler/ast"
	"luahelper-lsp/langserver/check/compiler/lexer"
	"luahelper-lsp/langserver/log"
	"strings"
)

// SelfConvertInterface is the interface for converting self references
type SelfConvertInterface interface {
	ChangeSelfToReferVar(strTable string, prefixStr string) (str string)
}

// ReferenceFileResult is the fourth-stage analysis result for a single lua file,
// searching for references to a variable
type ReferenceFileResult struct {
	StrFile string // name of the file

	fileResult *FileResult // pointer to the single-file analysis result

	fileName     string   // lua file where the referenced symbol lives
	referSuffVec []string // suffix strings of the reference; e.g. for a reference a.b.c, referName holds a, while the values of b and c are stored in this slice

	findSymbol *common.VarInfo // set when the referenced symbol is local

	secondProjectVec []*SingleProjectResult // second-stage projects that need to be processed for the referencing file
	thirdStruct      *AnalysisThird         // third-stage scattered files covered by the reference

	FindLocVec []lexer.Location // locations of the references that were found

	ignoreDefineLoc lexer.Location // definition location that should be ignored
}

// CreateReferenceFileResult creates a single reference result pointer
func CreateReferenceFileResult(strFile string) *ReferenceFileResult {
	return &ReferenceFileResult{
		StrFile:         strFile,
		fileResult:      nil,
		findSymbol:      nil,
		referSuffVec:    []string{},
		ignoreDefineLoc: lexer.Location{},
	}
}

// MatchVarInfo checks whether a traversed local-variable reference is the one we are looking for.
// excludeRequire indicates whether to strip the effect of require: a require reference lacks the
// original table name, so the first element of four.referSuffVec must be dropped as well
func (r *ReferenceFileResult) MatchVarInfo(selfConvert SelfConvertInterface, strName string, fileName string, varInfo *common.VarInfo, fi *common.FuncInfo, strPreExp string, nameExp ast.Exp, excludeRequire bool) bool {
	referSuffVec := r.referSuffVec
	if excludeRequire && len(referSuffVec) > 1 {
		referSuffVec = referSuffVec[1:]
	}
if r.fileName != fileName {
		return false
	}

	if referSuffVec[0] != strName {
		return false
	}

	if !lexer.CompareTwoLoc(&r.findSymbol.Loc, &varInfo.Loc) {
		return false
	}

	lenSuffVec := len(referSuffVec)
	if lenSuffVec > 1 && strPreExp == "" {
		// more than one level deep: the input should be a table, so the preceding table must be compared
		return false
	} else if lenSuffVec == 1 && strPreExp != "" {
		return false
	}

	findLoc := varInfo.Loc
	if lenSuffVec > 1 {
		// second or third pass: check whether the table access has a definition
		strTable := strPreExp
		if strings.Contains(strTable, "#") {
			return false
		}

		strKey := common.GetExpName(nameExp)
		findLoc = common.GetExpLoc(nameExp)

		// bail out if the key is not a simple string
		if !common.JudgeSimpleStr(strKey) {
			return false
		}

		strTableArry := strings.Split(strTable, ".")
		strTableArry = append(strTableArry, strKey)
		if len(strTableArry) != lenSuffVec {
			return false
		}

		// substitute self
		if strTableArry[0] == "!self" {
			strTableArry[0] = selfConvert.ChangeSelfToReferVar(strTableArry[0], "!")
		}

		_, strTableArry[0] = common.StrRemoveSigh(strTableArry[0])
		if strTableArry[0] == "" {
			return false
		}

		for i := 1; i < lenSuffVec; i++ {
			if referSuffVec[i] != strTableArry[i] {
				return false
			}
		}
	} else {
		findLoc = common.GetExpLoc(nameExp)
	}

	r.FindLocVec = append(r.FindLocVec, findLoc)
	return true
}

// SetFileResult sets the single-file analysis result
func (r *ReferenceFileResult) SetFileResult(fileResult *FileResult) {
	r.fileResult = fileResult
}

// FindProjectGlobal looks up the referenced global variable in every second-stage
// project and in the third-stage scattered file set
func (r *ReferenceFileResult) FindProjectGlobal(selfConvert SelfConvertInterface, strName string, strProPre string, fi *common.FuncInfo, strPreExp string, nameExp ast.Exp) bool {
	if !r.findSymbol.IsGlobal() {
		return false
	}

	for _, secondProject := range r.secondProjectVec {
		ok, oneVar := secondProject.FindGlobalGInfo(strName, CheckTermFirst, strProPre)
		if !ok {
			continue
		}

		ok = r.MatchVarInfo(selfConvert, strName, oneVar.FileName, oneVar, fi, strPreExp, nameExp, false)
		if ok {
			return true
		}
	}

	// search the project's first-stage global _G symbol table
	if r.thirdStruct != nil {
		ok, oneVar := r.thirdStruct.FindThirdGlobalGInfo(false, strName, strProPre)
		if !ok {
			return false
		}

		ok = r.MatchVarInfo(selfConvert, strName, oneVar.FileName, oneVar, fi, strPreExp, nameExp, false)
		if ok {
			log.Debug("find global Info, thirdStruct, strName=%s", strName)
			return true
		}
	}

	return false
}

// SetFindReferenceInfo sets the information needed for the reference lookup
func (r *ReferenceFileResult) SetFindReferenceInfo(strName string, varInfo *common.VarInfo, secondVec []*SingleProjectResult, referSuffVec []string, ignoreDefineLoc lexer.Location, thirdStruct *AnalysisThird) {
	r.fileName = strName
	r.findSymbol = varInfo
	r.secondProjectVec = secondVec
	r.referSuffVec = referSuffVec
	r.ignoreDefineLoc = ignoreDefineLoc
	r.thirdStruct = thirdStruct
}
if r.findSymbol == nil || varInfo == nil { return false }
index.d.ts
export { WatsonHealthAiStatusQueued24 as default } from "../";
quiver_mutation_type.py
r""" Quiver mutation types AUTHORS: - Gregg Musiker (2012, initial version) - Christian Stump (2012, initial version) - Hugh Thomas (2012, initial version) """ #***************************************************************************** # Copyright (C) 2011 Gregg Musiker <[email protected]> # Christian Stump <[email protected]> # Hugh Thomas <[email protected]> # # Distributed under the terms of the GNU General Public License (GPL) # http://www.gnu.org/licenses/ #***************************************************************************** # python3 from __future__ import division, print_function from __future__ import absolute_import from six.moves import range from sage.structure.sage_object import SageObject from copy import copy from sage.structure.unique_representation import UniqueRepresentation from sage.misc.all import cached_method from sage.rings.all import ZZ, infinity from sage.graphs.all import Graph, DiGraph from sage.arith.all import binomial, Euler_Phi from sage.all import prod from sage.matrix.all import matrix class QuiverMutationTypeFactory(SageObject): def __call__(self, *args): """ For a detailed description, see :meth:`QuiverMutationType`. EXAMPLES:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import QuiverMutationTypeFactory sage: QuiverMutationTypeFactory() QuiverMutationType """ # get data as arguments or as list/tuple if len( args ) == 1: data = args[0] else: data = args # data is a QuiverMutationType if isinstance(data, QuiverMutationType_Irreducible): return data elif isinstance(data, QuiverMutationType_Reducible): return data # check that data is a tuple or list if isinstance(data, tuple) and len( data ) > 0: pass elif isinstance(data, list) and len( data ) > 0: data = tuple( data ) else: _mutation_type_error( data ) # check for reducible types if all( type( data_component ) in [list,tuple,QuiverMutationType_Irreducible] for data_component in data ): if len( data ) == 1: return QuiverMutationType( data[0] ) else: data = tuple( QuiverMutationType(comp) for comp in data ) return QuiverMutationType_Reducible( *data ) # check for irreducible types if len(data) == 2: data = (data[0],data[1],None) elif len(data) == 3: pass else: _mutation_type_error(data) if isinstance(data[2], list): data = (data[0],data[1],tuple(data[2])) if isinstance(data[1], list): data = (data[0],tuple(data[1]),data[2]) # mutation type casting if True: if data == ('D',2,None): return QuiverMutationType( ('A',1,None), ('A',1,None) ) elif data == ('D',3,None): data = ('A',3,None) elif data == ('C',2,None): data = ('B',2,None) elif data == ('E',9,None): data = ('E',8,1) elif data[0] == 'A' and data[2] == 1 and isinstance(data[1], tuple) and len(data[1]) == 2 and min(data[1]) == 0: if max(data[1]) == 0: pass elif max(data[1]) == 1: data = ('A', 1,None) elif max(data[1]) == 2: return QuiverMutationType( ('A',1,None), ('A',1,None) ) elif max(data[1]) == 3: data = ('A',3,None) else: data = ('D',max(data[1]),None) elif data[0] == 'GR' and data[2] is None and isinstance(data[1], tuple) and len(data[1]) == 2 and data[1][1] > data[1][0]: if min(data[1]) > max(data[1])/2 and max(data[1]) != min(data[1])+1: data = (data[0],(max(data[1])-min(data[1]),max(data[1])),data[2]) if min(data[1]) == 2 and max(data[1]) > 3: data = ('A',max(data[1])-3,None) elif data[1] == (3,6): data = ('D',4,None) elif data[1] == (3,7): data = ('E',6,None) elif data[1] == (3,8): data = ('E',8,None) elif data[1] == (3,9): data = ('E',8,[1,1]) elif data[1] == (4,8): data = ('E',7,[1,1]) elif data == 
('TR',1,None): data = ('A',1,None) elif data == ('TR',2,None): data = ('A',3,None) elif data == ('TR',3,None): data = ('D',6,None) elif data == ('TR',4,None): data = ('E',8,(1,1)) # mutation type casting from Kac conventions elif data == ('A',1,1): data = ('A',(1,1),1) elif data[0] == 'B' and data[2] == 1: if data[1] == 2: data = ('CC',2,1) elif data[1] > 2: data = ('BD',data[1],1) elif data[0] == 'B' and data[2] == -1: if data[1] == 2: data = ('BB',2,1) elif data[1] > 2: data= ('CD',data[1],1) elif data[0] == 'C' and data[1] > 1 and data[2] == 1: data = ('CC',data[1],1) elif data[0] == 'C' and data[1] > 1 and data[2] == -1: data = ('BB',data[1],1) elif data == ('A',2,2): data = ('BC',1,1) elif data[0] == 'A' and data[1] in ZZ and data[1] > 1 and data[1]%2 == 0 and data[2] == 2: data = ('BC',data[1]//2,1) elif data[0] == 'A' and data[1] in ZZ and data[1] > 3 and data[1]%2 == 1 and data[2] == 2: data = ('CD',(data[1]+1)//2,1) # We think of ('A',3,2) as ('D',3,2) elif data == ('A',3,2): data = ('BB',2,1) elif data[0] == 'D' and data[1] in ZZ and data[1] > 2 and data[2] == 2: data = ('BB',data[1]-1,1) elif data == ('E',6,2): data = ('F',4,-1) elif data == ('D',4,3): data = ('G',2,-1) elif data == ('F',4,(2,1)): data = ('F',4,(1,2)) elif data == ('G',2,(3,1)): data = ('G',2,(1,3)) elif data[0] == 'T' and data[2] is None: data = (data[0],tuple(sorted(data[1])),data[2]) r,p,q = data[1] if r == 1: data = ('A',p+q-1,None) elif r == p == 2: data = ('D',q+2,None) elif r == 2 and p == 3: if q in (3,4,5): data = ('E',q+3,None) elif q == 6: data = ('E',8,1) else: data = ('E',q+3,None) elif r== 2 and p == q == 4: data = ('E',7,1) elif r == p == q == 3: data = ('E',6,1) elif data[0] == 'R2' and data[2] is None and all(data[1][i] in ZZ and data[1][i] > 0 for i in [0,1]): data = (data[0],tuple(sorted(data[1])),data[2]) b,c = data[1] if data[1] == (1,1): data = ('A',2,None) elif data[1] == (1,2): data = ('B',2,None) elif data[1] == (1,3): data = ('G',2,None) elif data[1] == (1,4): data = ('BC',1,1) elif data[1] == (2,2): data = ('A',(1,1),1) # setting the parameters and returning the mutation type letter,rank,twist = data if not isinstance(letter, str): _mutation_type_error(data) if isinstance(rank, list): rank = tuple(rank) if isinstance(twist, list): twist = tuple(twist) return QuiverMutationType_Irreducible(letter,rank,twist) def _repr_(self): """ Return the string representation of ``self``. EXAMPLES:: sage: QuiverMutationType # indirect doctest QuiverMutationType """ return "QuiverMutationType" def samples(self, finite=None, affine=None, elliptic=None, mutation_finite=None): """ Return a sample of the available quiver mutations types. INPUT: - ``finite`` - ``affine`` - ``elliptic`` - ``mutation_finite`` All four input keywords default values are ``None``. If set to ``True`` or ``False``, only these samples are returned. 
EXAMPLES:: sage: QuiverMutationType.samples() [['A', 1], ['A', 5], ['B', 2], ['B', 5], ['C', 3], ['C', 5], [ ['A', 1], ['A', 1] ], ['D', 5], ['E', 6], ['E', 7], ['E', 8], ['F', 4], ['G', 2], ['A', [1, 1], 1], ['A', [4, 5], 1], ['D', 4, 1], ['BB', 5, 1], ['E', 6, [1, 1]], ['E', 7, [1, 1]], ['R2', [1, 5]], ['R2', [3, 5]], ['E', 10], ['BE', 5], ['GR', [3, 10]], ['T', [3, 3, 4]]] sage: QuiverMutationType.samples(finite=True) [['A', 1], ['A', 5], ['B', 2], ['B', 5], ['C', 3], ['C', 5], [ ['A', 1], ['A', 1] ], ['D', 5], ['E', 6], ['E', 7], ['E', 8], ['F', 4], ['G', 2]] sage: QuiverMutationType.samples(affine=True) [['A', [1, 1], 1], ['A', [4, 5], 1], ['D', 4, 1], ['BB', 5, 1]] sage: QuiverMutationType.samples(elliptic=True) [['E', 6, [1, 1]], ['E', 7, [1, 1]]] sage: QuiverMutationType.samples(mutation_finite=False) [['R2', [1, 5]], ['R2', [3, 5]], ['E', 10], ['BE', 5], ['GR', [3, 10]], ['T', [3, 3, 4]]] """ result = self._samples() if finite is not None: result = [t for t in result if t.is_finite() == finite] if affine is not None: result = [t for t in result if t.is_affine() == affine] if elliptic is not None: result = [t for t in result if t.is_elliptic() == elliptic] if mutation_finite is not None: result = [t for t in result if t.is_mutation_finite() == mutation_finite] return result @cached_method def _samples(self): """ Return a list of sample of available Cartan types. EXAMPLES:: sage: X = QuiverMutationType._samples() """ finite_types = \ [QuiverMutationType(t) for t in [['A', 1], ['A', 5], ['B', 2], ['B', 5], ['C', 3], ['C', 5], ['D', 2], ['D', 5], ["E", 6], ["E", 7], ["E", 8], ["F", 4], ["G", 2]]] affine_types = \ [QuiverMutationType(t) for t in [['A', [1,1], 1], ['A', [4,5], 1], ['D', 4, 1], ['BB', 5, 1]]] elliptic_types = \ [QuiverMutationType(t) for t in [['E', 6, [1,1]], ['E', 7, [1,1]]]] mutation_finite_types = \ [QuiverMutationType(t) for t in [['R2',(1,5)], ['R2',(3,5)]]] mutation_infinite_types = \ [QuiverMutationType(t) for t in [['E',10], ['BE',5], ['GR',(3,10)], ['T',(3,3,4)]]] return finite_types + affine_types + elliptic_types + mutation_finite_types + mutation_infinite_types QuiverMutationType = QuiverMutationTypeFactory() QuiverMutationType.__doc__ = \ r""" *Quiver mutation types* can be seen as a slight generalization of *generalized Cartan types*. Background on generalized Cartan types can be found at :wikipedia:`Generalized_Cartan_matrix` For the compendium on the cluster algebra and quiver package in Sage see [MS2011]_ A `B`-matrix is a skew-symmetrizable `( n \times n )`-matrix `M`. I.e., there exists an invertible diagonal matrix `D` such that `DM` is skew-symmetric. `M` can be encoded as a *quiver* by having a directed edge from vertex `i` to vertex `j` with label `(a,b)` if `a = M_{i,j} > 0` and `b = M_{j,i} < 0`. We consider quivers up to *mutation equivalence*. To a quiver mutation type we can associate a *generalized Cartan type* by sending `M` to the generalized Cartan matrix `C(M)` obtained by replacing all positive entries by their negatives and adding `2`'s on the main diagonal. ``QuiverMutationType`` constructs a quiver mutation type object. For more detail on the possible different types, please see the compendium. INPUT: The input consists either of a quiver mutation type, or of a ``letter`` (a string), a ``rank`` (one integer or a list/tuple of integers), and an optional ``twist`` (an integer or a list of integers). There are several different naming conventions for quiver mutation types. 
- Finite type -- ``letter`` is a Dynkin type (A-G), and ``rank`` is the rank. - Affine type -- there is more than one convention for naming affine types. * Kac's notation: ``letter`` is a Dynkin type, ``rank`` is the rank of the associated finite Dynkin diagram, and ``twist`` is the twist, which could be 1, 2, or 3. In the special case of affine type A, there is more than one quiver mutation type associated to the Cartan type. In this case only, ``rank`` is a pair of integers (i,j), giving the number of edges pointing clockwise and the number of edges pointing counter-clockwise. The total number of vertices is given by i+j in this case. * Naive notation: ``letter`` is one of 'BB', 'BC', 'BD', 'CC', 'CD'. The name specifies the two ends of the diagram, which are joined by a path. The total number of vertices is given by ``rank +1`` (to match the indexing people expect because these are affine types). In general, ``rank`` must be large enough for the picture to make sense, but we accept ``letter`` is ``BC`` and ``rank=1``. * Macdonald notation: for the dual of an untwisted affine type (such as ['C', 6,1]), we accept a twist of -1 (i.e., ['C',6,-1]). - Elliptic type -- ``letter`` is a Dynkin type, ``rank`` is the rank of the finite Dynkin diagram, and ``twist`` is a tuple of two integers. We follow Saito's notation. - Other shapes: * Rank 2: ``letter`` is 'R2', and ``rank`` is a pair of integers specifying the label on the unique edge. * Triangle: ``letter`` is ``TR``, and ``rank`` is the number of vertices along a side. * T: This defines a quiver shaped like a T. ``letter`` is 'T', and the ``rank`` is a triple, whose entries specify the number of vertices along each path from the branch point (counting the branch point). * Grassmannian: This defines the cluster algebra (without coefficients) corresponding to the cluster algebra with coefficients which is the co-ordinate ring of a Grassmannian. ``letter`` is 'GR'. ``rank`` is a pair of integers (`k`, `n`) with 'k' < 'n' specifying the Grassmannian of `k`-planes in `n`-space. This defines a quiver given by a (k-1) x (n-k-1) grid where each square is cyclically oriented. * Exceptional mutation finite quivers: The two exceptional mutation finite quivers, found by Derksen-Owen, have ``letter`` as 'X' and ``rank`` 6 or 7, equal to the number of vertices. * AE, BE, CE, DE: Quivers are built of one end which looks like type (affine A), B, C, or D, and the other end which looks like type E (i.e., it consists of two antennae, one of length one, and one of length two). ``letter`` is 'AE', 'BE', 'CE', or 'DE', and ``rank`` is the total number of vertices. Note that 'AE' is of a slightly different form and requires ``rank`` to be a pair of integers (i,j) just as in the case of affine type A. See Exercise 4.3 in Kac's book Infinite Dimensional Lie Algebras for more details. * Infinite type E: It is also possible to obtain infinite-type E quivers by specifying ``letter`` as 'E' and ``rank`` as the number of vertices. REFERENCES: - A good reference for finite and affine Dynkin diagrams, including Kac's notation, is the :wikipedia:`Dynkin_diagram`. - A good reference for the skew-symmetrizable elliptic diagrams is "Cluster algebras of finite mutation type via unfolding" by A. Felikson, M. Shapiro, and P. Tumarkin, [FST2012]_. 
EXAMPLES: Finite types:: sage: QuiverMutationType('A',1) ['A', 1] sage: QuiverMutationType('A',5) ['A', 5] sage: QuiverMutationType('B',2) ['B', 2] sage: QuiverMutationType('B',5) ['B', 5] sage: QuiverMutationType('C',2) ['B', 2] sage: QuiverMutationType('C',5) ['C', 5] sage: QuiverMutationType('D',2) [ ['A', 1], ['A', 1] ] sage: QuiverMutationType('D',3) ['A', 3] sage: QuiverMutationType('D',4) ['D', 4] sage: QuiverMutationType('E',6) ['E', 6] sage: QuiverMutationType('G',2) ['G', 2] sage: QuiverMutationType('A',(1,0),1) ['A', 1] sage: QuiverMutationType('A',(2,0),1) [ ['A', 1], ['A', 1] ] sage: QuiverMutationType('A',(7,0),1) ['D', 7] Affine types:: sage: QuiverMutationType('A',(1,1),1) ['A', [1, 1], 1] sage: QuiverMutationType('A',(2,4),1) ['A', [2, 4], 1] sage: QuiverMutationType('BB',2,1) ['BB', 2, 1] sage: QuiverMutationType('BB',4,1) ['BB', 4, 1] sage: QuiverMutationType('CC',2,1) ['CC', 2, 1] sage: QuiverMutationType('CC',4,1) ['CC', 4, 1] sage: QuiverMutationType('BC',1,1) ['BC', 1, 1] sage: QuiverMutationType('BC',5,1) ['BC', 5, 1] sage: QuiverMutationType('BD',3,1) ['BD', 3, 1] sage: QuiverMutationType('BD',5,1) ['BD', 5, 1] sage: QuiverMutationType('CD',3,1) ['CD', 3, 1] sage: QuiverMutationType('CD',5,1) ['CD', 5, 1] sage: QuiverMutationType('D',4,1) ['D', 4, 1] sage: QuiverMutationType('D',6,1) ['D', 6, 1] sage: QuiverMutationType('E',6,1) ['E', 6, 1] sage: QuiverMutationType('E',7,1) ['E', 7, 1] sage: QuiverMutationType('E',8,1) ['E', 8, 1] sage: QuiverMutationType('F',4,1) ['F', 4, 1] sage: QuiverMutationType('F',4,-1) ['F', 4, -1] sage: QuiverMutationType('G',2,1) ['G', 2, 1] sage: QuiverMutationType('G',2,-1) ['G', 2, -1] sage: QuiverMutationType('A',3,2) == QuiverMutationType('D',3,2) True Affine types using Kac's Notation:: sage: QuiverMutationType('A',1,1) ['A', [1, 1], 1] sage: QuiverMutationType('B',5,1) ['BD', 5, 1] sage: QuiverMutationType('C',5,1) ['CC', 5, 1] sage: QuiverMutationType('A',2,2) ['BC', 1, 1] sage: QuiverMutationType('A',7,2) ['CD', 4, 1] sage: QuiverMutationType('A',8,2) ['BC', 4, 1] sage: QuiverMutationType('D',6,2) ['BB', 5, 1] sage: QuiverMutationType('E',6,2) ['F', 4, -1] sage: QuiverMutationType('D',4,3) ['G', 2, -1] Elliptic types:: sage: QuiverMutationType('E',6,[1,1]) ['E', 6, [1, 1]] sage: QuiverMutationType('F',4,[2,1]) ['F', 4, [1, 2]] sage: QuiverMutationType('G',2,[3,3]) ['G', 2, [3, 3]] Mutation finite types: rank 2 cases:: sage: QuiverMutationType('R2',(1,1)) ['A', 2] sage: QuiverMutationType('R2',(1,2)) ['B', 2] sage: QuiverMutationType('R2',(1,3)) ['G', 2] sage: QuiverMutationType('R2',(1,4)) ['BC', 1, 1] sage: QuiverMutationType('R2',(1,5)) ['R2', [1, 5]] sage: QuiverMutationType('R2',(2,2)) ['A', [1, 1], 1] sage: QuiverMutationType('R2',(3,5)) ['R2', [3, 5]] Exceptional Derksen-Owen quivers:: sage: QuiverMutationType('X',6) ['X', 6] (Mainly) mutation infinite types: Infinite type E:: sage: QuiverMutationType('E',9) ['E', 8, 1] sage: QuiverMutationType('E',10) ['E', 10] sage: QuiverMutationType('E',12) ['E', 12] sage: QuiverMutationType('AE',(2,3)) ['AE', [2, 3]] sage: QuiverMutationType('BE',5) ['BE', 5] sage: QuiverMutationType('CE',5) ['CE', 5] sage: QuiverMutationType('DE',6) ['DE', 6] Grassmannian types:: sage: QuiverMutationType('GR',(2,4)) ['A', 1] sage: QuiverMutationType('GR',(2,6)) ['A', 3] sage: QuiverMutationType('GR',(3,6)) ['D', 4] sage: QuiverMutationType('GR',(3,7)) ['E', 6] sage: QuiverMutationType('GR',(3,8)) ['E', 8] sage: QuiverMutationType('GR',(3,10)) ['GR', [3, 10]] Triangular types:: sage: 
QuiverMutationType('TR',2) ['A', 3] sage: QuiverMutationType('TR',3) ['D', 6] sage: QuiverMutationType('TR',4) ['E', 8, [1, 1]] sage: QuiverMutationType('TR',5) ['TR', 5] T types:: sage: QuiverMutationType('T',(1,1,1)) ['A', 1] sage: QuiverMutationType('T',(1,1,4)) ['A', 4] sage: QuiverMutationType('T',(1,4,4)) ['A', 7] sage: QuiverMutationType('T',(2,2,2)) ['D', 4] sage: QuiverMutationType('T',(2,2,4)) ['D', 6] sage: QuiverMutationType('T',(2,3,3)) ['E', 6] sage: QuiverMutationType('T',(2,3,4)) ['E', 7] sage: QuiverMutationType('T',(2,3,5)) ['E', 8] sage: QuiverMutationType('T',(2,3,6)) ['E', 8, 1] sage: QuiverMutationType('T',(2,3,7)) ['E', 10] sage: QuiverMutationType('T',(3,3,3)) ['E', 6, 1] sage: QuiverMutationType('T',(3,3,4)) ['T', [3, 3, 4]] Reducible types:: sage: QuiverMutationType(['A',3],['B',4]) [ ['A', 3], ['B', 4] ] """ class QuiverMutationType_abstract(UniqueRepresentation, SageObject): """ EXAMPLES:: sage: mut_type1 = QuiverMutationType('A',5) sage: mut_type2 = QuiverMutationType('A',5) sage: mut_type3 = QuiverMutationType('A',6) sage: mut_type1 == mut_type2 True sage: mut_type1 == mut_type3 False """ def _repr_(self): """ Return the string representation of ``self``. EXAMPLES:: sage: QuiverMutationType(['A',2]) # indirect doctest ['A', 2] """ return self._description def plot(self, circular=False, directed=True): """ Return the plot of the underlying graph or digraph of ``self``. INPUT: - ``circular`` -- (default:``False``) if ``True``, the circular plot is chosen, otherwise >>spring<< is used. - ``directed`` -- (default: ``True``) if ``True``, the directed version is shown, otherwise the undirected. EXAMPLES:: sage: QMT = QuiverMutationType(['A',5]) sage: pl = QMT.plot() sage: pl = QMT.plot(circular=True) """ return self.standard_quiver().plot(circular=circular, directed=directed) def show(self, circular=False, directed=True): """ Show the plot of the underlying digraph of ``self``. INPUT: - ``circular`` -- (default:``False``) if ``True``, the circular plot is chosen, otherwise >>spring<< is used. - ``directed`` -- (default: ``True``) if ``True``, the directed version is shown, otherwise the undirected. TESTS:: sage: QMT = QuiverMutationType(['A',5]) sage: QMT.show() # long time """ self.plot( circular=circular, directed=directed ).show() def letter(self): """ Return the classification letter of ``self``. EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.letter() 'A' sage: mut_type = QuiverMutationType( ['BC',5,1] ); mut_type ['BC', 5, 1] sage: mut_type.letter() 'BC' sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.letter() 'A x B' sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6]); mut_type [ ['A', 3], ['B', 3], ['X', 6] ] sage: mut_type.letter() 'A x B x X' """ return self._letter def rank(self): """ Return the rank in the standard quiver of ``self``. The rank is the number of vertices. 
EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.rank() 5 sage: mut_type = QuiverMutationType( ['A',[4,5],1] ); mut_type ['A', [4, 5], 1] sage: mut_type.rank() 9 sage: mut_type = QuiverMutationType( ['BC',5,1] ); mut_type ['BC', 5, 1] sage: mut_type.rank() 6 sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.rank() 6 sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6]); mut_type [ ['A', 3], ['B', 3], ['X', 6] ] sage: mut_type.rank() 12 """ return self._rank @cached_method def b_matrix(self): """ Return the B-matrix of the standard quiver of ``self``. The conventions for B-matrices agree with Fomin-Zelevinsky (up to a reordering of the simple roots). EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.b_matrix() [ 0 1 0 0 0] [-1 0 -1 0 0] [ 0 1 0 1 0] [ 0 0 -1 0 -1] [ 0 0 0 1 0] sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.b_matrix() [ 0 1 0 0 0 0] [-1 0 -1 0 0 0] [ 0 1 0 0 0 0] [ 0 0 0 0 1 0] [ 0 0 0 -1 0 -1] [ 0 0 0 0 2 0] """ return _edge_list_to_matrix(self._digraph.edges(), list(range(self._rank)), []) @cached_method def standard_quiver(self): """ Return the standard quiver of ``self``. EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.standard_quiver() Quiver on 5 vertices of type ['A', 5] sage: mut_type = QuiverMutationType( ['A',[5,3],1] ); mut_type ['A', [3, 5], 1] sage: mut_type.standard_quiver() Quiver on 8 vertices of type ['A', [3, 5], 1] sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.standard_quiver() Quiver on 6 vertices of type [ ['A', 3], ['B', 3] ] sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6]); mut_type [ ['A', 3], ['B', 3], ['X', 6] ] sage: mut_type.standard_quiver() Quiver on 12 vertices of type [ ['A', 3], ['B', 3], ['X', 6] ] """ from .quiver import ClusterQuiver Q = ClusterQuiver(self._digraph) Q._mutation_type = self return Q @cached_method def cartan_matrix(self): """ Return the Cartan matrix of ``self``. Note that (up to a reordering of the simple roots) the convention for the definition of Cartan matrix, used here and elsewhere in Sage, agrees with the conventions of Kac, Fulton-Harris, and Fomin-Zelevinsky, but disagrees with the convention of Bourbaki. The `(i,j)` entry is `2(\\alpha_i,\\alpha_j)/(\\alpha_i,\\alpha_i)`. EXAMPLES:: sage: mut_type = QuiverMutationType(['A',5]); mut_type ['A', 5] sage: mut_type.cartan_matrix() [ 2 -1 0 0 0] [-1 2 -1 0 0] [ 0 -1 2 -1 0] [ 0 0 -1 2 -1] [ 0 0 0 -1 2] sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.cartan_matrix() [ 2 -1 0 0 0 0] [-1 2 -1 0 0 0] [ 0 -1 2 0 0 0] [ 0 0 0 2 -1 0] [ 0 0 0 -1 2 -1] [ 0 0 0 0 -2 2] """ # as soon as CartanMatrix is implemented we should use it here: # from sage.combinat.root_system.cartan_matrix import CartanMatrix cmat = copy(self.b_matrix()) for i,j in cmat.nonzero_positions(): a = cmat[i,j] if a > 0: cmat[i,j] = -a for i in range(self._rank): cmat[i,i] = 2 # return CartanMatrix(cmat) return cmat def
(self): """ Return ``True`` if ``self`` is irreducible. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_irreducible() True """ return self._info['irreducible'] def is_mutation_finite(self): """ Return ``True`` if ``self`` is of finite mutation type. This means that its mutation class has only finitely many different B-matrices. EXAMPLES:: sage: mt = QuiverMutationType(['D',5,1]) sage: mt.is_mutation_finite() True """ return self._info['mutation_finite'] def is_simply_laced(self): """ Return ``True`` if ``self`` is simply laced. This means that the only arrows that appear in the quiver of ``self`` are single unlabelled arrows. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_simply_laced() True sage: mt = QuiverMutationType(['B',2]) sage: mt.is_simply_laced() False sage: mt = QuiverMutationType(['A',(1,1),1]) sage: mt.is_simply_laced() False """ return self._info['simply_laced'] def is_skew_symmetric(self): """ Return ``True`` if the B-matrix of ``self`` is skew-symmetric. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_skew_symmetric() True sage: mt = QuiverMutationType(['B',2]) sage: mt.is_skew_symmetric() False sage: mt = QuiverMutationType(['A',(1,1),1]) sage: mt.is_skew_symmetric() True """ return self._info['skew_symmetric'] def is_finite(self): """ Return ``True`` if ``self`` is of finite type. This means that the cluster algebra associated to ``self`` has only a finite number of cluster variables. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_finite() True sage: mt = QuiverMutationType(['A',[4,2],1]) sage: mt.is_finite() False """ return self._info['finite'] def is_affine(self): """ Return ``True`` if ``self`` is of affine type. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_affine() False sage: mt = QuiverMutationType(['A',[4,2],1]) sage: mt.is_affine() True """ if self.is_irreducible(): return self._info['affine'] else: return False def is_elliptic(self): """ Return ``True`` if ``self`` is of elliptic type. EXAMPLES:: sage: mt = QuiverMutationType(['A',2]) sage: mt.is_elliptic() False sage: mt = QuiverMutationType(['E',6,[1,1]]) sage: mt.is_elliptic() True """ if self.is_irreducible(): return self._info['elliptic'] else: return False def properties(self): """ Print a scheme of all properties of ``self``. Most properties have natural definitions for either irreducible or reducible types. ``affine`` and ``elliptic`` are only defined for irreducible types. 
EXAMPLES:: sage: mut_type = QuiverMutationType(['A',3]); mut_type ['A', 3] sage: mut_type.properties() ['A', 3] has rank 3 and the following properties: - irreducible: True - mutation finite: True - simply-laced: True - skew-symmetric: True - finite: True - affine: False - elliptic: False sage: mut_type = QuiverMutationType(['B',3]); mut_type ['B', 3] sage: mut_type.properties() ['B', 3] has rank 3 and the following properties: - irreducible: True - mutation finite: True - simply-laced: False - skew-symmetric: False - finite: True - affine: False - elliptic: False sage: mut_type = QuiverMutationType(['B',3,1]); mut_type ['BD', 3, 1] sage: mut_type.properties() ['BD', 3, 1] has rank 4 and the following properties: - irreducible: True - mutation finite: True - simply-laced: False - skew-symmetric: False - finite: False - affine: True - elliptic: False sage: mut_type = QuiverMutationType(['E',6,[1,1]]); mut_type ['E', 6, [1, 1]] sage: mut_type.properties() ['E', 6, [1, 1]] has rank 8 and the following properties: - irreducible: True - mutation finite: True - simply-laced: False - skew-symmetric: True - finite: False - affine: False - elliptic: True sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.properties() [ ['A', 3], ['B', 3] ] has rank 6 and the following properties: - irreducible: False - mutation finite: True - simply-laced: False - skew-symmetric: False - finite: True sage: mut_type = QuiverMutationType('GR',[4,9]); mut_type ['GR', [4, 9]] sage: mut_type.properties() ['GR', [4, 9]] has rank 12 and the following properties: - irreducible: True - mutation finite: False - simply-laced: True - skew-symmetric: True - finite: False - affine: False - elliptic: False """ txt = '{} has rank {} and the following properties:' print(txt.format(self, self.rank())) s = "\t- {} {}" print(s.format('irreducible: ', self.is_irreducible())) print(s.format('mutation finite: ', self.is_mutation_finite())) print(s.format('simply-laced: ', self.is_simply_laced())) print(s.format('skew-symmetric: ', self.is_skew_symmetric())) print(s.format('finite: ', self.is_finite())) if self.is_irreducible(): print(s.format('affine: ', self.is_affine())) print(s.format('elliptic: ', self.is_elliptic())) class QuiverMutationType_Irreducible(QuiverMutationType_abstract): """ The mutation type for a cluster algebra or a quiver. Should not be called directly, but through QuiverMutationType. """ def __init__(self, letter, rank, twist=None): """ Should not be called directly but through QuiverMutationType. 
INPUT: - ``letter`` -- the letter of the mutation type - ``rank`` -- the rank of the mutation type - ``twist`` -- the twist of the mutation type EXAMPLES:: sage: QuiverMutationType('A',5) ['A', 5] sage: QuiverMutationType('A',[4,5],1) ['A', [4, 5], 1] sage: QuiverMutationType('BB',5,1) ['BB', 5, 1] sage: QuiverMutationType('X',6) ['X', 6] """ # _rank and _bi_rank are initialized self._rank = None self._bi_rank = None # _graph and _digraph are initialized self._graph = Graph() self._digraph = DiGraph() # _info is initialized self._info = {} self._info['irreducible'] = True self._info['mutation_finite'] = False self._info['simply_laced'] = False self._info['skew_symmetric'] = False self._info['finite'] = False self._info['affine'] = False self._info['elliptic'] = False self._info['irreducible_components'] = False if isinstance(rank, tuple): rank = list(rank) if isinstance(twist, tuple): twist = list(twist) # _letter/twist is the input letter/twist self._letter = letter self._twist = twist data = [letter,rank,twist] # type A (finite and affine) if letter == 'A': if twist is None and rank in ZZ and rank > 0: self._rank = rank self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['finite'] = True elif twist==1 and isinstance(rank, list) and len(rank) == 2 and all( rank[i] in ZZ and rank[i] >= 0 for i in [0,1] ) and rank != [0,0]: if isinstance(rank, tuple): rank = list( rank ) data[1] = rank rank = sorted(rank) self._bi_rank = rank self._rank = sum( self._bi_rank ) self._info['mutation_finite'] = True if self._rank > 2: self._info['simply_laced'] = True self._info['skew_symmetric'] = True if rank[0] > 0: self._info['affine'] = True elif rank[0] == 0: self._info['finite'] = True else: _mutation_type_error( data ) # types ['A',1] and ['A',[0,1],1] need to be treated on # itself (as there is no edge) if twist is None and self._rank == 1 or twist == 1 and self._rank == 1: self._graph.add_vertex( 0 ) # type ['A',[1,1],1] needs to be treated on itself as well # (as there is a double edge) elif twist == 1 and self._bi_rank[0] == 1 and self._bi_rank[1] == 1: self._graph.add_edge( 0,1,2 ) else: for i in range( self._rank - 1 ): self._graph.add_edge( i, i+1, 1 ) if twist == 1: self._digraph.add_edge( self._rank - 1, 0, 1 ) for i in range( self._rank - 1 ): if i < ( 2 * self._bi_rank[0] ) and i%2 == 0: self._digraph.add_edge( i+1, i, 1 ) else: self._digraph.add_edge( i, i+1, 1 ) # type B (finite) elif letter == 'B': if twist is None and rank in ZZ and rank > 1: self._rank = rank self._info['mutation_finite'] = True self._info['finite'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (1,-2) ) else: self._graph.add_edge( rank-2, rank-1, (2,-1) ) # type C (finite) elif letter == 'C': if twist is None and rank in ZZ and rank > 1: self._rank = rank self._info['mutation_finite'] = True self._info['finite'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (2,-1) ) else: self._graph.add_edge( rank-2, rank-1, (1,-2) ) # type BB (affine) elif letter == 'BB': if twist == 1 and rank in ZZ and rank > 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if rank % 2 == 0: self._graph.add_edge( rank-2, 
rank-1, (1,-2) ) else: self._graph.add_edge( rank-2, rank-1, (2,-1) ) self._graph.add_edge( rank, 0 , (1,-2) ) # type CC (affine) elif letter == 'CC': if twist == 1 and rank in ZZ and rank > 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if rank % 2 == 0: self._graph.add_edge( rank-2, rank-1, (2,-1) ) else: self._graph.add_edge( rank-2, rank-1, (1,-2) ) self._graph.add_edge( rank, 0 , (2,-1) ) # type BC (affine) elif letter == 'BC': if twist == 1 and rank in ZZ and rank >= 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) if rank == 1: self._graph.add_edge( 0,1,(1,-4) ) else: for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (2,-1) ) else: self._graph.add_edge( rank-2, rank-1, (1,-2) ) if twist == 1: self._graph.add_edge( rank, 0 , (1,-2) ) # type BD (affine) elif letter == 'BD': if twist == 1 and rank in ZZ and rank > 2: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (1,-2) ) else: self._graph.add_edge( rank-2, rank-1, (2,-1) ) if twist == 1: self._graph.add_edge( rank, 1 , 1 ) # type CD (affine) elif letter == 'CD': if twist == 1 and rank in ZZ and rank > 2: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) if (rank % 2 == 0): self._graph.add_edge( rank-2, rank-1, (2,-1) ) else: self._graph.add_edge( rank-2, rank-1, (1,-2) ) if twist == 1: self._graph.add_edge( rank, 1 , 1 ) # type D (finite and affine) elif letter == 'D': if rank in ZZ and rank > 3 and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['finite'] = True elif twist == 1 and rank in ZZ and rank > 3: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['affine'] = True else: _mutation_type_error( data ) for i in range( rank - 2 ): self._graph.add_edge( i, i+1, 1 ) self._graph.add_edge( rank-3, rank-1, 1 ) if twist is not None: self._graph.add_edge( rank, 1 ,1 ) # type E (finite, affine and elliptic) elif letter == 'E': if rank in [6,7,8] and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['finite'] = True if rank == 6: self._graph.add_edges( [ (0,1),(1,2),(2,3),(3,4),(2,5) ] ) elif rank == 7: self._graph.add_edges([(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (2, 6)]) elif rank == 8: self._graph.add_edges([(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6),(2, 7)]) elif rank in [6,7,8] and twist == 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._info['affine'] = True if rank == 6: self._graph.add_edges( [ (0,1),(1,2),(2,3),(3,4),(2,5),(5,6) ] ) elif rank == 7: self._graph.add_edges( [ (0,1),(1,2),(2,3),(3,4),(4,5),(5,6),(3,7) ] ) elif rank == 8: self._graph.add_edges( [ (0,1),(1,2),(2,3),(3,4),(4,5),(5,6),(6,7),(2,8) ] ) elif rank in [6,7,8] and twist == 
[1,1]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['skew_symmetric'] = True self._info['elliptic'] = True if rank == 6: self._digraph.add_edges( [ (0,1,1),(1,2,1),(3,2,1),(3,4,1),(5,6,1),(6,7,1),(5,1,1),(2,5,2),(5,3,1),(6,2,1) ] ) elif rank == 7: self._digraph.add_edges( [ (1,0,1),(1,2,1),(2,3,1),(4,3,1),(4,5,1),(6,5,1),(7,8,1),(3,7,2),(7,2,1),(7,4,1),(8,3,1) ] ) elif rank == 8: self._digraph.add_edges( [ (0,1,1),(1,9,1),(3,9,1),(3,4,1),(2,8,1),(2,1,1),(9,2,2),(2,3,1),(8,9,1),(5,4,1),(5,6,1),(7,6,1) ] ) # type E (mutation infinite) elif rank > 9 and twist is None: self._info['simply_laced'] = True self._info['skew_symmetric'] = True self._rank = rank for i in range(rank-2): self._graph.add_edge( i, i+1, 1 ) self._graph.add_edge( 2, rank-1 ) else: _mutation_type_error(data) # type AE (mutation infinite) elif letter == 'AE': if isinstance(rank, list) and len(rank) == 2 and all( rank[i] in ZZ and rank[i] > 0 for i in [0,1] ) and twist is None: if isinstance(rank, tuple): rank = list( rank ) data[1] = rank rank = sorted(rank) self._bi_rank = rank self._rank = sum( self._bi_rank ) + 1 if self._rank > 3: self._info['simply_laced'] = True self._info['skew_symmetric'] = True if self._bi_rank == [1,1]: self._graph.add_edges( [(0,1,2),(1,2,None)] ) else: self._digraph.add_edge( self._rank - 2, 0 ) for i in range(self._rank-2): if i < ( 2 * self._bi_rank[0] ) and i%2 == 0: self._digraph.add_edge(i+1,i) else: self._digraph.add_edge(i,i+1) self._digraph.add_edge(self._rank-2,self._rank-1) else: _mutation_type_error( data ) # type BE (mutation infinite) elif letter == 'BE': if rank >4 and twist is None: self._rank = rank for i in range(rank-3): self._graph.add_edge( i, i+1 ) self._graph.add_edge( 2, rank-1 ) if rank%2 == 0: self._graph.add_edge( rank-3,rank-2,(2,-1) ) else: self._graph.add_edge( rank-3,rank-2,(1,-2) ) else: _mutation_type_error( data ) # type CE (mutation infinite) elif letter == 'CE': if rank >4 and twist is None: self._rank = rank for i in range(rank-3): self._graph.add_edge( i, i+1 ) self._graph.add_edge( 2, rank-1 ) if rank%2 == 0: self._graph.add_edge( rank-3,rank-2,(1,-2) ) else: self._graph.add_edge( rank-3,rank-2,(2,-1) ) else: _mutation_type_error( data ) # type DE (mutation infinite) elif letter == 'DE': if rank >5 and twist is None: self._rank = rank self._info['simply_laced'] = True self._info['skew_symmetric'] = True for i in range(rank-3): self._graph.add_edge( i, i+1 ) self._graph.add_edge( 2, rank-2 ) self._graph.add_edge( rank-4, rank-1 ) else: _mutation_type_error( data ) # type F (finite, affine, and elliptic) elif letter == 'F': if rank == 4 and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['finite'] = True self._graph.add_edges( [ (0,1,None),(1,2,(2,-1)),(2,3,None) ] ) elif rank == 4 and twist == 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True self._graph.add_edges( [ (0,1,None), (1,2,None), (2,3,(1,-2)),(3,4,None) ] ) elif rank == 4 and twist == -1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True self._graph.add_edges( [ (0,1,None), (1,2,None), (2,3,(2,-1)),(3,4,None) ] ) elif rank == 4 and (twist == [1,2]): self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,None), (2,3,(2,-1)), (4,2,(1,-2)), (3,4,2), (4,5,None), (5,3,None) ]) elif rank == 4 and (twist == [2,1]): self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = 
True self._digraph.add_edges( [ (0,1,None), (1,2,None), (2,3,(1,-2)), (4,2,(2,-1)), (3,4,2), (4,5,None), (5,3,None) ]) elif rank == 4 and twist == [2,2]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,None), (3,1,None), (2,3,2), (4,2,(2,-1)), (3,4,(1,-2)), (5,4,None) ] ) elif rank == 4 and twist == [1,1]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,None), (3,1,None), (2,3,2), (4,2,(1,-2)), (3,4,(2,-1)), (5,4,None) ] ) else: _mutation_type_error( data ) # type G (finite, affine, and elliptic) elif letter == 'G': if rank == 2 and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['finite'] = True self._graph.add_edges( [ (0,1,(1,-3)) ] ) elif rank == 2 and twist == -1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True self._graph.add_edges( [ (0,1,None),(1,2,(1,-3)) ] ) elif rank == 2 and twist == 1: self._rank = rank + 1 self._info['mutation_finite'] = True self._info['affine'] = True self._graph.add_edges( [ (0,1,None),(1,2,(3,-1)) ] ) elif rank == 2 and (twist == [1,3]): self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,(3,-1)), (3,1,(1,-3)), (2,3,2)] ) elif rank == 2 and (twist == [3,1]): self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (0,1,None), (1,2,(1,-3)), (3,1,(3,-1)), (2,3,2)] ) elif rank == 2 and twist == [3,3]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (1,0,None), (0,2,2), (3,0,(3,-1)), (2,1,None), (2,3, (1,-3))]) elif rank == 2 and twist == [1,1]: self._rank = rank + 2 self._info['mutation_finite'] = True self._info['elliptic'] = True self._digraph.add_edges( [ (1,0,None), (0,2,2), (3,0,(1,-3)), (2,1,None), (2,3,(3,-1)) ] ) else: _mutation_type_error( data ) # type GR (mutation infinite) elif letter == 'GR': if twist is None and isinstance(rank, list) and len(rank) == 2 and all( rank[i] in ZZ and rank[i] > 0 for i in [0,1] ) and rank[1] - 1 > rank[0] > 1: gr_rank = (rank[0]-1,rank[1]-rank[0]-1) self._rank = prod(gr_rank) self._info['simply_laced'] = True self._info['skew_symmetric'] = True a,b = gr_rank for i in range(a): for j in range(b): if i < a-1: if (i+j) % 2 == 0: self._digraph.add_edge(i*b+j,(i+1)*b+j) else: self._digraph.add_edge((i+1)*b+j,i*b+j) if j < b-1: if (i+j) % 2 == 0: self._digraph.add_edge(i*b+j+1,i*b+j) else: self._digraph.add_edge(i*b+j,i*b+j+1) else: _mutation_type_error( data ) # type R2 (rank 2 finite mutation types) elif letter == 'R2': if twist is None and isinstance(rank, list) and len(rank) == 2 and all( rank[i] in ZZ and rank[i] > 0 for i in [0,1] ): rank = sorted(rank) b,c = rank self._rank = 2 if b == c: self._info['skew_symmetric'] = True self._graph.add_edge(0,1,(b,-c)) else: _mutation_type_error( data ) # type T elif letter == 'T': if twist is None and isinstance(rank, list) and len(rank) == 3 and all( rank[i] in ZZ and rank[i] > 0 for i in [0,1,2] ): if isinstance(rank, tuple): rank = list( rank ) data[1] = rank rank = sorted( rank ) self._rank = sum( rank ) - 2 self._info['simply_laced'] = True self._info['skew_symmetric'] = True r,p,q = rank for i in range(q-1): if i == 0: self._graph.add_edge(0,1) self._graph.add_edge(0,r) self._graph.add_edge(0,r+p-1) else: if i < r-1: 
self._graph.add_edge(i,i+1) if i < p-1: self._graph.add_edge(i+r-1,i+r) self._graph.add_edge(i+r+p-2,i+r+p-1) else: _mutation_type_error( data ) # type TR (mutation infinite if rank > 2) elif letter == 'TR': # type ['TR',1] needs to be treated on itself (as there is no edge) if twist is None and rank == 1: self._graph.add_vertex( 0 ) elif twist is None and rank > 1: self._rank = rank*(rank+1)//2 self._info['simply_laced'] = True self._info['skew_symmetric'] = True level = 0 while level < rank: nr = rank*level-sum(range(level)) for i in range(nr,nr+rank-level-1): self._digraph.add_edge(i,i+1) self._digraph.add_edge(i+rank-level,i) self._digraph.add_edge(i+1,i+rank-level) level += 1 else: _mutation_type_error( data ) # type X elif letter == 'X': if rank in [6,7] and twist is None: self._rank = rank self._info['mutation_finite'] = True self._info['skew_symmetric'] = True self._digraph.add_edges( [ (0,1,2),(1,2,None),(2,0,None), (2,3,None),(3,4,2),(4,2,None), (2,5,None) ] ) if rank == 7: self._digraph.add_edges( [ (5,6,2),(6,2,None) ] ) else: _mutation_type_error( data ) # otherwise, an error is raised else: _mutation_type_error( data ) # in the bipartite case, the digraph is constructed from the graph if not self._digraph: if self._graph.is_bipartite(): self._digraph = _bipartite_graph_to_digraph( self._graph ) else: raise ValueError('The QuiverMutationType does not have ' 'a Coxeter diagram.') # in the other cases, the graph is constructed from the digraph if not self._graph: self._graph = self._digraph.to_undirected() # _description is as for CartanType if twist: self._description = str( [letter,rank,twist] ) else: self._description = str( [letter,rank] ) def irreducible_components( self ): """ Return a list of all irreducible components of ``self``. EXAMPLES:: sage: mut_type = QuiverMutationType('A',3); mut_type ['A', 3] sage: mut_type.irreducible_components() (['A', 3],) """ return tuple([self]) @cached_method def class_size(self): r""" If it is known, the size of the mutation class of all quivers which are mutation equivalent to the standard quiver of ``self`` (up to isomorphism) is returned. Otherwise, ``NotImplemented`` is returned. Formula for finite type A is taken from Torkildsen - Counting cluster-tilted algebras of type `A_n`. Formulas for affine type A and finite type D are taken from Bastian, Prellberg, Rubey, Stump - Counting the number of elements in the mutation classes of `\widetilde A_n` quivers. Formulas for finite and affine types B and C are proven but not yet published. Conjectural formulas for several other non-simply-laced affine types are implemented. Exceptional Types (finite, affine, and elliptic) E, F, G, and X are hardcoded. EXAMPLES:: sage: mut_type = QuiverMutationType( ['A',5] ); mut_type ['A', 5] sage: mut_type.class_size() 19 sage: mut_type = QuiverMutationType( ['A',[10,3],1] ); mut_type ['A', [3, 10], 1] sage: mut_type.class_size() 142120 sage: mut_type = QuiverMutationType( ['B',6] ); mut_type ['B', 6] sage: mut_type.class_size() 132 sage: mut_type = QuiverMutationType( ['BD',6,1] ); mut_type ['BD', 6, 1] sage: mut_type.class_size() Warning: This method uses a formula which has not been proved correct. 
504 Check that :trac:`14048` is fixed:: sage: mut_type = QuiverMutationType( ['F',4,(2,1)] ) sage: mut_type.class_size() 90 """ if not self.is_mutation_finite(): return infinity # type A (finite and affine) if self._letter == 'A': # the formula is taken from Torkildsen - Counting # cluster-tilted algebras of type A if self.is_finite(): n = self._rank a = binomial( 2*(n+1), n+1 ) // (n+2) if n % 2 == 1: a += binomial( n+1, (n+1)//2 ) if n % 3 == 0: a += 2 * binomial( 2*n//3, n//3 ) return a // (n+3) # the formula is taken from Bastian, Prellberg, Rubey, Stump elif self.is_affine(): i,j = self._bi_rank i = ZZ(i) j = ZZ(j) n = i+j f = Euler_Phi() if i == j: return ( binomial( 2*i,i ) + sum( f(k) * binomial(2*i//k,i//k)**2 for k in [k for k in i.divisors() if k in j.divisors()] ) // n ) // 4 else: return sum( f(k) * binomial(2*i//k,i//k) * binomial(2*j//k,j//k) for k in [k for k in i.divisors() if k in j.divisors()] ) // ( 2 * n ) # types B and C (finite and affine) elif self._letter in ['B', 'C']: # this formula is proven but nowhere published correctness # is clear enough that I don't think a warning is needed if self.is_finite(): n = self._rank return binomial(2 * n, n) // (n + 1) elif self._letter in ['BB','CC']: # these two formulas are not yet proven print(Warning("Warning: This method uses a formula " "which has not been proved correct.")) if self.is_affine(): if self._twist == 1: n = self._rank - 1 if n%2==1: return binomial( 2*n-1, n-1 ) else: return binomial( 2*n-1, n-1 ) + binomial( n-1, n//2 -1 ) # type BC (affine) elif self._letter == 'BC': # this formula is not yet proven print(Warning("Warning: This method uses a formula " "which has not been proved correct.")) if self.is_affine(): if self._twist == 1: n = self._rank - 1 return binomial( 2*n, n ) # types BD and CD (affine) elif self._letter in ['BD','CD']: # this formula is not yet proven print(Warning("Warning: This method uses a formula " "which has not been proved correct.")) if self.is_affine(): if self._twist == 1: n = self._rank - 2 return 2*binomial( 2*n, n ) # type D (finite and affine) elif self._letter == 'D': # the formula is taken from Bastian, Prellberg, Rubey, Stump if self.is_finite(): if self._rank == 4: return 6 else: f = Euler_Phi() n = ZZ(self._rank) return sum( f( n//k ) * binomial( 2*k, k ) for k in n.divisors() ) // (2*n) # this formula is not yet proven elif self.is_affine(): n = self._rank - 3 if n == 2: return 9 else: print(Warning ("Warning: This method uses a formula " "which has not been proved correct.")) if n%2==1: return 2*binomial(2*n,n) else: return 2*binomial(2*n,n) + binomial(n, n//2) # the exceptional types are hard-coded # type E (finite, affine and elliptic) elif self._letter == 'E': if self.is_finite(): if self._rank == 6: return 67 elif self._rank == 7: return 416 elif self._rank == 8: return 1574 elif self.is_affine(): if self._rank == 7: return 132 elif self._rank == 8: return 1080 elif self._rank == 9: return 7560 elif self.is_elliptic(): if self._rank == 8: return 49 elif self._rank == 9: return 506 elif self._rank == 10: return 5739 # type F elif self._letter == 'F': if self.is_finite(): return 15 elif self.is_affine(): return 60 elif self.is_elliptic(): if self._twist == [1,2]: return 90 if self._twist == [1,1] or self._twist == [2,2]: return 35 # type G elif self._letter == 'G': if self.is_finite(): return 2 elif self.is_affine(): return 6 elif self.is_elliptic(): if self._twist == [1,3]: return 7 if self._twist == [1,1] or self._twist == [3,3]: return 2 # type X elif self._letter 
== 'X': if self._rank == 6: return 5 elif self._rank == 7: return 2 # otherwise the size is reported as unknown else: print("Size unknown") return NotImplemented def dual(self): """ Return the QuiverMutationType which is dual to ``self``. EXAMPLES:: sage: mut_type = QuiverMutationType('A',5); mut_type ['A', 5] sage: mut_type.dual() ['A', 5] sage: mut_type = QuiverMutationType('B',5); mut_type ['B', 5] sage: mut_type.dual() ['C', 5] sage: mut_type.dual().dual() ['B', 5] sage: mut_type.dual().dual() == mut_type True """ letter = self.letter() # the B and C families are swapped under duality (type BC is self-dual) if letter != 'BC' and letter[0] in ['B','C']: if letter == 'BB': letter = 'CC' elif letter == 'CC': letter = 'BB' elif letter[0] == 'B': letter = 'C' + letter[1:] elif letter[0] == 'C': letter = 'B' + letter[1:] rank = self._rank if self.is_affine(): rank -= 1 twist = self._twist return QuiverMutationType(letter,rank,twist) # types F and G have non-trivial duality in some cases elif letter in ['F','G']: if self.is_finite(): return self elif self.is_affine(): rank = self._rank - 1 twist = - self._twist elif self.is_elliptic(): twist = self._twist rank = self._rank - 2 if letter == 'F': if self._twist == [2,2]: twist = [1,1] if self._twist == [1,1]: twist = [2,2] if letter == 'G': if self._twist == [3,3]: twist = [1,1] elif self._twist == [1,1]: twist = [3,3] else: rank = self._rank return QuiverMutationType(letter,rank,twist) else: return self class QuiverMutationType_Reducible(QuiverMutationType_abstract): """ The mutation type for a cluster algebra or a quiver. Should not be called directly, but through QuiverMutationType. Inherits from QuiverMutationType_abstract. """ def __init__(self, *args): """ Should not be called directly, but through QuiverMutationType. INPUT: - ``data`` -- a list each of whose entries is a QuiverMutationType_Irreducible EXAMPLES:: sage: QuiverMutationType(['A',4],['B',6]) [ ['A', 4], ['B', 6] ] """ data = args if len(data) < 2 or not all( isinstance(comp, QuiverMutationType_Irreducible) for comp in data ): return _mutation_type_error(data) # _info is initialized self._info = {} self._info['irreducible'] = False self._info['mutation_finite'] = all(comp.is_mutation_finite() for comp in data) self._info['simply_laced'] = all(comp.is_simply_laced() for comp in data) self._info['skew_symmetric'] = all(comp.is_skew_symmetric() for comp in data) self._info['finite'] = all(comp.is_finite() for comp in data) self._info['irreducible_components'] = copy(data) # letter and rank are initialized self._letter = '' self._rank = 0 # graph and digraph are initialized self._graph = Graph() self._digraph = DiGraph() for comp in data: if self._letter: self._letter += ' x ' self._letter += comp._letter self._rank += comp._rank self._graph = self._graph.disjoint_union(comp._graph, labels='integers') self._digraph = self._digraph.disjoint_union(comp._digraph, labels='integers') self._graph.name('') self._digraph.name('') # _description is as for CartanType self._description = "[ " comps = self.irreducible_components() for i in range(len(comps)): if i > 0: self._description += ", " self._description += comps[i]._description self._description += " ]" def irreducible_components( self ): """ Return a list of all irreducible components of ``self``. 
EXAMPLES:: sage: mut_type = QuiverMutationType('A',3); mut_type ['A', 3] sage: mut_type.irreducible_components() (['A', 3],) sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.irreducible_components() (['A', 3], ['B', 3]) sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6]) sage: mut_type [ ['A', 3], ['B', 3], ['X', 6] ] sage: mut_type.irreducible_components() (['A', 3], ['B', 3], ['X', 6]) """ return self._info['irreducible_components'] @cached_method def class_size(self): """ If it is known, the size of the mutation class of all quivers which are mutation equivalent to the standard quiver of ``self`` (up to isomorphism) is returned. Otherwise, ``NotImplemented`` is returned. EXAMPLES:: sage: mut_type = QuiverMutationType(['A',3],['B',3]); mut_type [ ['A', 3], ['B', 3] ] sage: mut_type.class_size() 20 sage: mut_type = QuiverMutationType(['A',3],['B',3],['X',6]) sage: mut_type [ ['A', 3], ['B', 3], ['X', 6] ] sage: mut_type.class_size() 100 """ if not self.is_mutation_finite(): return infinity else: components = [] multiplicities = [] for x in self.irreducible_components(): if components.count(x) == 0: components.append(x) multiplicities.append(1) else: y = components.index(x) multiplicities[y] = multiplicities[y]+1 sizes = [ x.class_size() for x in components ] if NotImplemented in sizes: print("Size unknown") return NotImplemented else: return prod( [binomial(sizes[i]+multiplicities[i]-1, multiplicities[i] ) for i in range (0,len(sizes))]) def dual(self): """ Return the QuiverMutationType which is dual to ``self``. EXAMPLES:: sage: mut_type = QuiverMutationType(['A',5],['B',6],['C',5],['D',4]); mut_type [ ['A', 5], ['B', 6], ['C', 5], ['D', 4] ] sage: mut_type.dual() [ ['A', 5], ['C', 6], ['B', 5], ['D', 4] ] """ comps = self.irreducible_components() return QuiverMutationType( [comp.dual() for comp in comps ] ) def _construct_classical_mutation_classes(n): r""" Return a dict with keys being tuples representing regular QuiverMutationTypes of the given rank, and with values being lists or sets containing all mutation equivalent quivers as dig6 data. 
EXAMPLES:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _construct_classical_mutation_classes sage: rank_2_classes = _construct_classical_mutation_classes(2) # long time sage: for mut_class in sorted(rank_2_classes.keys(),key=str): # long time ....: print("{} {}".format(mut_class, rank_2_classes[mut_class])) ('A', (1, 1), 1) [('AO', (((0, 1), (2, -2)),))] ('A', 2) [('AO', ())] ('B', 2) [('AO', (((0, 1), (1, -2)),)), ('AO', (((0, 1), (2, -1)),))] ('BC', 1, 1) [('AO', (((0, 1), (1, -4)),)), ('AO', (((0, 1), (4, -1)),))] """ from sage.combinat.cluster_algebra_quiver.quiver import ClusterQuiver data = {} # finite A data[ ('A',n) ] = ClusterQuiver(['A',n]).mutation_class(data_type='dig6') # affine A for j in range(1, n//2+1): data[ ('A',(n-j,j),1) ] = ClusterQuiver(['A',[n-j,j],1]).mutation_class(data_type='dig6') # finite B if n > 1: data[ ('B',n) ] = ClusterQuiver(['B',n]).mutation_class(data_type='dig6') # affine B if n > 2: data[ ('BB',n-1,1) ] = ClusterQuiver(['BB',n-1,1]).mutation_class(data_type='dig6') # finite C if n > 2: data[ ('C',n) ] = ClusterQuiver(['C',n]).mutation_class(data_type='dig6') # affine C if n > 1: data[ ('BC',n-1,1) ] = ClusterQuiver(['BC',n-1,1]).mutation_class(data_type='dig6') # affine CC if n > 2: data[ ('CC',n-1,1) ] = ClusterQuiver(['CC',n-1,1]).mutation_class(data_type='dig6') # affine BD if n > 3: data[ ('BD',n-1,1) ] = ClusterQuiver(['BD',n-1,1]).mutation_class(data_type='dig6') # affine CD if n > 3: data[ ('CD',n-1,1) ] = ClusterQuiver(['CD',n-1,1]).mutation_class(data_type='dig6') # finite D if n > 3: data[ ('D',n) ] = ClusterQuiver(['D',n]).mutation_class(data_type='dig6') # affine D if n > 4: data[ ('D',n-1,1) ] = ClusterQuiver(['D',n-1,1]).mutation_class(data_type='dig6') return data def _construct_exceptional_mutation_classes(n): r""" Return a dict with keys being tuples representing exceptional QuiverMutationTypes of the given rank, and with values being lists or sets containing all mutation equivalent quivers as dig6 data. 
EXAMPLES:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _construct_exceptional_mutation_classes sage: rank_3_exceptional = _construct_exceptional_mutation_classes(3) # long time sage: for mut_class in sorted(rank_3_exceptional.keys(), key=str): # long time ....: print("{} {}".format(mut_class, rank_3_exceptional[mut_class])) ('G', 2, -1) [('BH?', (((1, 2), (1, -3)),)), ('BGO', (((2, 1), (3, -1)),)), ('BW?', (((0, 1), (3, -1)),)), ('BP?', (((0, 1), (1, -3)),)), ('BP_', (((0, 1), (1, -3)), ((2, 0), (3, -1)))), ('BP_', (((0, 1), (3, -1)), ((1, 2), (1, -3)), ((2, 0), (2, -2))))] ('G', 2, 1) [('BH?', (((1, 2), (3, -1)),)), ('BGO', (((2, 1), (1, -3)),)), ('BW?', (((0, 1), (1, -3)),)), ('BP?', (((0, 1), (3, -1)),)), ('BKO', (((1, 0), (3, -1)), ((2, 1), (1, -3)))), ('BP_', (((0, 1), (2, -2)), ((1, 2), (1, -3)), ((2, 0), (3, -1))))] """ from sage.combinat.cluster_algebra_quiver.quiver import ClusterQuiver data = {} # finite E if n in [6,7,8]: data[ ('E',n) ] = ClusterQuiver(['E',n]).mutation_class(data_type='dig6') # affine E if n in [7,8,9]: data[ ('E',n-1,1) ] = ClusterQuiver(['E',n-1,1]).mutation_class(data_type='dig6') # elliptic E if n in [8,9,10]: data[ ('E',n-2,(1,1)) ] = ClusterQuiver(['E',n-2,[1,1]]).mutation_class(data_type='dig6') # finite F if n == 4: data[ ('F',4) ] = ClusterQuiver(['F',4]).mutation_class(data_type='dig6') # affine F if n == 5: data[ ('F',4,1) ] = ClusterQuiver(['F',4,1]).mutation_class(data_type='dig6') data[ ('F',4,-1) ] = ClusterQuiver(['F',4,-1]).mutation_class(data_type='dig6') # finite G if n == 2: data[ ('G',2) ] = ClusterQuiver(['G',2]).mutation_class(data_type='dig6') # affine G if n == 3: data[ ('G',2,1) ] = ClusterQuiver(['G',2,1]).mutation_class(data_type='dig6') data[ ('G',2,-1) ] = ClusterQuiver(['G',2,-1]).mutation_class(data_type='dig6') # elliptic G if n == 4: data[ ('G',2,(1,3)) ] = ClusterQuiver(['G',2,(1,3)]).mutation_class(data_type='dig6') data[ ('G',2,(1,1)) ] = ClusterQuiver(['G',2,(1,1)]).mutation_class(data_type='dig6') data[ ('G',2,(3,3)) ] = ClusterQuiver(['G',2,(3,3)]).mutation_class(data_type='dig6') # X if n in [6,7]: data[ ('X',n) ] = ClusterQuiver(['X',n]).mutation_class(data_type='dig6') # elliptic F if n == 6: data[ ('F',4,(1,2)) ] = ClusterQuiver(['F',4,(1,2)]).mutation_class(data_type='dig6') data[ ('F',4,(1,1)) ] = ClusterQuiver(['F',4,(1,1)]).mutation_class(data_type='dig6') data[ ('F',4,(2,2)) ] = ClusterQuiver(['F',4,(2,2)]).mutation_class(data_type='dig6') return data def _save_data_dig6(n, types='ClassicalExceptional', verbose=False): """ Save all exceptional mutation classes as dig6 data into the file ``exc_classes_n.dig6`` in the folder ``DOT_SAGE``. TESTS:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import save_quiver_data sage: save_quiver_data(2) # indirect doctest <BLANKLINE> The following types are saved to file ... and will now be used to determine quiver mutation types: [('A', 1)] <BLANKLINE> The following types are saved to file ... and will now be used to determine quiver mutation types: [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1), ('G', 2)] sage: save_quiver_data(2,up_to=False) # indirect doctest <BLANKLINE> The following types are saved to file ... and will now be used to determine quiver mutation types: [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1), ('G', 2)] sage: save_quiver_data(2,up_to=False, types='Classical') # indirect doctest <BLANKLINE> The following types are saved to file ... 
and will now be used to determine quiver mutation types: [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1)] sage: save_quiver_data(2,up_to=False, types='Exceptional') # indirect doctest <BLANKLINE> The following types are saved to file ... and will now be used to determine quiver mutation types: [('G', 2)] sage: save_quiver_data(2,up_to=False, verbose=False) # indirect doctest """ import os.path from six.moves import cPickle data = {} possible_types = ['Classical', 'ClassicalExceptional', 'Exceptional'] if types not in possible_types: raise ValueError('The third input must be either ClassicalExceptional' ' (default), Classical, or Exceptional.') if types in possible_types[:2]: data.update(_construct_classical_mutation_classes(n)) if types in possible_types[1:]: data.update(_construct_exceptional_mutation_classes(n)) from sage.env import DOT_SAGE from sage.misc.misc import sage_makedirs types_path = os.path.join(DOT_SAGE, 'cluster_algebra_quiver') types_file = os.path.join(types_path,'mutation_classes_%s.dig6'%n) sage_makedirs(types_path) from sage.misc.temporary_file import atomic_write with atomic_write(types_file, binary=True) as f: cPickle.dump(data, f) if verbose: keys = sorted(data.keys(),key=str) print("\nThe following types are saved to file", types_file,"and will now be used to determine quiver mutation types:") print(keys) def save_quiver_data(n, up_to=True, types='ClassicalExceptional', verbose=True): r""" Save mutation classes of certain quivers of ranks up to and equal to ``n`` or equal to ``n`` to ``DOT_SAGE/cluster_algebra_quiver/mutation_classes_n.dig6``. This data will then be used to determine quiver mutation types. INPUT: - ``n``: the rank (or the upper limit on the rank) of the mutation classes that are being saved. - ``up_to`` -- (default:``True``) if ``True``, saves data for ranks smaller than or equal to ``n``. If ``False``, saves data for rank exactly ``n``. - ``types`` -- (default:'ClassicalExceptional') if all, saves data for both exceptional mutation-finite quivers and for classical quiver. The input 'Exceptional' or 'Classical' is also allowed to save only part of this data. TESTS:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import save_quiver_data sage: save_quiver_data(2) <BLANKLINE> The following types are saved to file ... and will now be used to determine quiver mutation types: [('A', 1)] <BLANKLINE> The following types are saved to file ... and will now be used to determine quiver mutation types: [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1), ('G', 2)] sage: save_quiver_data(2,up_to=False) <BLANKLINE> The following types are saved to file ... and will now be used to determine quiver mutation types: [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1), ('G', 2)] sage: save_quiver_data(2,up_to=False, types='Classical') <BLANKLINE> The following types are saved to file ... and will now be used to determine quiver mutation types: [('A', (1, 1), 1), ('A', 2), ('B', 2), ('BC', 1, 1)] sage: save_quiver_data(2,up_to=False, types='Exceptional') <BLANKLINE> The following types are saved to file ... 
and will now be used to determine quiver mutation types: [('G', 2)] sage: save_quiver_data(2,up_to=False, verbose=False) """ from sage.combinat.cluster_algebra_quiver.mutation_type import load_data if up_to is True: ranks = range(1,n+1) elif up_to is False: ranks = [n] for i in ranks: _save_data_dig6(i,types=types,verbose=verbose) # we finally clear the load_data load_data.clear_cache() def _bipartite_graph_to_digraph(g): """ Return a digraph obtained from a bipartite graph ``g`` by choosing one set of the bipartition to be the set of sinks and the other to be the set of sources. EXAMPLES:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _bipartite_graph_to_digraph sage: G = Graph([(1,2)]) sage: _bipartite_graph_to_digraph(G) Digraph on 2 vertices """ if not g.is_bipartite(): raise ValueError('The input graph is not bipartite.') order = g.bipartite_sets() dg = DiGraph() for edge in g.edges(): if edge[0] in order[0]: dg.add_edge( edge[0], edge[1], edge[2] ) else: dg.add_edge( edge[1], edge[0], edge[2] ) for vert in g.vertex_iterator(): if vert not in dg.vertices(): dg.add_vertex(vert) return dg def _is_mutation_type(data): """ Return ``True`` if ``data`` is a QuiverMutationType. EXAMPLES:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _is_mutation_type sage: _is_mutation_type ( [ 'A', 2 ] ) True sage: _is_mutation_type ( [ 'P', 1 ] ) False """ try: QuiverMutationType(data) return True except Exception: return False def _mutation_type_error(data): r""" Output an error message because data which is not a valid quiver mutation type has been passed to QuiverMutationType. EXAMPLES:: sage: QuiverMutationType( 'Christian', 'Stump' ) # indirect doctest Traceback (most recent call last): ... ValueError: ['Christian', 'Stump'] is not a valid quiver mutation type Finite types have the form [ '?', n ] for type ? and rank n Affine type A has the form [ 'A', [ i, j ], 1 ] for rank i+j Affine type ? has the form [ '?', k, \pm 1 ] for rank k+1 Elliptic type ? has the form [ '?', k, [i, j] ] (1 <= i,j <= 3) for rank k+2 For correct syntax in other types, please consult the documentation. """ if data[2] is None: del data[2] return_str = str(data) + ' is not a valid quiver mutation type' return_str += '\n Finite types have the form [ \'?\', n ] for type ? and rank n' return_str += '\n Affine type A has the form [ \'A\', [ i, j ], 1 ] for rank i+j' return_str += '\n Affine type ? has the form [ \'?\', k, \\pm 1 ] for rank k+1' return_str += '\n Elliptic type ? has the form [ \'?\', k, [i, j] ] (1 <= i,j <= 3) for rank k+2' return_str += '\n For correct syntax in other types, please consult the documentation.' raise ValueError(return_str) def _edge_list_to_matrix(edges, nlist, mlist): r""" Return the matrix obtained from the edge list of a quiver. INPUT: - ``edges`` -- the list of edges - ``nlist`` -- the list of mutable vertices of the quiver - ``mlist`` -- the list of frozen vertices of the quiver OUTPUT: An `(n+m) \times n` matrix corresponding to the edge-list. 
EXAMPLES:: sage: from sage.combinat.cluster_algebra_quiver.quiver_mutation_type import _edge_list_to_matrix sage: G = QuiverMutationType(['A',2])._digraph sage: _edge_list_to_matrix(G.edges(), [0,1], []) [ 0 1] [-1 0] sage: G2 = DiGraph([('a', 'b', 1)]) sage: _edge_list_to_matrix(G2.edges(), ['a', 'b'], []) [ 0 1] [-1 0] sage: G3 = DiGraph([('a', 'b', 1), ('b', 'c', 1)]) sage: _edge_list_to_matrix(G3.edges(), ['a', 'b'], ['c']) [ 0 1] [-1 0] [ 0 -1] """ n = len(nlist) m = len(mlist) nmlist = nlist + mlist M = matrix(ZZ, n + m, n, sparse=True) for edge in edges: if edge[2] is None: edge = (edge[0], edge[1], (1, -1)) elif edge[2] in ZZ: edge = (edge[0], edge[1], (edge[2], -edge[2])) v1, v2, (a, b) = edge if v1 in nlist: M[nmlist.index(v2), nmlist.index(v1)] = b if v2 in nlist: M[nmlist.index(v1), nmlist.index(v2)] = a return M
is_irreducible
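A small pure-Python sketch (illustrative, not part of the Sage source above) of two facts stated in the docstrings: the generalized Cartan matrix C(M) arises from a B-matrix M by negating the positive entries and putting 2's on the main diagonal, and the finite type A mutation class sizes follow the Torkildsen formula quoted in class_size().

from math import comb

def cartan_from_b_matrix(B):
    # Negate positive entries (negative entries are unchanged by -abs)
    # and set the main diagonal to 2, as in cartan_matrix() above.
    n = len(B)
    return [[2 if i == j else -abs(B[i][j]) for j in range(n)]
            for i in range(n)]

# Standard quiver of type ['A', 3], following the b_matrix() doctest pattern.
B_A3 = [[0, 1, 0], [-1, 0, -1], [0, 1, 0]]
assert cartan_from_b_matrix(B_A3) == [[2, -1, 0], [-1, 2, -1], [0, -1, 2]]

def type_A_class_size(n):
    # The finite type A branch of class_size(), transcribed from above.
    a = comb(2 * (n + 1), n + 1) // (n + 2)
    if n % 2 == 1:
        a += comb(n + 1, (n + 1) // 2)
    if n % 3 == 0:
        a += 2 * comb(2 * n // 3, n // 3)
    return a // (n + 3)

assert type_A_class_size(5) == 19  # matches the ['A', 5] doctest value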
mod.rs
use serde::{Deserialize, Serialize}; // { // "action": "send_private_msg", // "params": { // "user_id": 10001000, // "message": "hello" // }, // "echo": 123 // } macro_rules! api_item { ( $(#[$a:meta])* pub enum API { $($item:ident,)+ } ) => { $(#[$a])* pub enum API{ $($item { params: $item, echo: usize },)+ } }; } trait APIItem{} #[derive(Serialize, Deserialize, Clone, Debug)] pub struct SendPrivateMsg { pub user_id: i64, pub message: String, } impl APIItem for SendPrivateMsg{} #[derive(Serialize, Deserialize, Clone, Debug)] pub struct SendGroupMsg { pub group_id: i32, 
api_item! { #[derive(Serialize, Deserialize, Clone, Debug)] #[serde(tag = "action")] #[serde(rename_all = "snake_case")] pub enum API { SendPrivateMsg, SendGroupMsg, } } impl API { pub fn build(&self) -> String { serde_json::to_string(self).unwrap() } }
pub message: String, } impl APIItem for SendGroupMsg{}
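A usage sketch for the record above (illustrative; the message value is made up). With #[serde(tag = "action")] and snake_case renaming, the enum produced by api_item! serializes to exactly the wire format shown in the comment at the top of the file.

#[cfg(test)]
mod wire_format_sketch {
    use super::*;

    #[test]
    fn send_private_msg_matches_wire_format() {
        let api = API::SendPrivateMsg {
            params: SendPrivateMsg {
                user_id: 10001000,
                message: "hello".to_string(),
            },
            echo: 123,
        };
        // Internally tagged enums emit the tag first, then the variant fields.
        assert_eq!(
            api.build(),
            r#"{"action":"send_private_msg","params":{"user_id":10001000,"message":"hello"},"echo":123}"#
        );
    }
}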
screen_article_get.js
const {Article} = require('../../model/article'); const mongoose_sex_page = require('mongoose-sex-page'); const {Directory} = require('../../model/directory'); module.exports = async (req,res)=>{ var index = req.query.page || 1; var result = await mongoose_sex_page(Article).page(index).display().size(10).find(req.query).populate('author').exec(); var ss = {
Object.assign(result,ss); return res.send(result); }
directory:await Directory.find() }
ExecutionTypeModel.js
define([ 'underscore', 'backbone' ], function(_, Backbone) { var ExecutionTypeModel = Backbone.Model.extend({ defaults: { id: 0, name: '', description: '' }, initialize: function() { }, urlRoot: '/api/executiontypes' }); return ExecutionTypeModel;
});
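A usage sketch (illustrative; the module path and id are hypothetical). With urlRoot set, Backbone derives per-model URLs itself, so fetching a model with id 3 issues GET /api/executiontypes/3.

require(['models/ExecutionTypeModel'], function (ExecutionTypeModel) {
    var executionType = new ExecutionTypeModel({ id: 3 });
    executionType.fetch(); // GET /api/executiontypes/3
    executionType.save();  // PUT /api/executiontypes/3 (POST when id is unset)
});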
redirect.rs
use std::str::Utf8Error; use rocket::{http::RawStr, request::FromParam}; pub struct Destination { pub uri: &'static str, } impl<'r> FromParam<'r> for Destination { type Error = NoRedirectFound; fn from_param(param: &'r RawStr) -> Result<Self, Self::Error>
} pub struct Locale(&'static str); impl<'r> FromParam<'r> for Locale { type Error = NoRedirectFound; fn from_param(param: &'r RawStr) -> Result<Self, Self::Error> { match param.percent_decode()?.as_ref() { "de-DE" => Ok(Locale("de-DE")), "en-US" => Ok(Locale("en-US")), "es-ES" => Ok(Locale("es-ES")), "fr-FR" => Ok(Locale("fr-FR")), "id-ID" => Ok(Locale("id-ID")), "it-IT" => Ok(Locale("it-IT")), "ja-JP" => Ok(Locale("ja-JP")), "ko-KR" => Ok(Locale("ko-KR")), "pl-PL" => Ok(Locale("pl-PL")), "pt-BR" => Ok(Locale("pt-BR")), "ru-RU" => Ok(Locale("ru-RU")), "sv-SE" => Ok(Locale("sv-SE")), "vi-VN" => Ok(Locale("vi-VN")), _ => Err(NoRedirectFound), } } } #[derive(Debug)] pub struct NoRedirectFound; impl From<Utf8Error> for NoRedirectFound { fn from(_: Utf8Error) -> Self { Self } }
{ let uri = match param.percent_decode()?.as_ref() { "Rust-npm-Whitepaper.pdf" => "Rust-npm-Whitepaper.pdf", "Rust-Chucklefish-Whitepaper.pdf" => "Rust-Chucklefish-Whitepaper.pdf", "Rust-Tilde-Whitepaper.pdf" => "Rust-Tilde-Whitepaper.pdf", "community.html" => "/community", "conduct.html" => "/policies/code-of-conduct", "contribute-bugs.html" => "/community", "contribute-community.html" => "/governance/teams/community", "contribute-compiler.html" => "/governance/teams/language-and-compiler", "contribute-docs.html" => "/governance/teams/documentation", "contribute-libs.html" => "/governance/teams/library", "contribute-tools.html" => "/governance/teams/dev-tools", "contribute.html" => "/community", "documentation.html" => "/learn", "downloads.html" => "/tools/install", "friends.html" => "/production", "index.html" => "/", "install.html" => "/tools/install", "legal.html" => "/policies", "other-installers.html" => { "https://forge.rust-lang.org/other-installation-methods.html" } "security.html" => "/policies/security", "team.html" => "/governance", "user-groups.html" => "/community", _ => return Err(NoRedirectFound), }; Ok(Destination { uri }) }
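A hypothetical route using the param guards above (illustrative, not part of the original file; assumes Rocket 0.4-style codegen brought in with #[macro_use] extern crate rocket). Redirect::permanent accepts the &'static str carried by Destination.

#[get("/<destination>")]
pub fn legacy_redirect(destination: Destination) -> rocket::response::Redirect {
    // Percent-decoding and the path lookup both happen in FromParam above;
    // unknown paths fail the guard and fall through to other routes or a 404.
    rocket::response::Redirect::permanent(destination.uri)
}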
utitities.py
from parsons.utilities import files
from parsons.utilities import check_env
import json
import os


def setup_google_application_credentials(app_creds, env_var_name='GOOGLE_APPLICATION_CREDENTIALS'):
    # Detect if app_creds is a dict, path string or json string, and if it is a
    # json string, then convert it to a temporary file. Then set the
    # environment variable.
    credentials = check_env.check(env_var_name, app_creds)

    try:
        if isinstance(credentials, dict):
            credentials = json.dumps(credentials)
        # Validate the JSON; this raises a ValueError when `credentials` is
        # not valid JSON, i.e. when it is a path to a credentials file.
        json.loads(credentials)
        creds_path = files.string_to_temp_file(credentials, suffix='.json')
    except ValueError:
        creds_path = credentials

    os.environ[env_var_name] = creds_path
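# Usage sketch (call sites are illustrative, not from this module): all three
# accepted forms end up as a file path stored in the environment variable.
#
# setup_google_application_credentials({'type': 'service_account'})    # dict
# setup_google_application_credentials('{"type": "service_account"}')  # JSON string
# setup_google_application_credentials('/path/to/credentials.json')    # existing file path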
EmailModelTestConfig.js
var objectAssign = require('object-assign'); var fieldTestObjectsPath = require('keystone-nightwatch-e2e').fieldTestObjectsPath;
module.exports = function EmailModelTestConfig (config) { return { name: new TextFieldTestObject(objectAssign({}, config, {fieldName: 'name'})), fieldA: new EmailFieldTestObject(objectAssign({}, config, {fieldName: 'fieldA'})), fieldB: new EmailFieldTestObject(objectAssign({}, config, {fieldName: 'fieldB'})), }; };
var path = require('path'); var TextFieldTestObject = require(path.resolve(fieldTestObjectsPath, 'TextFieldTestObject')); var EmailFieldTestObject = require(path.resolve(fieldTestObjectsPath, 'EmailFieldTestObject'));
test_snapshot_actions.py
# Copyright 2013 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from cinderclient.tests import utils from cinderclient.tests.v2 import fakes cs = fakes.FakeClient() class SnapshotActionsTest(utils.TestCase): def test_update_snapshot_status(self): s = cs.volume_snapshots.get('1234') cs.volume_snapshots.update_snapshot_status(s, {'status': 'available'}) cs.assert_called('POST', '/snapshots/1234/action') def
(self): s = cs.volume_snapshots.get('1234') cs.volume_snapshots.update_snapshot_status(s, {'status': 'available', 'progress': '73%'}) cs.assert_called('POST', '/snapshots/1234/action')
test_update_snapshot_status_with_progress
listening-profile.component.spec.ts
import { async, ComponentFixture, TestBed } from '@angular/core/testing'; import { ListeningProfileComponent } from './listening-profile.component'; describe('ListeningProfileComponent', () => {
let fixture: ComponentFixture<ListeningProfileComponent>; beforeEach(async(() => { TestBed.configureTestingModule({ declarations: [ ListeningProfileComponent ] }) .compileComponents(); })); beforeEach(() => { fixture = TestBed.createComponent(ListeningProfileComponent); component = fixture.componentInstance; fixture.detectChanges(); }); it('should create', () => { expect(component).toBeTruthy(); }); });
let component: ListeningProfileComponent;
exports.rs
//! exports exposes the public wasm API
//!
//! cosmwasm_vm_version_1, allocate and deallocate turn into Wasm exports
//! as soon as cosmwasm_std is `use`d in the contract, even privately.
//!
//! do_init, do_handle, do_query and do_migrate should each be wrapped in an
//! extern "C" entry point including the contract-specific function pointer.

use std::fmt;
use std::os::raw::c_void;
use std::vec::Vec;

use schemars::JsonSchema;
use serde::{de::DeserializeOwned, Serialize};

use crate::errors::StdResult;
use crate::imports::{ExternalApi, ExternalQuerier, ExternalStorage};
use crate::memory::{alloc, consume_region, release_buffer};
use crate::serde::{from_slice, to_vec};
use crate::traits::Extern;
use crate::{Env, HandleResult, InitResult, MigrateResult, QueryResponse, QueryResult};

#[cfg(feature = "staking")]
#[no_mangle]
extern "C" fn requires_staking() -> () {}

/// cosmwasm_vm_version_* exports mark which Wasm VM interface level this contract is compiled for.
/// They can be checked by cosmwasm_vm.
/// Update this whenever the Wasm VM interface breaks.
#[no_mangle]
extern "C" fn cosmwasm_vm_version_1() -> () {}

/// allocate reserves the given number of bytes in wasm memory and returns a pointer
/// to a Region defining this data. This space is managed by the calling process
/// and should be accompanied by a corresponding deallocate
#[no_mangle]
extern "C" fn allocate(size: usize) -> u32 {
    alloc(size) as u32
}

/// deallocate expects a pointer to a Region created with allocate.
/// It will free both the Region and the memory referenced by the Region.
#[no_mangle]
extern "C" fn deallocate(pointer: u32) {
    // auto-drop Region on function end
    let _ = unsafe { consume_region(pointer as *mut c_void) };
}

/// do_init should be wrapped in an external "C" export, containing a contract-specific function as arg
pub fn do_init<T, U>(
    init_fn: &dyn Fn(
        &mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>,
        Env,
        T,
    ) -> InitResult<U>,
    env_ptr: u32,
    msg_ptr: u32,
) -> u32
where
    T: DeserializeOwned + JsonSchema,
    U: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,
{
    let res: InitResult<U> = _do_init(init_fn, env_ptr as *mut c_void, msg_ptr as *mut c_void);
    let v = to_vec(&res).unwrap();
    release_buffer(v) as u32
}

/// do_handle should be wrapped in an external "C" export, containing a contract-specific function as arg
pub fn do_handle<T, U>(
    handle_fn: &dyn Fn(
        &mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>,
        Env,
        T,
    ) -> HandleResult<U>,
    env_ptr: u32,
    msg_ptr: u32,
) -> u32
where
    T: DeserializeOwned + JsonSchema,
    U: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema,
{
    let res: HandleResult<U> =
        _do_handle(handle_fn, env_ptr as *mut c_void, msg_ptr as *mut c_void);
    let v = to_vec(&res).unwrap();
    release_buffer(v) as u32
}

/// do_query should be wrapped in an external "C" export, containing a contract-specific function as arg
pub fn do_query<T: DeserializeOwned + JsonSchema>(
    query_fn: &dyn Fn(
        &Extern<ExternalStorage, ExternalApi, ExternalQuerier>,
        T,
    ) -> StdResult<QueryResponse>,
    msg_ptr: u32,
) -> u32 {
    let res: QueryResult = _do_query(query_fn, msg_ptr as *mut c_void);
    let v = to_vec(&res).unwrap();
    release_buffer(v) as u32
}

/// do_migrate should be wrapped in an external "C" export, containing a contract-specific function as arg
pub fn do_migrate<T, U>(
    migrate_fn: &dyn Fn(
        &mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>,
        Env,
        T,
    ) -> MigrateResult<U>,
    env_ptr: u32,
    msg_ptr: u32,
) -> u32
where
    T: DeserializeOwned + JsonSchema,
    U: Serialize + Clone + fmt::Debug + PartialEq +
JsonSchema, { let res: MigrateResult<U> = _do_migrate(migrate_fn, env_ptr as *mut c_void, msg_ptr as *mut c_void); let v = to_vec(&res).unwrap(); release_buffer(v) as u32 } fn _do_init<T, U>( init_fn: &dyn Fn( &mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>, Env, T, ) -> InitResult<U>, env_ptr: *mut c_void, msg_ptr: *mut c_void, ) -> InitResult<U> where T: DeserializeOwned + JsonSchema, U: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema, { let env: Vec<u8> = unsafe { consume_region(env_ptr) }; let msg: Vec<u8> = unsafe { consume_region(msg_ptr) }; let env: Env = from_slice(&env)?; let msg: T = from_slice(&msg)?; let mut deps = make_dependencies(); init_fn(&mut deps, env, msg) } fn _do_handle<T, U>( handle_fn: &dyn Fn( &mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>, Env, T, ) -> HandleResult<U>, env_ptr: *mut c_void, msg_ptr: *mut c_void, ) -> HandleResult<U> where T: DeserializeOwned + JsonSchema, U: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema, { let env: Vec<u8> = unsafe { consume_region(env_ptr) }; let msg: Vec<u8> = unsafe { consume_region(msg_ptr) }; let env: Env = from_slice(&env)?; let msg: T = from_slice(&msg)?; let mut deps = make_dependencies(); handle_fn(&mut deps, env, msg) } fn _do_query<T: DeserializeOwned + JsonSchema>( query_fn: &dyn Fn( &Extern<ExternalStorage, ExternalApi, ExternalQuerier>, T, ) -> StdResult<QueryResponse>, msg_ptr: *mut c_void, ) -> StdResult<QueryResponse> { let msg: Vec<u8> = unsafe { consume_region(msg_ptr) }; let msg: T = from_slice(&msg)?; let deps = make_dependencies(); query_fn(&deps, msg) } fn
<T, U>( migrate_fn: &dyn Fn( &mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>, Env, T, ) -> MigrateResult<U>, env_ptr: *mut c_void, msg_ptr: *mut c_void, ) -> MigrateResult<U> where T: DeserializeOwned + JsonSchema, U: Serialize + Clone + fmt::Debug + PartialEq + JsonSchema, { let env: Vec<u8> = unsafe { consume_region(env_ptr) }; let msg: Vec<u8> = unsafe { consume_region(msg_ptr) }; let env: Env = from_slice(&env)?; let msg: T = from_slice(&msg)?; let mut deps = make_dependencies(); migrate_fn(&mut deps, env, msg) } /// Makes all bridges to external dependencies (i.e. Wasm imports) that are injected by the VM fn make_dependencies() -> Extern<ExternalStorage, ExternalApi, ExternalQuerier> { Extern { storage: ExternalStorage::new(), api: ExternalApi::new(), querier: ExternalQuerier::new(), } }
_do_migrate
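// Illustrative sketch of the wrapping described in the module docs above;
// `contract::init` and the message/response types are hypothetical names
// from a consuming contract crate, not defined in this file:
//
// #[no_mangle]
// pub extern "C" fn init(env_ptr: u32, msg_ptr: u32) -> u32 {
//     do_init(&contract::init, env_ptr, msg_ptr)
// }
//
// where `contract::init` is a
// `fn(&mut Extern<ExternalStorage, ExternalApi, ExternalQuerier>, Env, InitMsg) -> InitResult<InitResponse>`.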
iter.rs
// Issue states
//
// Copyright (c) 2018 Julian Ganz
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
use std::cmp::Ordering;

/// Iterator for performing a left join
///
/// This iterator wraps two iterators that yield key-value tuples, where the
/// two value types may differ. It yields tuples of values from both
/// iterators, matched by key: every value from the left iterator is
/// included, paired with the value found in the right iterator for the
/// respective key, or with `None` if the right iterator has no such key.
///
/// # Note
///
/// This iterator assumes that both iterators yield elements sorted by the key,
/// with each key being unique within the respective list.
///
pub struct LeftJoin<L, R, K, U, V>
    where L: Iterator<Item = (K, U)>,
          R: Iterator<Item = (K, V)>,
          K: Ord
{
    left: L,
    right: R,
    buf: Option<R::Item>,
}

impl<L, R, K, U, V> LeftJoin<L, R, K, U, V>
    where L: Iterator<Item = (K, U)>,
          R: Iterator<Item = (K, V)>,
          K: Ord
{
    /// Create a new `LeftJoin`
    ///
    pub fn new(left: L, right: R) -> Self {
        Self { left, right, buf: None }
    }

    /// Get the next right element for a given "left" key
    ///
    fn next_right(&mut self, key: &K) -> Option<V> {
        let mut buf = self.buf.take().or_else(|| self.right.next());
        loop {
            return match buf.as_ref().map(|item| item.0.cmp(key)) {
                Some(Ordering::Less) => {
                    buf = self.right.next();
                    continue
                },
                Some(Ordering::Equal) => buf.map(|item| item.1),
                _ => {
                    self.buf = buf;
                    None
                },
            };
        }
    }
}

impl<L, R, K, U, V> Iterator for LeftJoin<L, R, K, U, V>
    where L: Iterator<Item = (K, U)>,
          R: Iterator<Item = (K, V)>,
          K: Ord
{
    type Item = (U, Option<V>);

    fn next(&mut self) -> Option<Self::Item> {
        let item = self.left.next();
        item.map(|item| {
            let value = self.next_right(&item.0);
            (item.1, value)
        })
    }
}

/// Convenience trait for creating a left join
///
pub trait LeftJoinable<L, R, K, U, V>
    where L: Iterator<Item = (K, U)>,
          R: Iterator<Item = (K, V)>,
          K: Ord
{
    fn join_left(self, right: R) -> LeftJoin<L, R, K, U, V>;
}

impl<L, R, K, U, V> LeftJoinable<L, R, K, U, V> for L
    where L: Iterator<Item = (K, U)>,
          R: Iterator<Item = (K, V)>,
          K: Ord
{
    fn join_left(self, right: R) -> LeftJoin<L, R, K, U, V>
} #[cfg(test)] mod tests { use super::*; #[test] fn smoke() { let left = vec![ (1, "a"), (3, "b"), (4, "c"), (7, "d"), (8, "e") ]; let right = vec![ (2, "x"), (3, "u"), (4, "v"), (5, "x"), (6, "x"), (7, "w") ]; let mut res = LeftJoin::new(left.into_iter(), right.into_iter()); assert_eq!(res.next(), Some(("a", None))); assert_eq!(res.next(), Some(("b", Some("u")))); assert_eq!(res.next(), Some(("c", Some("v")))); assert_eq!(res.next(), Some(("d", Some("w")))); assert_eq!(res.next(), Some(("e", None))); assert_eq!(res.next(), None) } }
{ LeftJoin::new(self, right) }
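// Usage sketch for the convenience trait (the data is illustrative, mirroring
// the smoke test above but going through `join_left` instead of `LeftJoin::new`):
#[cfg(test)]
mod join_left_example {
    use super::*;

    #[test]
    fn trait_form() {
        let users = vec![(1, "alice"), (2, "bob")];
        let emails = vec![(2, "bob@example.com")];
        let joined: Vec<_> = users.into_iter().join_left(emails.into_iter()).collect();
        assert_eq!(joined, vec![("alice", None), ("bob", Some("bob@example.com"))]);
    }
}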
cashier.isValidWager.test.js
const cashier = require("../cashier");
const data = require("../data");

let user; // shared test fixture, reset before each test

beforeEach(async (done) => {
    user = await data.resetTestUser();
    done();
});

function setUserBalance(balance) {
    user.fields.Balance = balance;
    user.fields.AvailableBalance = balance;
}

test('User bets 10, returns true object', () => {
    setUserBalance(100);
    const validWagerCheck = cashier.isValidWager(user, 10);
    expect(validWagerCheck.isValid).toBe(true);
    expect(validWagerCheck.status).toBe("VALID_WAGER");
});

test('User bets -10, returns false object', () => {
    setUserBalance(100);
    const validWagerCheck = cashier.isValidWager(user, -10);
    expect(validWagerCheck.isValid).toBe(false);
    expect(validWagerCheck.status).toBe("BELOW_MINIMUM_LIMIT");
});

test('User bets 100, returns false object', () => {
    setUserBalance(100);
    const validWagerCheck = cashier.isValidWager(user, 100);
expect(validWagerCheck.isValid).toBe(false); expect(validWagerCheck.status).toBe("ABOVE_MAXIMUM_LIMIT"); }); test('User bets 1000, returns false object', () => { setUserBalance(100); const validWagerCheck = cashier.isValidWager(user, 1000); expect(validWagerCheck.isValid).toBe(false); expect(validWagerCheck.status).toBe("BET_ABOVE_AVAILABLE_BALANCE"); });
builder.py
import warnings

import torch.nn as nn
from mmcv.utils import Registry, build_from_cfg

from .registry import BACKBONES, HEADS, LOCALIZERS, LOSSES, NECKS, RECOGNIZERS

try:
    from mmdet.models.builder import DETECTORS, build_detector
except (ImportError, ModuleNotFoundError):
    warnings.warn('Please install mmdet to use DETECTORS, build_detector')

    # Define a placeholder registry and builder func so that the names can
    # still be imported when mmdet is unavailable.
    DETECTORS = Registry('detector')

    def build_detector(cfg, train_cfg, test_cfg):
        pass


def build(cfg, registry, default_args=None):
    """Build a module.

    Args:
        cfg (dict, list[dict]): The config of modules; it is either a dict
            or a list of configs.
        registry (:obj:`Registry`): A registry the module belongs to.
        default_args (dict, optional): Default arguments to build the module.
            Defaults to None.

    Returns:
        nn.Module: A built nn module.
    """
    if isinstance(cfg, list):
        modules = [
            build_from_cfg(cfg_, registry, default_args) for cfg_ in cfg
        ]
        return nn.Sequential(*modules)

    return build_from_cfg(cfg, registry, default_args)


def build_backbone(cfg):
    """Build backbone."""
    return build(cfg, BACKBONES)


def build_head(cfg):
    """Build head."""
    return build(cfg, HEADS)


def build_recognizer(cfg, train_cfg=None, test_cfg=None):
    """Build recognizer."""
    return build(cfg, RECOGNIZERS,
                 dict(train_cfg=train_cfg, test_cfg=test_cfg))


def build_loss(cfg):
    """Build loss."""
    return build(cfg, LOSSES)


def build_localizer(cfg):
def build_model(cfg, train_cfg=None, test_cfg=None): """Build model.""" args = cfg.copy() obj_type = args.pop('type') if obj_type in LOCALIZERS: return build_localizer(cfg) if obj_type in RECOGNIZERS: return build_recognizer(cfg, train_cfg, test_cfg) if obj_type in DETECTORS: return build_detector(cfg, train_cfg, test_cfg) raise ValueError(f'{obj_type} is not registered in ' 'LOCALIZERS, RECOGNIZERS or DETECTORS') def build_neck(cfg): """Build neck.""" return build(cfg, NECKS)
"""Build localizer.""" return build(cfg, LOCALIZERS)
tray.rs
use std::fmt;

/// Category of this item.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Category {
    /// The item describes the status of a generic application, for instance
    /// the current state of a media player. In the case where the category of
    /// the item cannot be known, such as when the item is being proxied from
    /// another incompatible or emulated system, ApplicationStatus can be used
    /// as a sensible default fallback.
    ApplicationStatus,
    /// The item describes the status of communication oriented applications,
    /// like an instant messenger or an email client.
    Communications,
    /// The item describes services of the system not seen as a stand alone
    /// application by the user, such as an indicator for the activity of a disk
    /// indexing service.
    SystemServices,
    /// The item describes the state and control of a particular piece of
    /// hardware, such as an indicator of the battery charge or sound card
    /// volume control.
    Hardware,
}

impl fmt::Display for Category {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let r = match *self {
            Category::ApplicationStatus => "ApplicationStatus",
            Category::Communications => "Communications",
            Category::SystemServices => "SystemServices",
            Category::Hardware => "Hardware",
        };
        f.write_str(r)
    }
}

/// Status of this item or of the associated application.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Status {
    /// The item doesn't convey important information to the user; it can be
    /// considered an "idle" status, and visualizations will likely choose to
    /// hide it.
    Passive,
    /// The item is active; it is important enough that it should be shown in
    /// some way to the user.
    Active,
    /// The item carries really important information for the user, such as
    /// the battery charge running out, and wants to prompt direct user
    /// intervention. Visualizations should emphasize in some way the items with
    /// NeedsAttention status.
    NeedsAttention,
}

impl fmt::Display for Status {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let r = match *self {
            Status::Passive => "Passive",
            Status::Active => "Active",
            Status::NeedsAttention => "NeedsAttention",
        };
        f.write_str(r)
    }
}

/// Extra information associated to the item
///
/// It can be visualized for instance by a tooltip (or by any other means the
/// visualization considers appropriate).
#[derive(Clone, Debug, Default, Hash)]
pub struct ToolTip {
    /// Freedesktop-compliant name for an icon.
    pub icon_name: String,
/// Icon data pub icon_pixmap: Vec<Icon>, /// Title for this tooltip pub title: String, /// Descriptive text for this tooltip. It can contain also a subset of the /// HTML markup language, for a list of allowed tags see Section Markup. pub description: String, } impl From<ToolTip> for (String, Vec<(i32, i32, Vec<u8>)>, String, String) { fn from(tooltip: ToolTip) -> Self { ( tooltip.icon_name, tooltip.icon_pixmap.into_iter().map(Into::into).collect(), tooltip.title, tooltip.description, ) } } /// An ARGB32 image #[derive(Clone, Debug, Hash)] pub struct Icon { pub width: i32, pub height: i32, /// ARGB32 format, network byte order pub data: Vec<u8>, } impl From<Icon> for (i32, i32, Vec<u8>) { fn from(icon: Icon) -> Self { (icon.width, icon.height, icon.data) } }
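// Sketch (illustrative data): the tuple `From` impls above give these types
// their wire representation, presumably for D-Bus marshalling in the
// StatusNotifierItem protocol, so conversion is a plain `.into()`:
//
// let tooltip = ToolTip {
//     icon_name: "battery-caution".into(),
//     icon_pixmap: vec![],
//     title: "Battery".into(),
//     description: "10% remaining".into(),
// };
// let wire: (String, Vec<(i32, i32, Vec<u8>)>, String, String) = tooltip.into();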
1.ts
import { Card } from '../../../interfaces' import Set from '../Detective Pikachu' const card: Card = { name: { en: "Bulbasaur", fr: "Bulbizarre", es: "Bulbasaur", it: "Bulbasaur", pt: "Bulbasaur", de: "Bisasam" }, illustrator: "MPC Film", rarity: "Common", category: "Pokemon", set: Set, dexId: [ 1, ], hp: 60, types: [ "Grass", ], stage: "Basic", attacks: [
cost: [ "Grass", ], name: { en: "Find a Friend", fr: "Trouver un Ami", es: "Encontrar un Amigo", it: "Trovamico", pt: "Encontre um Amigo", de: "Freunde finden" }, effect: { en: "Search your deck for a Grass Pokémon, reveal it, and put it into your hand. Then, shuffle your deck.", fr: "Cherchez un Pokémon Grass dans votre deck, montrez-le, puis ajoutez-le à votre main. Mélangez ensuite votre deck.", es: "Busca en tu baraja 1 Pokémon Grass, enséñalo y ponlo en tu mano. Después, baraja las cartas de tu baraja.", it: "Cerca nel tuo mazzo un Pokémon Grass, mostralo e aggiungilo alle carte che hai in mano. Poi rimischia le carte del tuo mazzo.", pt: "Procure por 1 Pokémon Grass no seu baralho, revele-o e coloque-o na sua mão. Em seguida, embaralhe o seu baralho.", de: "Durchsuche dein Deck nach 1 Grass-Pokémon, zeige es deinem Gegner und nimm es auf deine Hand. Mische anschließend dein Deck." }, }, ], weaknesses: [ { type: "Fire", value: "×2" }, ], retreat: 1, } export default card
{
test_torch.py
from unittest.mock import patch import numpy as np import os import pytest import time import torch import torch.nn as nn import torch.distributed as dist from torch.utils.data import DataLoader import ray from ray import tune from ray.util.sgd.torch import TorchTrainer from ray.util.sgd.torch.training_operator import ( get_test_operator, get_test_metrics_operator, TrainingOperator) from ray.util.sgd.torch.constants import SCHEDULER_STEP from ray.util.sgd.utils import (check_for_failure, NUM_SAMPLES, BATCH_COUNT, BATCH_SIZE) from ray.util.sgd.data.examples import mlp_identity from ray.util.sgd.torch.examples.train_example import ( model_creator, optimizer_creator, data_creator, LinearDataset) @pytest.fixture def ray_start_2_cpus(): address_info = ray.init(num_cpus=2) yield address_info # The code after the yield will run as teardown code. ray.shutdown() # Ensure that tests don't ALL fail if dist.is_initialized(): dist.destroy_process_group() @pytest.fixture def ray_start_4_cpus(): address_info = ray.init(num_cpus=4) yield address_info # The code after the yield will run as teardown code. ray.shutdown() # Ensure that tests don't ALL fail if dist.is_initialized(): dist.destroy_process_group() Operator = TrainingOperator.from_creators( model_creator, optimizer_creator, data_creator, loss_creator=nn.MSELoss) def test_single_step(ray_start_2_cpus): # noqa: F811 trainer = TorchTrainer(training_operator_cls=Operator, num_workers=1) metrics = trainer.train(num_steps=1) assert metrics[BATCH_COUNT] == 1 val_metrics = trainer.validate(num_steps=1) assert val_metrics[BATCH_COUNT] == 1 trainer.shutdown() def test_dead_trainer(ray_start_2_cpus): # noqa: F811 TestOperator = get_test_operator(Operator) trainer = TorchTrainer(training_operator_cls=TestOperator, num_workers=2) trainer.train(num_steps=1) trainer.shutdown() with pytest.raises(RuntimeError): trainer.train() @pytest.mark.parametrize("num_workers", [1, 2] if dist.is_available() else [1]) def test_train(ray_start_2_cpus, num_workers): # noqa: F811 trainer = TorchTrainer( training_operator_cls=Operator, num_workers=num_workers) for i in range(3): train_loss1 = trainer.train()["train_loss"] validation_loss1 = trainer.validate()["val_loss"] for i in range(3): train_loss2 = trainer.train()["train_loss"] validation_loss2 = trainer.validate()["val_loss"] assert train_loss2 <= train_loss1, (train_loss2, train_loss1) assert validation_loss2 <= validation_loss1, (validation_loss2, validation_loss1) trainer.shutdown() @pytest.mark.parametrize("num_workers", [1, 2] if dist.is_available() else [1]) def test_multi_model(ray_start_2_cpus, num_workers): def train(*, model=None, criterion=None, optimizer=None, iterator=None): model.train() train_loss = 0 correct = 0 total = 0 for batch_idx, (inputs, targets) in enumerate(iterator): optimizer.zero_grad() outputs = model(inputs) loss = criterion(outputs, targets) loss.backward() optimizer.step() train_loss += loss.item() _, predicted = outputs.max(1) total += targets.size(0) correct += predicted.eq(targets).sum().item() return { "accuracy": correct / total, "train_loss": train_loss / (batch_idx + 1) } def train_epoch(self, iterator, info): result = {} data = list(iterator) for i, (model, optimizer) in enumerate( zip(self.models, self.optimizers)): result[f"model_{i}"] = train( model=model, criterion=self.criterion, optimizer=optimizer, iterator=iter(data)) return result class MultiModelOperator(TrainingOperator): def setup(self, config): models = nn.Linear(1, 1), nn.Linear(1, 1) opts = [ 
torch.optim.SGD(model.parameters(), lr=0.0001) for model in models ] loss = nn.MSELoss() train_dataloader, val_dataloader = data_creator(config) self.models, self.optimizers, self.criterion = self.register( models=models, optimizers=opts, criterion=loss) self.register_data( train_loader=train_dataloader, validation_loader=val_dataloader) TestOperator = get_test_operator(MultiModelOperator) trainer1 = TorchTrainer( config={"custom_func": train_epoch}, training_operator_cls=TestOperator, num_workers=num_workers) trainer1.train() state = trainer1.state_dict() models1 = trainer1.get_model() trainer1.shutdown() trainer2 = TorchTrainer( config={"custom_func": train_epoch}, training_operator_cls=TestOperator, num_workers=num_workers) trainer2.load_state_dict(state) models2 = trainer2.get_model() for model_1, model_2 in zip(models1, models2): model1_state_dict = model_1.state_dict() model2_state_dict = model_2.state_dict() assert set(model1_state_dict.keys()) == set(model2_state_dict.keys()) for k in model1_state_dict: assert torch.equal(model1_state_dict[k], model2_state_dict[k]) trainer2.shutdown() @pytest.mark.parametrize("num_workers", [1, 2] if dist.is_available() else [1]) def test_multi_model_matrix(ray_start_2_cpus, num_workers): # noqa: F811 def train_epoch(self, iterator, info): if self.config.get("models", 1) > 1: assert len(self.models) == self.config["models"], self.config if self.config.get("optimizers", 1) > 1: assert len( self.optimizers) == self.config["optimizers"], self.config if self.config.get("schedulers", 1) > 1: assert len( self.schedulers) == self.config["schedulers"], self.config return {"done": 1} def multi_model_creator(config): models = [] for i in range(config.get("models", 1)): models += [nn.Linear(1, 1)] return models[0] if len(models) == 1 else models def multi_optimizer_creator(models, config): optimizers = [] main_model = models[0] if type(models) is list else models for i in range(config.get("optimizers", 1)): optimizers += [torch.optim.SGD(main_model.parameters(), lr=0.0001)] return optimizers[0] if len(optimizers) == 1 else optimizers def multi_scheduler_creator(optimizer, config): schedulers = [] main_opt = optimizer[0] if type(optimizer) is list else optimizer for i in range(config.get("schedulers", 1)): schedulers += [ torch.optim.lr_scheduler.StepLR( main_opt, step_size=30, gamma=0.1) ] return schedulers[0] if len(schedulers) == 1 else schedulers class MultiModelOperator(TrainingOperator): def setup(self, config): models = multi_model_creator(config) optimizers = multi_optimizer_creator(models, config) schedulers = multi_scheduler_creator(optimizers, config) train_loader, val_loader = data_creator(config) loss = nn.MSELoss() self.models, self.optimizers, self.criterion, self.schedulers = \ self.register(models=models, optimizers=optimizers, schedulers=schedulers, criterion=loss) self.register_data( train_loader=train_loader, validation_loader=val_loader) TestOperator = get_test_operator(MultiModelOperator) for model_count in range(1, 3): for optimizer_count in range(1, 3): for scheduler_count in range(1, 3): trainer = TorchTrainer( scheduler_step_freq="epoch", training_operator_cls=TestOperator, num_workers=num_workers, config={ "models": model_count, "optimizers": optimizer_count, "schedulers": scheduler_count, "custom_func": train_epoch }) trainer.train() trainer.shutdown() @pytest.mark.parametrize("scheduler_freq", ["epoch", "batch", "manual", None]) def test_scheduler_freq(ray_start_2_cpus, scheduler_freq): # noqa: F811 def train_epoch(self, iterator, 
info): assert info[SCHEDULER_STEP] == scheduler_freq return {"done": 1} def scheduler_creator(optimizer, config): return torch.optim.lr_scheduler.StepLR( optimizer, step_size=30, gamma=0.1) class TestTrainingOperator(TrainingOperator): def setup(self, config): model = model_creator(config) optimizer = optimizer_creator(model, config) train_loader, val_loader = data_creator(config) scheduler = scheduler_creator(optimizer, config) loss = nn.MSELoss() self.model, self.optimizer, self.criterion, self.scheduler = \ self.register( models=model, optimizers=optimizer, criterion=loss, schedulers=scheduler) self.register_data( train_loader=train_loader, validation_loader=val_loader) if scheduler_freq is None: with pytest.raises(ValueError): trainer = TorchTrainer( config={"custom_func": train_epoch}, training_operator_cls=TestTrainingOperator, scheduler_step_freq=scheduler_freq) else: trainer = TorchTrainer( config={"custom_func": train_epoch}, training_operator_cls=TestTrainingOperator, scheduler_step_freq=scheduler_freq) for i in range(3): trainer.train() trainer.shutdown() def test_profiling(ray_start_2_cpus): # noqa: F811 trainer = TorchTrainer(training_operator_cls=Operator) stats = trainer.train(profile=True) assert "profile" in stats stats = trainer.validate(profile=True) assert "profile" in stats trainer.shutdown() def test_dataset(ray_start_4_cpus): """ This test tries training the mlp_identity example. We check the accuracy of the model as an all inclusive way of ensuring that we are properly sharding and iterating over the entire dataset (instead of repeating the first set of points for example). """ model_creator = mlp_identity.model_creator optimizer_creator = mlp_identity.optimizer_creator dataset_creator = mlp_identity.dataset_creator DatasetOperator = TrainingOperator.from_creators( model_creator=model_creator, optimizer_creator=optimizer_creator, loss_creator=nn.MSELoss) trainer = TorchTrainer( training_operator_cls=DatasetOperator, num_workers=2, ) dataset = dataset_creator() for i in range(5): trainer.train(dataset=dataset, num_steps=100) input = mlp_identity.to_mat(0.5) prediction = float(trainer.get_model()(input)[0][0]) assert 0.4 <= prediction <= 0.6 trainer.shutdown() def test_split_batch(ray_start_2_cpus): if not dist.is_available(): return def data_creator(config): """Returns training dataloader, validation dataloader.""" train_dataset = LinearDataset(2, 5, size=config["data_size"]) return DataLoader( train_dataset, batch_size=config[BATCH_SIZE], ) data_size = 600 batch_size = 21 TestOperator = TrainingOperator.from_creators( model_creator, optimizer_creator, data_creator, loss_creator=lambda config: nn.MSELoss()) trainer = TorchTrainer( training_operator_cls=TestOperator, num_workers=2, config={ BATCH_SIZE: batch_size, "data_size": data_size, }) stats = trainer.train() assert trainer.config[BATCH_SIZE] == (batch_size - 1) assert stats[NUM_SAMPLES] == 600 assert stats[BATCH_COUNT] == (data_size // 20) trainer.shutdown() def test_reduce_result(ray_start_2_cpus): if not dist.is_available(): return def data_creator(config): """Returns training dataloader, validation dataloader.""" train_dataset = LinearDataset(2, 5, size=config["data_size"]) test_dataset = LinearDataset(2, 5, size=config["data_size"]) return DataLoader( train_dataset, batch_size=1), DataLoader( test_dataset, batch_size=1) data_size = 600 TestOperator = TrainingOperator.from_creators( model_creator, optimizer_creator, data_creator, loss_creator=lambda config: nn.MSELoss()) trainer = TorchTrainer( 
training_operator_cls=TestOperator, num_workers=2, config={"data_size": data_size}) list_stats = trainer.train(reduce_results=False, profile=True) assert len(list_stats) == 2 assert [stats[NUM_SAMPLES] == data_size for stats in list_stats] assert [stats[BATCH_COUNT] == (data_size // 2) for stats in list_stats] list_stats = trainer.validate(reduce_results=False, profile=True) assert len(list_stats) == 2 assert [stats[NUM_SAMPLES] == data_size for stats in list_stats] assert [stats[BATCH_COUNT] == (data_size // 2) for stats in list_stats] trainer.shutdown() @pytest.mark.parametrize("num_workers", [1, 2] if dist.is_available() else [1]) def test_metrics(ray_start_2_cpus, num_workers): data_size, val_size = 600, 500 batch_size = 4 num_train_steps = int(data_size / batch_size) num_val_steps = int(val_size / batch_size) train_scores = [1] + ([0] * num_train_steps) val_scores = [1] + ([0] * num_val_steps) TestOperator = get_test_metrics_operator(Operator) trainer = TorchTrainer( training_operator_cls=TestOperator, num_workers=num_workers, config={ "scores": train_scores, "val_scores": val_scores, "key": "score", "batch_size": batch_size, "data_size": data_size, "val_size": val_size }) stats = trainer.train(num_steps=num_train_steps) # Test that we output mean and last of custom metrics in an epoch assert "score" in stats assert stats["last_score"] == 0 assert stats[NUM_SAMPLES] == num_train_steps * batch_size expected_score = num_workers * (sum(train_scores) / (num_train_steps * batch_size)) assert np.allclose(stats["score"], expected_score) val_stats = trainer.validate() # Test that we output mean and last of custom metrics in validation assert val_stats["last_score"] == 0 expected_score = (sum(val_scores) / (num_val_steps * batch_size)) * num_workers assert np.allclose(val_stats["score"], expected_score) assert val_stats[BATCH_COUNT] == np.ceil(num_val_steps / num_workers) assert val_stats[NUM_SAMPLES] == num_val_steps * batch_size assert val_stats[NUM_SAMPLES] == val_size trainer.shutdown() @pytest.mark.parametrize("num_workers", [1, 2] if dist.is_available() else [1]) def test_metrics_nan(ray_start_2_cpus, num_workers): data_size, val_size = 100, 100 batch_size = 10 num_train_steps = int(data_size / batch_size) num_val_steps = int(val_size / batch_size) train_scores = [np.nan] + ([0] * num_train_steps) val_scores = [np.nan] + ([0] * num_val_steps) TestOperator = get_test_metrics_operator(Operator) trainer = TorchTrainer( training_operator_cls=TestOperator, num_workers=num_workers, config={ "scores": train_scores, "val_scores": val_scores, "key": "score", "batch_size": batch_size, "data_size": data_size, "val_size": val_size }) stats = trainer.train(num_steps=num_train_steps) assert "score" in stats assert stats["last_score"] == 0 assert np.isnan(stats["score"]) stats = trainer.validate() assert "score" in stats assert stats["last_score"] == 0 assert np.isnan(stats["score"]) trainer.shutdown() def test_scheduler_validate(ray_start_2_cpus): # noqa: F811 from torch.optim.lr_scheduler import ReduceLROnPlateau TestOperator = TrainingOperator.from_creators( model_creator, optimizer_creator, data_creator, scheduler_creator=lambda optimizer, cfg: ReduceLROnPlateau(optimizer), loss_creator=lambda config: nn.MSELoss()) TestOperator = get_test_operator(TestOperator) trainer = TorchTrainer( scheduler_step_freq="manual", training_operator_cls=TestOperator) trainer.update_scheduler(0.5) trainer.update_scheduler(0.5) assert all( trainer.apply_all_operators( lambda op: op._schedulers[0].last_epoch == 2)) 
trainer.shutdown() @pytest.mark.parametrize("num_workers", [1, 2] if dist.is_available() else [1]) def test_tune_train(ray_start_2_cpus, num_workers): # noqa: F811 TorchTrainable = TorchTrainer.as_trainable( **{ "training_operator_cls": Operator, "num_workers": num_workers, "use_gpu": False, "backend": "gloo", "config": { "batch_size": 512, "lr": 0.001 } }) analysis = tune.run( TorchTrainable, num_samples=2, stop={"training_iteration": 2}, verbose=1) # checks loss decreasing for every trials for path, df in analysis.trial_dataframes.items(): mean_train_loss1 = df.loc[0, "train_loss"] mean_train_loss2 = df.loc[1, "train_loss"] mean_val_loss1 = df.loc[0, "val_loss"] mean_val_loss2 = df.loc[1, "val_loss"] assert mean_train_loss2 <= mean_train_loss1 assert mean_val_loss2 <= mean_val_loss1 @pytest.mark.parametrize("num_workers", [1, 2] if dist.is_available() else [1]) def test_save_and_restore(ray_start_2_cpus, num_workers, tmp_path): # noqa: F811 trainer1 = TorchTrainer( training_operator_cls=Operator, num_workers=num_workers) trainer1.train() checkpoint_path = os.path.join(tmp_path, "checkpoint") trainer1.save(checkpoint_path) model1 = trainer1.get_model() trainer1.shutdown() trainer2 = TorchTrainer( training_operator_cls=Operator, num_workers=num_workers) trainer2.load(checkpoint_path) model2 = trainer2.get_model() model1_state_dict = model1.state_dict() model2_state_dict = model2.state_dict() assert set(model1_state_dict.keys()) == set(model2_state_dict.keys()) for k in model1_state_dict: assert torch.equal(model1_state_dict[k], model2_state_dict[k]) trainer2.shutdown() def test_wrap_ddp(ray_start_2_cpus, tmp_path): # noqa: F811 if not dist.is_available(): return trainer1 = TorchTrainer( training_operator_cls=Operator, wrap_ddp=False, num_workers=2) trainer1.train() checkpoint_path = os.path.join(tmp_path, "checkpoint") trainer1.save(checkpoint_path) model1 = trainer1.get_model() assert not hasattr(trainer1.local_worker.training_operator.model, "module") assert hasattr(trainer1.local_worker.training_operator, "device_ids") trainer1.shutdown() trainer2 = TorchTrainer( training_operator_cls=Operator, wrap_ddp=False, num_workers=2) trainer2.load(checkpoint_path) model2 = trainer2.get_model() model1_state_dict = model1.state_dict() model2_state_dict = model2.state_dict() assert set(model1_state_dict.keys()) == set(model2_state_dict.keys()) for k in model1_state_dict: assert torch.equal(model1_state_dict[k], model2_state_dict[k]) trainer2.shutdown() def gen_step_with_fail(num_fails):
def test_fail_with_recover(ray_start_2_cpus): # noqa: F811 if not dist.is_available(): return def single_loader(config): dataset = LinearDataset(2, 5, size=1000000) return DataLoader(dataset, batch_size=config.get("batch_size", 32)) step_with_fail = gen_step_with_fail(3) TestOperator = TrainingOperator.from_creators( model_creator, optimizer_creator, single_loader, loss_creator=lambda config: nn.MSELoss()) with patch.object(TorchTrainer, "_train_epoch", step_with_fail): trainer1 = TorchTrainer( training_operator_cls=TestOperator, config={"batch_size": 100000}, num_workers=2) with pytest.raises(RuntimeError): trainer1.train(max_retries=1) trainer1.shutdown(force=True) def test_resize(ray_start_2_cpus): # noqa: F811 if not dist.is_available(): return def single_loader(config): dataset = LinearDataset(2, 5, size=1000000) return DataLoader(dataset, batch_size=config.get("batch_size", 32)) step_with_fail = gen_step_with_fail(1) TestOperator = TrainingOperator.from_creators( model_creator, optimizer_creator, single_loader, loss_creator=lambda config: nn.MSELoss()) with patch.object(TorchTrainer, "_train_epoch", step_with_fail): trainer1 = TorchTrainer( training_operator_cls=TestOperator, config={"batch_size": 100000}, num_workers=2) @ray.remote def try_test(): import time time.sleep(100) try_test.remote() trainer1.train(max_retries=1) assert len(trainer1.remote_workers) == 1 trainer1.shutdown() def test_fail_twice(ray_start_2_cpus): # noqa: F811 if not dist.is_available(): return def single_loader(config): dataset = LinearDataset(2, 5, size=1000000) return DataLoader(dataset, batch_size=config.get("batch_size", 32)) step_with_fail = gen_step_with_fail(2) TestOperator = TrainingOperator.from_creators( model_creator, optimizer_creator, single_loader, loss_creator=lambda config: nn.MSELoss()) with patch.object(TorchTrainer, "_train_epoch", step_with_fail): trainer1 = TorchTrainer( training_operator_cls=TestOperator, config={"batch_size": 100000}, num_workers=2) # MAX RETRIES SHOULD BE ON BY DEFAULT trainer1.train() trainer1.shutdown() def test_multi_input_model(ray_start_2_cpus): def model_creator(config): class MultiInputModel(nn.Module): def __init__(self): super(MultiInputModel, self).__init__() self._fc1 = torch.nn.Linear(1, 1) self._fc2 = torch.nn.Linear(1, 1) def forward(self, x, y): return self._fc1(x) + self._fc2(y) return MultiInputModel() def data_creator(config): class LinearDataset(torch.utils.data.Dataset): def __init__(self, a, b, size=1000): x = np.random.randn(size) y = np.random.randn(size) self.x = torch.tensor(x, dtype=torch.float32) self.y = torch.tensor(y, dtype=torch.float32) self.z = torch.tensor(a * (x + y) + 2 * b, dtype=torch.float32) def __getitem__(self, index): return (self.x[index, None], self.y[index, None], self.z[index, None]) def __len__(self): return len(self.x) train_dataset = LinearDataset(3, 4) train_loader = torch.utils.data.DataLoader( train_dataset, batch_size=config.get("batch_size", 32), ) return train_loader, None Operator = TrainingOperator.from_creators( model_creator, optimizer_creator, data_creator, loss_creator=lambda config: nn.MSELoss()) trainer = TorchTrainer(training_operator_cls=Operator, num_workers=1) metrics = trainer.train(num_steps=1) assert metrics[BATCH_COUNT] == 1 trainer.shutdown() if __name__ == "__main__": import pytest import sys sys.exit(pytest.main(["-v", "-x", __file__]))
    def step_with_fail(self, num_steps=None, profile=False, info=None,
                       dataset=None):
        params = dict(num_steps=num_steps, profile=profile, info=info)
        remote_worker_stats = [
            w.train_epoch.remote(**params) for w in self.remote_workers
        ]

        if self._num_failures < num_fails:
            time.sleep(1)  # Make sure the batch fails correctly.
            ray.kill(self.remote_workers[0])

        try:
            local_worker_stats = self.local_worker.train_epoch(**params)
        except RuntimeError:
            return False, None

        success = check_for_failure(remote_worker_stats)
        if success:
            return success, [local_worker_stats] + ray.get(remote_worker_stats)
        return success, None

    return step_with_fail
tests.py
""" This contains the tests (unit and integration) for the requirements module. It currently contains: """ from django.test import TestCase from django.core.urlresolvers import reverse from django.contrib.auth import authenticate from django.contrib.auth.models import User from mig_main.tests.factories import AcademicTermFactory, TBPChapterFactory,\ StandingFactory, StatusFactory, MajorFactory,\ ShirtSizeFactory, CurrentTermFactory, NUM_OFFICERS from history.tests.factories import OfficerFactory from history.models import Officer from mig_main.models import ( AcademicTerm, OfficerPosition, MemberProfile, Status, ) from mig_main.models import ShirtSize, TBPChapter, Major, Standing from migweb.test_tools import MyClient def setUpModule(): print 'setting up requirements test data...' AcademicTermFactory.create_batch(18) TBPChapterFactory.create_batch(3) StandingFactory.create_batch(3) StatusFactory.create_batch(2) MajorFactory.create_batch(3) ShirtSizeFactory.create_batch(3) CurrentTermFactory() OfficerFactory.create_batch(NUM_OFFICERS) User.objects.create_user('johndoe', '[email protected]', 'password') User.objects.create_superuser('jimharb', '[email protected]', 'password') def tearDownModule(): """This should clear the database back to empty, only those objects created in the setup method need to be deleted. I think """ print 'tearing down requirements module...' Officer.objects.all().delete() OfficerPosition.objects.all().delete() MemberProfile.objects.all().delete()
    Major.objects.all().delete()
    Status.objects.all().delete()
    Standing.objects.all().delete()
    TBPChapter.objects.all().delete()
    User.objects.all().delete()
    print('teardown complete.')


class RequirementsModelsTestCase(TestCase):
    def setUp(self):
        self.client = MyClient()
        self.user = User.objects.get(username='johndoe')
        self.admin = User.objects.get(username='jimharb')

    def tearDown(self):
        del self.client

    def test_model(self):
        pass
AcademicTerm.objects.all().delete() ShirtSize.objects.all().delete()
sign_test.go
// Copyright (c) 2013-2016 The btcsuite developers // Copyright (c) 2015-2019 The Decred developers // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. package txscript import ( "crypto/rand" "errors" "fmt" mrand "math/rand" "testing" "github.com/decred/dcrd/chaincfg" "github.com/decred/dcrd/chaincfg/chainec" "github.com/decred/dcrd/chaincfg/chainhash" "github.com/decred/dcrd/dcrec" "github.com/decred/dcrd/dcrutil" "github.com/decred/dcrd/wire" ) // testingParams defines the chain params to use throughout these tests so it // can more easily be changed if desired. var testingParams = &chaincfg.RegNetParams const testValueIn = 12345 type addressToKey struct { key *chainec.PrivateKey compressed bool } func mkGetKey(keys map[string]addressToKey) KeyDB { if keys == nil { return KeyClosure(func(addr dcrutil.Address) (chainec.PrivateKey, bool, error) { return nil, false, errors.New("nope 1") }) } return KeyClosure(func(addr dcrutil.Address) (chainec.PrivateKey, bool, error) { a2k, ok := keys[addr.EncodeAddress()] if !ok { return nil, false, errors.New("nope 2") } return *a2k.key, a2k.compressed, nil }) } func mkGetKeyPub(keys map[string]addressToKey) KeyDB { if keys == nil { return KeyClosure(func(addr dcrutil.Address) (chainec.PrivateKey, bool, error) { return nil, false, errors.New("nope 1") }) } return KeyClosure(func(addr dcrutil.Address) (chainec.PrivateKey, bool, error) { a2k, ok := keys[addr.String()] if !ok { return nil, false, errors.New("nope 2") } return *a2k.key, a2k.compressed, nil }) } func mkGetScript(scripts map[string][]byte) ScriptDB { if scripts == nil { return ScriptClosure(func(addr dcrutil.Address) ( []byte, error) { return nil, errors.New("nope 3") }) } return ScriptClosure(func(addr dcrutil.Address) ([]byte, error) { script, ok := scripts[addr.EncodeAddress()] if !ok { return nil, errors.New("nope 4") } return script, nil }) } func checkScripts(msg string, tx *wire.MsgTx, idx int, sigScript, pkScript []byte) error { tx.TxIn[idx].SignatureScript = sigScript var scriptFlags ScriptFlags vm, err := NewEngine(pkScript, tx, idx, scriptFlags, 0, nil) if err != nil { return fmt.Errorf("failed to make script engine for %s: %v", msg, err) } err = vm.Execute() if err != nil { return fmt.Errorf("invalid script signature for %s: %v", msg, err) } return nil } func signAndCheck(msg string, tx *wire.MsgTx, idx int, pkScript []byte, hashType SigHashType, kdb KeyDB, sdb ScriptDB, suite dcrec.SignatureType) error { sigScript, err := SignTxOutput(testingParams, tx, idx, pkScript, hashType, kdb, sdb, nil, suite) if err != nil { return fmt.Errorf("failed to sign output %s: %v", msg, err) } return checkScripts(msg, tx, idx, sigScript, pkScript) } func signBadAndCheck(msg string, tx *wire.MsgTx, idx int, pkScript []byte, hashType SigHashType, kdb KeyDB, sdb ScriptDB, suite dcrec.SignatureType) error { // Setup a PRNG. randScriptHash := chainhash.HashB(pkScript) tRand := mrand.New(mrand.NewSource(int64(randScriptHash[0]))) sigScript, err := SignTxOutput(testingParams, tx, idx, pkScript, hashType, kdb, sdb, nil, suite) if err != nil { return fmt.Errorf("failed to sign output %s: %v", msg, err) } // Be sure to reset the value in when we're done creating the // corrupted signature for that flag. tx.TxIn[0].ValueIn = testValueIn // Corrupt a random bit in the signature. 
pos := tRand.Intn(len(sigScript) - 1) bitPos := tRand.Intn(7) sigScript[pos] ^= 1 << uint8(bitPos) return checkScripts(msg, tx, idx, sigScript, pkScript) } func TestSignTxOutput(t *testing.T) { t.Parallel() // make key // make script based on key. // sign with magic pixie dust. hashTypes := []SigHashType{ SigHashAll, SigHashNone, SigHashSingle, SigHashAll | SigHashAnyOneCanPay, SigHashNone | SigHashAnyOneCanPay, SigHashSingle | SigHashAnyOneCanPay, } signatureSuites := []dcrec.SignatureType{ dcrec.STEcdsaSecp256k1, dcrec.STEd25519, dcrec.STSchnorrSecp256k1, } tx := &wire.MsgTx{ SerType: wire.TxSerializeFull, Version: 1, TxIn: []*wire.TxIn{ { PreviousOutPoint: wire.OutPoint{ Hash: chainhash.Hash{}, Index: 0, Tree: 0, }, Sequence: 4294967295, ValueIn: testValueIn, BlockHeight: 78901, BlockIndex: 23456, }, { PreviousOutPoint: wire.OutPoint{ Hash: chainhash.Hash{}, Index: 1, Tree: 0, }, Sequence: 4294967295, ValueIn: testValueIn, BlockHeight: 78901, BlockIndex: 23456, }, { PreviousOutPoint: wire.OutPoint{ Hash: chainhash.Hash{}, Index: 2, Tree: 0, }, Sequence: 4294967295, ValueIn: testValueIn, BlockHeight: 78901, BlockIndex: 23456, }, }, TxOut: []*wire.TxOut{ { Version: wire.DefaultPkScriptVersion, Value: 1, }, { Version: wire.DefaultPkScriptVersion, Value: 2, }, { Version: wire.DefaultPkScriptVersion, Value: 3, }, }, LockTime: 0, Expiry: 0, } // Pay to Pubkey Hash (uncompressed) secp256k1 := chainec.Secp256k1 for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey( rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() } msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, suite) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } if err := signAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), suite); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), suite); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } } // Pay to Pubkey Hash (uncompressed) (merging with correct) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = 
chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() } msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, suite) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } sigScript, err := SignTxOutput( testingParams, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), nil, suite) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // by the above loop, this should be valid, now sign // again and merge. sigScript, err = SignTxOutput( testingParams, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), sigScript, suite) if err != nil { t.Errorf("failed to sign output %s a "+ "second time: %v", msg, err) break } err = checkScripts(msg, tx, i, sigScript, pkScript) if err != nil { t.Errorf("twice signed script invalid for "+ "%s: %v", msg, err) break } } } } // Pay to Pubkey Hash (compressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() } msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, suite) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } if err := signAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), suite); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), suite); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } } // Pay to Pubkey Hash (compressed) with duplicate merge for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() } msg := fmt.Sprintf("%d:%d:%d", hashType, i, 
suite) address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, suite) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } sigScript, err := SignTxOutput(testingParams, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), nil, suite) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // by the above loop, this should be valid, now sign // again and merge. sigScript, err = SignTxOutput(testingParams, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), sigScript, suite) if err != nil { t.Errorf("failed to sign output %s a "+ "second time: %v", msg, err) break } err = checkScripts(msg, tx, i, sigScript, pkScript) if err != nil { t.Errorf("twice signed script invalid for "+ "%s: %v", msg, err) break } } } } // Pay to Pubkey Hash for a ticket(SStx) (compressed) for _, hashType := range hashTypes { for i := range tx.TxIn { msg := fmt.Sprintf("%d:%d", hashType, i) keyDB, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key, pk := secp256k1.PrivKeyFromBytes(keyDB) pkBytes := pk.SerializeCompressed() address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, dcrec.STEcdsaSecp256k1) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToSStx(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } if err := signAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } // Pay to Pubkey Hash for a ticket change (SStx change) (compressed) for _, hashType := range hashTypes { for i := range tx.TxIn { msg := fmt.Sprintf("%d:%d", hashType, i) keyDB, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key, pk := secp256k1.PrivKeyFromBytes(keyDB) pkBytes := pk.SerializeCompressed() address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, dcrec.STEcdsaSecp256k1) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToSStxChange(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } if err := signAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } // Pay to Pubkey Hash for a ticket spending (SSGen) (compressed) for _, hashType := range hashTypes { for i := range tx.TxIn { msg := fmt.Sprintf("%d:%d", hashType, i) keyDB, _, _, err := 
secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key, pk := secp256k1.PrivKeyFromBytes(keyDB) pkBytes := pk.SerializeCompressed() address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, dcrec.STEcdsaSecp256k1) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToSSGen(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } if err := signAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } // Pay to Pubkey Hash for a ticket revocation (SSRtx) (compressed) for _, hashType := range hashTypes { for i := range tx.TxIn { msg := fmt.Sprintf("%d:%d", hashType, i) keyDB, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key, pk := secp256k1.PrivKeyFromBytes(keyDB) pkBytes := pk.SerializeCompressed() address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, dcrec.STEcdsaSecp256k1) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToSSRtx(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } if err := signAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } // Pay to PubKey (uncompressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) keyDB, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key, pk := secp256k1.PrivKeyFromBytes(keyDB) // For address generation, consensus rules require using // a compressed public key. 
Look up ExtractPkScriptAddrs // for more details address, err := dcrutil.NewAddressSecpPubKeyCompressed(pk, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } if err := signAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), suite); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), suite); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } } // Pay to PubKey (uncompressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey var address dcrutil.Address var err error msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) // For address generation, consensus rules require using // a compressed public key. Look up ExtractPkScriptAddrs // for more details address, err = dcrutil.NewAddressSecpPubKeyCompressed(pk, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() address, err = dcrutil.NewAddressEdwardsPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() address, err = dcrutil.NewAddressSecSchnorrPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } sigScript, err := SignTxOutput(testingParams, tx, i, pkScript, hashType, mkGetKeyPub(map[string]addressToKey{ address.String(): {&key, false}, }), mkGetScript(nil), nil, suite) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) } // by the above loop, this should be valid, now sign // again and merge. sigScript, err = SignTxOutput(testingParams, tx, i, pkScript, hashType, mkGetKeyPub(map[string]addressToKey{ address.String(): {&key, false}, }), mkGetScript(nil), sigScript, suite) if err != nil { t.Errorf("failed to sign output %s a "+ "second time: %v", msg, err) } err = checkScripts(msg, tx, i, sigScript, pkScript) if err != nil { t.Errorf("twice signed script invalid for "+ "%s: %v", msg, err) } } } } // Pay to PubKey (compressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey var address dcrutil.Address var err error msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) // For address generation, consensus rules require using // a compressed public key. 
Look up ExtractPkScriptAddrs // for more details address, err = dcrutil.NewAddressSecpPubKeyCompressed(pk, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() address, err = dcrutil.NewAddressEdwardsPubKey(pkBytes, testingParams) if err != nil
case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() address, err = dcrutil.NewAddressSecSchnorrPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } if err := signAndCheck(msg, tx, i, pkScript, hashType, mkGetKeyPub(map[string]addressToKey{ address.String(): {&key, true}, }), mkGetScript(nil), suite); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKeyPub(map[string]addressToKey{ address.String(): {&key, true}, }), mkGetScript(nil), suite); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } } // Pay to PubKey (compressed) with duplicate merge for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey var address dcrutil.Address var err error msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) address, err = dcrutil.NewAddressSecpPubKeyCompressed(pk, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() address, err = dcrutil.NewAddressEdwardsPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() address, err = dcrutil.NewAddressSecSchnorrPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } sigScript, err := SignTxOutput(testingParams, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), nil, suite) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // by the above loop, this should be valid, now sign // again and merge. sigScript, err = SignTxOutput(testingParams, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), sigScript, suite) if err != nil { t.Errorf("failed to sign output %s a "+ "second time: %v", msg, err) break } err = checkScripts(msg, tx, i, sigScript, pkScript) if err != nil { t.Errorf("twice signed script invalid for "+ "%s: %v", msg, err) break } } } } // As before, but with p2sh now. 
// Pay to Pubkey Hash (uncompressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() } msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, suite) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) break } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript( scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } if err := signAndCheck(msg, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), suite); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), suite); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } } // Pay to Pubkey Hash (uncompressed) with duplicate merge for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() } msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, suite) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) break } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript( scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } _, err = SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, suite) if err != nil { 
t.Errorf("failed to sign output %s: %v", msg, err) break } // by the above loop, this should be valid, now sign // again and merge. sigScript, err := SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, suite) if err != nil { t.Errorf("failed to sign output %s a "+ "second time: %v", msg, err) break } err = checkScripts(msg, tx, i, sigScript, scriptPkScript) if err != nil { t.Errorf("twice signed script invalid for "+ "%s: %v", msg, err) break } } } } // Pay to Pubkey Hash (compressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() } msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, suite) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript( scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } if err := signAndCheck(msg, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), suite); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(nil), suite); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } } // Pay to Pubkey Hash (compressed) with duplicate merge for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() } msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) address, err := dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(pkBytes), testingParams, suite) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } pkScript, err := PayToAddrScript(address) if err != nil { 
t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript( scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } _, err = SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, suite) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // by the above loop, this should be valid, now sign // again and merge. sigScript, err := SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, suite) if err != nil { t.Errorf("failed to sign output %s a "+ "second time: %v", msg, err) break } err = checkScripts(msg, tx, i, sigScript, scriptPkScript) if err != nil { t.Errorf("twice signed script invalid for "+ "%s: %v", msg, err) break } } } } // Pay to PubKey (uncompressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey var address dcrutil.Address var err error msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) // For address generation, consensus rules require using // a compressed public key. Look up ExtractPkScriptAddrs // for more details address, err = dcrutil.NewAddressSecpPubKeyCompressed(pk, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() address, err = dcrutil.NewAddressEdwardsPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() address, err = dcrutil.NewAddressSecSchnorrPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) } scriptPkScript, err := PayToAddrScript( scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) } if err := signAndCheck(msg, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), suite); err != nil { t.Error(err) } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), suite); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } } // Pay to PubKey (uncompressed) with duplicate merge for _, 
hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey var address dcrutil.Address var err error msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) // For address generation, consensus rules require using // a compressed public key. Look up ExtractPkScriptAddrs // for more details address, err = dcrutil.NewAddressSecpPubKeyCompressed(pk, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeUncompressed() address, err = dcrutil.NewAddressEdwardsPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() address, err = dcrutil.NewAddressSecSchnorrPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) } scriptPkScript, err := PayToAddrScript( scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) } _, err = SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, suite) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // by the above loop, this should be valid, now sign // again and merge. 
sigScript, err := SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, suite) if err != nil { t.Errorf("failed to sign output %s a "+ "second time: %v", msg, err) break } err = checkScripts(msg, tx, i, sigScript, scriptPkScript) if err != nil { t.Errorf("twice signed script invalid for "+ "%s: %v", msg, err) break } } } } // Pay to PubKey (compressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey var address dcrutil.Address var err error msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) address, err = dcrutil.NewAddressSecpPubKeyCompressed(pk, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() address, err = dcrutil.NewAddressEdwardsPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() address, err = dcrutil.NewAddressSecSchnorrPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript( scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } if err := signAndCheck(msg, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), suite); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, false}, }), mkGetScript(nil), suite); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } } // Pay to PubKey (compressed) for _, hashType := range hashTypes { for _, suite := range signatureSuites { for i := range tx.TxIn { var keyDB, pkBytes []byte var key chainec.PrivateKey var pk chainec.PublicKey var address dcrutil.Address var err error msg := fmt.Sprintf("%d:%d:%d", hashType, i, suite) switch suite { case dcrec.STEcdsaSecp256k1: keyDB, _, _, _ = secp256k1.GenerateKey(rand.Reader) key, pk = secp256k1.PrivKeyFromBytes(keyDB) address, err = dcrutil.NewAddressSecpPubKeyCompressed(pk, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case dcrec.STEd25519: keyDB, _, _, _ = chainec.Edwards.GenerateKey(rand.Reader) key, pk = chainec.Edwards.PrivKeyFromBytes(keyDB) pkBytes = pk.SerializeCompressed() address, err = dcrutil.NewAddressEdwardsPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } case 
dcrec.STSchnorrSecp256k1: keyDB, _, _, _ = chainec.SecSchnorr.GenerateKey(rand.Reader) key, pk = chainec.SecSchnorr.PrivKeyFromBytes(keyDB) pkBytes = pk.Serialize() address, err = dcrutil.NewAddressSecSchnorrPubKey(pkBytes, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) } } pkScript, err := PayToAddrScript(address) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript( scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } _, err = SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, suite) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // by the above loop, this should be valid, now sign // again and merge. sigScript, err := SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address.EncodeAddress(): {&key, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, suite) if err != nil { t.Errorf("failed to sign output %s a "+ "second time: %v", msg, err) break } err = checkScripts(msg, tx, i, sigScript, scriptPkScript) if err != nil { t.Errorf("twice signed script invalid for "+ "%s: %v", msg, err) break } } } } // Basic Multisig for _, hashType := range hashTypes { for i := range tx.TxIn { msg := fmt.Sprintf("%d:%d", hashType, i) keyDB1, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key1, pk1 := secp256k1.PrivKeyFromBytes(keyDB1) address1, err := dcrutil.NewAddressSecpPubKeyCompressed(pk1, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } keyDB2, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key2, pk2 := secp256k1.PrivKeyFromBytes(keyDB2) address2, err := dcrutil.NewAddressSecpPubKeyCompressed(pk2, testingParams) if err != nil { t.Errorf("failed to make address 2 for %s: %v", msg, err) break } pkScript, err := MultiSigScript( []*dcrutil.AddressSecpPubKey{address1, address2}, 2) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript(scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } if err := signAndCheck(msg, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address1.EncodeAddress(): {&key1, true}, address2.EncodeAddress(): {&key2, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), dcrec.STEcdsaSecp256k1); err != nil { t.Error(err) break } if err := signBadAndCheck(msg, tx, i, pkScript, hashType, mkGetKey(map[string]addressToKey{ address1.EncodeAddress(): {&key1, true}, address2.EncodeAddress(): {&key2, true}, }), mkGetScript(nil), dcrec.STEcdsaSecp256k1); err == nil { t.Errorf("corrupted signature validated %s: %v", msg, err) break } } } // Two part multisig, sign with one key then the other. 
for _, hashType := range hashTypes { for i := range tx.TxIn { msg := fmt.Sprintf("%d:%d", hashType, i) keyDB1, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key1, pk1 := secp256k1.PrivKeyFromBytes(keyDB1) address1, err := dcrutil.NewAddressSecpPubKeyCompressed(pk1, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } keyDB2, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key2, pk2 := secp256k1.PrivKeyFromBytes(keyDB2) address2, err := dcrutil.NewAddressSecpPubKeyCompressed(pk2, testingParams) if err != nil { t.Errorf("failed to make address 2 for %s: %v", msg, err) break } pkScript, err := MultiSigScript( []*dcrutil.AddressSecpPubKey{address1, address2}, 2) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript(scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } sigScript, err := SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address1.EncodeAddress(): {&key1, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, dcrec.STEcdsaSecp256k1) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // Only 1 out of 2 signed, this *should* fail. if checkScripts(msg, tx, i, sigScript, scriptPkScript) == nil { t.Errorf("part signed script valid for %s", msg) break } // Sign with the other key and merge sigScript, err = SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address2.EncodeAddress(): {&key2, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), sigScript, dcrec.STEcdsaSecp256k1) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } err = checkScripts(msg, tx, i, sigScript, scriptPkScript) if err != nil { t.Errorf("fully signed script invalid for "+ "%s: %v", msg, err) break } } } // Two part multisig, sign with one key then both, check key dedup // correctly. 
for _, hashType := range hashTypes { for i := range tx.TxIn { msg := fmt.Sprintf("%d:%d", hashType, i) keyDB1, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key1, pk1 := secp256k1.PrivKeyFromBytes(keyDB1) address1, err := dcrutil.NewAddressSecpPubKeyCompressed(pk1, testingParams) if err != nil { t.Errorf("failed to make address for %s: %v", msg, err) break } keyDB2, _, _, err := secp256k1.GenerateKey(rand.Reader) if err != nil { t.Errorf("failed to generate key: %v", err) break } key2, pk2 := secp256k1.PrivKeyFromBytes(keyDB2) address2, err := dcrutil.NewAddressSecpPubKeyCompressed(pk2, testingParams) if err != nil { t.Errorf("failed to make address 2 for %s: %v", msg, err) break } pkScript, err := MultiSigScript( []*dcrutil.AddressSecpPubKey{address1, address2}, 2) if err != nil { t.Errorf("failed to make pkscript "+ "for %s: %v", msg, err) } scriptAddr, err := dcrutil.NewAddressScriptHash( pkScript, testingParams) if err != nil { t.Errorf("failed to make p2sh addr for %s: %v", msg, err) break } scriptPkScript, err := PayToAddrScript(scriptAddr) if err != nil { t.Errorf("failed to make script pkscript for "+ "%s: %v", msg, err) break } sigScript, err := SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address1.EncodeAddress(): {&key1, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), nil, dcrec.STEcdsaSecp256k1) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // Only 1 out of 2 signed, this *should* fail. if checkScripts(msg, tx, i, sigScript, scriptPkScript) == nil { t.Errorf("part signed script valid for %s", msg) break } // Sign with the other key and merge sigScript, err = SignTxOutput(testingParams, tx, i, scriptPkScript, hashType, mkGetKey(map[string]addressToKey{ address1.EncodeAddress(): {&key1, true}, address2.EncodeAddress(): {&key2, true}, }), mkGetScript(map[string][]byte{ scriptAddr.EncodeAddress(): pkScript, }), sigScript, dcrec.STEcdsaSecp256k1) if err != nil { t.Errorf("failed to sign output %s: %v", msg, err) break } // Now we should pass. err = checkScripts(msg, tx, i, sigScript, scriptPkScript) if err != nil { t.Errorf("fully signed script invalid for "+ "%s: %v", msg, err) break } } } } type tstInput struct { txout *wire.TxOut sigscriptGenerates bool inputValidates bool indexOutOfRange bool } type tstSigScript struct { name string inputs []tstInput hashType SigHashType compress bool scriptAtWrongIndex bool } var coinbaseOutPoint = &wire.OutPoint{ Index: (1 << 32) - 1, } // Pregenerated private key, with associated public key and pkScripts // for the uncompressed and compressed hash160. 
var ( privKeyD = []byte{0x6b, 0x0f, 0xd8, 0xda, 0x54, 0x22, 0xd0, 0xb7, 0xb4, 0xfc, 0x4e, 0x55, 0xd4, 0x88, 0x42, 0xb3, 0xa1, 0x65, 0xac, 0x70, 0x7f, 0x3d, 0xa4, 0x39, 0x5e, 0xcb, 0x3b, 0xb0, 0xd6, 0x0e, 0x06, 0x92} _, thisPubKey = chainec.Secp256k1.PrivKeyFromBytes(privKeyD) thisAddressUnc, _ = dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(thisPubKey.SerializeUncompressed()), testingParams, dcrec.STEcdsaSecp256k1) uncompressedPkScript, _ = PayToAddrScript(thisAddressUnc) thisAddressCom, _ = dcrutil.NewAddressPubKeyHash( dcrutil.Hash160(thisPubKey.SerializeCompressed()), testingParams, dcrec.STEcdsaSecp256k1) compressedPkScript, _ = PayToAddrScript(thisAddressCom) shortPkScript = []byte{0x76, 0xa9, 0x14, 0xd1, 0x7c, 0xb5, 0xeb, 0xa4, 0x02, 0xcb, 0x68, 0xe0, 0x69, 0x56, 0xbf, 0x32, 0x53, 0x90, 0x0e, 0x0a, 0x88, 0xac} ) // Pretend output amounts. const coinbaseVal = 2500000000 const fee = 5000000 var sigScriptTests = []tstSigScript{ { name: "one input uncompressed", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashAll, compress: false, scriptAtWrongIndex: false, }, { name: "two inputs uncompressed", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, { txout: wire.NewTxOut(coinbaseVal+fee, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashAll, compress: false, scriptAtWrongIndex: false, }, { name: "one input compressed", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, compressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashAll, compress: true, scriptAtWrongIndex: false, }, { name: "two inputs compressed", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, compressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, { txout: wire.NewTxOut(coinbaseVal+fee, compressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashAll, compress: true, scriptAtWrongIndex: false, }, { name: "hashType SigHashNone", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashNone, compress: false, scriptAtWrongIndex: false, }, { name: "hashType SigHashSingle", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashSingle, compress: false, scriptAtWrongIndex: false, }, { name: "hashType SigHashAnyoneCanPay", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashAnyOneCanPay | SigHashAll, compress: false, scriptAtWrongIndex: false, }, { name: "hashType non-standard", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: false, indexOutOfRange: false, }, }, hashType: 0x04, compress: false, scriptAtWrongIndex: false, }, { name: "invalid compression", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: false, indexOutOfRange: false, }, }, hashType: SigHashAll, compress: true, 
scriptAtWrongIndex: false, }, { name: "short PkScript", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, shortPkScript), sigscriptGenerates: false, indexOutOfRange: false, }, }, hashType: SigHashAll, compress: false, scriptAtWrongIndex: false, }, { name: "valid script at wrong index", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, { txout: wire.NewTxOut(coinbaseVal+fee, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashAll, compress: false, scriptAtWrongIndex: true, }, { name: "index out of range", inputs: []tstInput{ { txout: wire.NewTxOut(coinbaseVal, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, { txout: wire.NewTxOut(coinbaseVal+fee, uncompressedPkScript), sigscriptGenerates: true, inputValidates: true, indexOutOfRange: false, }, }, hashType: SigHashAll, compress: false, scriptAtWrongIndex: true, }, } // Test the sigscript generation for valid and invalid inputs, all // hashTypes, and with and without compression. This test creates // sigscripts to spend fake coinbase inputs, as sigscripts cannot be // created for the MsgTxs in txTests, since they come from the blockchain // and we don't have the private keys. func TestSignatureScript(t *testing.T) { t.Parallel() privKey, _ := chainec.Secp256k1.PrivKeyFromBytes(privKeyD) nexttest: for i := range sigScriptTests { tx := wire.NewMsgTx() output := wire.NewTxOut(500, []byte{OP_RETURN}) tx.AddTxOut(output) for range sigScriptTests[i].inputs { txin := wire.NewTxIn(coinbaseOutPoint, 500, nil) tx.AddTxIn(txin) } var script []byte var err error for j := range tx.TxIn { var idx int if sigScriptTests[i].inputs[j].indexOutOfRange { t.Errorf("at test %v", sigScriptTests[i].name) idx = len(sigScriptTests[i].inputs) } else { idx = j } script, err = SignatureScript(tx, idx, sigScriptTests[i].inputs[j].txout.PkScript, sigScriptTests[i].hashType, privKey, sigScriptTests[i].compress) if (err == nil) != sigScriptTests[i].inputs[j].sigscriptGenerates { if err == nil { t.Errorf("passed test '%v' incorrectly", sigScriptTests[i].name) } else { t.Errorf("failed test '%v': %v", sigScriptTests[i].name, err) } continue nexttest } if !sigScriptTests[i].inputs[j].sigscriptGenerates { // done with this test continue nexttest } tx.TxIn[j].SignatureScript = script } // If testing using a correct sigscript but for an incorrect // index, use last input script for first input. Requires > 0 // inputs for test. if sigScriptTests[i].scriptAtWrongIndex { tx.TxIn[0].SignatureScript = script sigScriptTests[i].inputs[0].inputValidates = false } // Validate tx input scripts var scriptFlags ScriptFlags for j := range tx.TxIn { vm, err := NewEngine(sigScriptTests[i].inputs[j].txout. PkScript, tx, j, scriptFlags, 0, nil) if err != nil { t.Errorf("cannot create script vm for test %v: %v", sigScriptTests[i].name, err) continue nexttest } err = vm.Execute() if (err == nil) != sigScriptTests[i].inputs[j].inputValidates { if err == nil { t.Errorf("passed test '%v' validation incorrectly: %v", sigScriptTests[i].name, err) } else { t.Errorf("failed test '%v' validation: %v", sigScriptTests[i].name, err) } continue nexttest } } } }
{ t.Errorf("failed to make address for %s: %v", msg, err) }
index.tsx
export { default as InputField } from './InputField';
export { default as TextAreaField } from './TextAreaField';
export { default as SelectField } from './SelectField';
export { default as TextAreaSecretField } from './TextAreaSecretField';
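The index above is a barrel module: it flattens the per-component folders into a single import path. A hypothetical consumer (the './fields' path is illustrative, not taken from the source):

// Hypothetical consumer of the barrel above; adjust the path to wherever
// this index.tsx lives.
import { InputField, SelectField } from './fields';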
styles.ts
import styled from 'styled-components'
import { animated } from 'react-spring'

export const Container = styled(animated.div)`
  touch-action: none;
  position: absolute;
  width: 100vw;
  will-change: transform;
  display: flex;
  align-items: center;
  touch-action: none;
  background: url('https://avataaars.io/?avatarStyle=Transparent&topType=LongHairCurvy&accessoriesType=Blank&hairColor=BrownDark&facialHairType=Blank&clotheType=BlazerShirt&eyeType=Default&eyebrowType=Default&mouthType=Default&skinColor=Tanned');
  background-color: white;
  background-size: auto 85%;
  background-repeat: no-repeat;
  background-position: center center;
  width: 45vh;
  max-width: 300px;
  height: 48vh;
  max-height: 570px;
  will-change: transform;
  border-radius: 10px;
  box-shadow: 0 12.5px 100px -10px rgb(50 50 73 / 40%),
    0 10px 10px -10px rgb(50 50 73 / 30%);
`
  justify-content: center;
`

export const Background = styled(animated.div)`
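A minimal consumer sketch for these two styled components, assuming react-spring v9's hook API; none of this wiring appears in the source, so the component name and spring values are hypothetical:

import React from 'react'
import { useSpring } from 'react-spring'
import { Container, Background } from './styles'

// Hypothetical card: Container fills the viewport and centers the
// avatar card (Background); the spring values drive its transform.
export function Card() {
  const props = useSpring({ x: 0, y: 0 })
  return (
    <Container style={props}>
      <Background />
    </Container>
  )
}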
main.py
import os

from requests.utils import requote_uri
from pyrogram import Client, filters

Bot = Client(
    "Requote-URL-Bot",
    bot_token=os.environ["BOT_TOKEN"],
    api_id=int(os.environ["API_ID"]),
    api_hash=os.environ["API_HASH"]
)


# Reply to any text message with a safely percent-encoded copy of it.
@Bot.on_message(filters.text)
async def filter(bot, update):
    await update.reply_text(
        text=f"`{requote_uri(update.text)}`\n\nMade by @FayasNoushad",
        disable_web_page_preview=True,
        quote=True
    )
Bot.run()
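For reference, requote_uri runs the URI through an unquote/requote cycle, so unsafe characters are percent-encoded without double-encoding escapes that are already valid. A quick check:

from requests.utils import requote_uri

# Spaces become %20; an already-encoded %20 is left as-is rather than
# turning into %2520.
print(requote_uri("https://example.com/a b"))    # https://example.com/a%20b
print(requote_uri("https://example.com/a%20b"))  # https://example.com/a%20b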
pipelines_test.go
package api_test import ( "bytes" "errors" "io" "io/ioutil" "net/http" "time" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/concourse/atc" "github.com/concourse/atc/db/dbfakes" "github.com/concourse/atc/db" "github.com/concourse/atc/db/algorithm" ) var _ = Describe("Pipelines API", func() { var pipelineDB *dbfakes.FakePipelineDB var expectedSavedPipeline db.SavedPipeline BeforeEach(func() { pipelineDB = new(dbfakes.FakePipelineDB) pipelineDBFactory.BuildReturns(pipelineDB) expectedSavedPipeline = db.SavedPipeline{} teamDB.GetPipelineByNameReturns(expectedSavedPipeline, true, nil) privatePipeline := db.SavedPipeline{ ID: 1, Paused: false, Public: false, TeamName: "main", Pipeline: db.Pipeline{ Name: "private-pipeline", Config: atc.Config{ Groups: atc.GroupConfigs{ { Name: "group1", Jobs: []string{"job1", "job2"}, Resources: []string{"resource1", "resource2"}, }, }, }, }, } publicPipeline := db.SavedPipeline{ ID: 2, Paused: true, Public: true, TeamName: "main", Pipeline: db.Pipeline{ Name: "public-pipeline", Config: atc.Config{ Groups: atc.GroupConfigs{ { Name: "group2", Jobs: []string{"job3", "job4"}, Resources: []string{"resource3", "resource4"}, }, }, }, }, } anotherPublicPipeline := db.SavedPipeline{ ID: 3, Paused: true, Public: true, TeamName: "another", Pipeline: db.Pipeline{ Name: "another-pipeline", }, } teamDB.GetPipelinesReturns([]db.SavedPipeline{ privatePipeline, publicPipeline, }, nil) teamDB.GetPrivateAndAllPublicPipelinesReturns([]db.SavedPipeline{ privatePipeline, publicPipeline, anotherPublicPipeline, }, nil) teamDB.GetPublicPipelinesReturns([]db.SavedPipeline{publicPipeline}, nil) pipelinesDB.GetAllPublicPipelinesReturns([]db.SavedPipeline{publicPipeline, anotherPublicPipeline}, nil) }) Describe("GET /api/v1/pipelines", func() { var response *http.Response JustBeforeEach(func() { req, err := http.NewRequest("GET", server.URL+"/api/v1/pipelines", nil) Expect(err).NotTo(HaveOccurred()) req.Header.Set("Content-Type", "application/json") response, err = client.Do(req) Expect(err).NotTo(HaveOccurred()) }) It("returns 200 OK", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) It("returns application/json", func() { Expect(response.Header.Get("Content-Type")).To(Equal("application/json")) }) Context("when team is set in user context", func() { BeforeEach(func() { userContextReader.GetTeamReturns("some-team", 5, false, true) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("some-team")) }) }) Context("when not authenticated", func() { BeforeEach(func() { userContextReader.GetTeamReturns("", 5, false, false) authValidator.IsAuthenticatedReturns(false) }) It("returns only public pipelines", func() { body, err := ioutil.ReadAll(response.Body) Expect(err).NotTo(HaveOccurred()) Expect(body).To(MatchJSON(`[ { "name": "public-pipeline", "url": "/teams/main/pipelines/public-pipeline", "paused": true, "public": true, "team_name": "main", "groups": [ { "name": "group2", "jobs": ["job3", "job4"], "resources": ["resource3", "resource4"] } ] }, { "name": "another-pipeline", "url": "/teams/another/pipelines/another-pipeline", "paused": true, "public": true, "team_name": "another" }]`)) }) }) Context("when authenticated", func() { BeforeEach(func() { userContextReader.GetTeamReturns("main", 5, false, true) authValidator.IsAuthenticatedReturns(true) }) It("returns all pipelines of the team + all public pipelines", func() { body, err := 
ioutil.ReadAll(response.Body) Expect(err).NotTo(HaveOccurred()) Expect(body).To(MatchJSON(`[ { "name": "private-pipeline", "url": "/teams/main/pipelines/private-pipeline", "paused": false, "public": false, "team_name": "main", "groups": [ { "name": "group1", "jobs": ["job1", "job2"], "resources": ["resource1", "resource2"] } ] }, { "name": "public-pipeline", "url": "/teams/main/pipelines/public-pipeline", "paused": true, "public": true, "team_name": "main", "groups": [ { "name": "group2", "jobs": ["job3", "job4"], "resources": ["resource3", "resource4"] } ] }, { "name": "another-pipeline", "url": "/teams/another/pipelines/another-pipeline", "paused": true, "public": true, "team_name": "another" }]`)) }) Context("when the call to get active pipelines fails", func() { BeforeEach(func() { teamDB.GetPrivateAndAllPublicPipelinesReturns(nil, errors.New("disaster")) }) It("returns 500 internal server error", func() { Expect(response.StatusCode).To(Equal(http.StatusInternalServerError)) }) }) }) }) Describe("GET /api/v1/teams/:team_name/pipelines", func() { var response *http.Response JustBeforeEach(func() { req, err := http.NewRequest("GET", server.URL+"/api/v1/teams/main/pipelines", nil) Expect(err).NotTo(HaveOccurred()) req.Header.Set("Content-Type", "application/json") response, err = client.Do(req) Expect(err).NotTo(HaveOccurred()) }) Context("when authenticated as requested team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("main", 0, false, true) }) It("returns 200 OK", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) It("returns application/json", func() { Expect(response.Header.Get("Content-Type")).To(Equal("application/json")) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("main")) }) It("returns all team's pipelines", func() { body, err := ioutil.ReadAll(response.Body) Expect(err).NotTo(HaveOccurred()) Expect(body).To(MatchJSON(`[ { "name": "private-pipeline", "url": "/teams/main/pipelines/private-pipeline", "paused": false, "public": false, "team_name": "main", "groups": [ { "name": "group1", "jobs": ["job1", "job2"], "resources": ["resource1", "resource2"] } ] }, { "name": "public-pipeline", "url": "/teams/main/pipelines/public-pipeline", "paused": true, "public": true, "team_name": "main", "groups": [ { "name": "group2", "jobs": ["job3", "job4"], "resources": ["resource3", "resource4"] } ] }]`)) }) Context("when the call to get active pipelines fails", func() { BeforeEach(func() { teamDB.GetPipelinesReturns(nil, errors.New("disaster")) }) It("returns 500 internal server error", func() { Expect(response.StatusCode).To(Equal(http.StatusInternalServerError)) }) }) }) Context("when authenticated as another team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("another-team", 0, false, true) }) It("returns only team's public pipelines", func() { body, err := ioutil.ReadAll(response.Body) Expect(err).NotTo(HaveOccurred()) Expect(body).To(MatchJSON(`[ { "name": "public-pipeline", "url": "/teams/main/pipelines/public-pipeline", "paused": true, "public": true, "team_name": "main", "groups": [ { "name": "group2", "jobs": ["job3", "job4"], "resources": ["resource3", "resource4"] } ] }]`)) }) }) Context("when not authenticated", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) userContextReader.GetTeamReturns("", 0, 
false, false) }) It("returns only team's public pipelines", func() { body, err := ioutil.ReadAll(response.Body) Expect(err).NotTo(HaveOccurred()) Expect(body).To(MatchJSON(`[ { "name": "public-pipeline", "url": "/teams/main/pipelines/public-pipeline", "paused": true, "public": true, "team_name": "main", "groups": [ { "name": "group2", "jobs": ["job3", "job4"], "resources": ["resource3", "resource4"] } ] }]`)) }) }) }) Describe("GET /api/v1/teams/:team_name/pipelines/:pipeline_name", func() { var response *http.Response var savedPipeline db.SavedPipeline BeforeEach(func() { savedPipeline = db.SavedPipeline{ ID: 1, Paused: false, Public: true, TeamName: "a-team", Pipeline: db.Pipeline{ Name: "some-specific-pipeline", Config: atc.Config{ Groups: atc.GroupConfigs{ { Name: "group1", Jobs: []string{"job1", "job2"}, Resources: []string{"resource1", "resource2"}, }, { Name: "group2", Jobs: []string{"job3", "job4"}, Resources: []string{"resource3", "resource4"}, }, }, }, }, } pipelineDB.PipelineReturns(savedPipeline) }) JustBeforeEach(func() { req, err := http.NewRequest("GET", server.URL+"/api/v1/teams/a-team/pipelines/some-specific-pipeline", nil) Expect(err).NotTo(HaveOccurred()) req.Header.Set("Content-Type", "application/json") response, err = client.Do(req) Expect(err).NotTo(HaveOccurred()) }) Context("when not authenticated", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) userContextReader.GetTeamReturns("", 0, false, false) }) It("returns 401", func() { Expect(response.StatusCode).To(Equal(http.StatusUnauthorized)) }) }) Context("when authenticated as requested team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("a-team", 1, true, true) }) It("returns 200 ok", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) It("returns application/json", func() { Expect(response.Header.Get("Content-Type")).To(Equal("application/json")) }) It("returns a pipeline JSON", func() { body, err := ioutil.ReadAll(response.Body) Expect(err).NotTo(HaveOccurred()) Expect(body).To(MatchJSON(` { "name": "some-specific-pipeline", "url": "/teams/a-team/pipelines/some-specific-pipeline", "paused": false, "public": true, "team_name": "a-team", "groups": [ { "name": "group1", "jobs": ["job1", "job2"], "resources": ["resource1", "resource2"] }, { "name": "group2", "jobs": ["job3", "job4"], "resources": ["resource3", "resource4"] } ] }`)) }) }) Context("when authenticated as another team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("another-team", 1, true, true) }) Context("and the pipeline is private", func() { BeforeEach(func() { pipelineDB.IsPublicReturns(false) }) It("returns 403", func() { Expect(response.StatusCode).To(Equal(http.StatusForbidden)) }) }) Context("and the pipeline is public", func() { BeforeEach(func() { pipelineDB.IsPublicReturns(true) }) It("returns 200 OK", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) }) }) Context("when not authenticated at all", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) userContextReader.GetTeamReturns("", 1, true, false) }) Context("and the pipeline is private", func() { BeforeEach(func() { pipelineDB.IsPublicReturns(false) }) It("returns 401", func() { Expect(response.StatusCode).To(Equal(http.StatusUnauthorized)) }) }) Context("and the pipeline is public", func() { BeforeEach(func() { pipelineDB.IsPublicReturns(true) }) It("returns 200 OK", func() { 
Expect(response.StatusCode).To(Equal(http.StatusOK)) }) }) }) }) Describe("DELETE /api/v1/teams/:team_name/pipelines/:pipeline_name", func() { var response *http.Response JustBeforeEach(func() { pipelineName := "a-pipeline-name" req, err := http.NewRequest("DELETE", server.URL+"/api/v1/teams/a-team/pipelines/"+pipelineName, nil) Expect(err).NotTo(HaveOccurred()) req.Header.Set("Content-Type", "application/json") response, err = client.Do(req) Expect(err).NotTo(HaveOccurred()) }) Context("when authenticated", func() { Context("when requester belongs to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("a-team", 42, true, true) }) It("returns 204 No Content", func() { Expect(response.StatusCode).To(Equal(http.StatusNoContent)) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("a-team")) }) It("injects the proper pipelineDB", func() { pipelineName := teamDB.GetPipelineByNameArgsForCall(0) Expect(pipelineName).To(Equal("a-pipeline-name")) Expect(pipelineDBFactory.BuildCallCount()).To(Equal(1)) actualSavedPipeline := pipelineDBFactory.BuildArgsForCall(0) Expect(actualSavedPipeline).To(Equal(expectedSavedPipeline)) }) It("deletes the named pipeline from the database", func() { Expect(pipelineDB.DestroyCallCount()).To(Equal(1)) }) Context("when an error occurs destroying the pipeline", func() { BeforeEach(func() { err := errors.New("disaster!") pipelineDB.DestroyReturns(err) }) It("returns a 500 Internal Server Error", func() { Expect(response.StatusCode).To(Equal(http.StatusInternalServerError)) }) }) }) Context("when requester does not belong to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("another-team", 42, true, true) }) It("returns 403 Forbidden", func() { Expect(response.StatusCode).To(Equal(http.StatusForbidden)) }) }) }) Context("when the user is not logged in", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) }) It("returns 401 Unauthorized", func() { Expect(response.StatusCode).To(Equal(http.StatusUnauthorized)) }) }) }) Describe("PUT /api/v1/teams/:team_name/pipelines/:pipeline_name/pause", func() { var response *http.Response JustBeforeEach(func() { var err error request, err := http.NewRequest("PUT", server.URL+"/api/v1/teams/a-team/pipelines/a-pipeline/pause", nil) Expect(err).NotTo(HaveOccurred()) response, err = client.Do(request) Expect(err).NotTo(HaveOccurred()) }) Context("when authenticated", func() { Context("when requester belongs to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("a-team", 42, true, true) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("a-team")) }) It("injects the proper pipelineDB", func() { pipelineName := teamDB.GetPipelineByNameArgsForCall(0) Expect(pipelineName).To(Equal("a-pipeline")) Expect(pipelineDBFactory.BuildCallCount()).To(Equal(1)) actualSavedPipeline := pipelineDBFactory.BuildArgsForCall(0) Expect(actualSavedPipeline).To(Equal(expectedSavedPipeline)) }) Context("when pausing the pipeline succeeds", func() { BeforeEach(func() { pipelineDB.PauseReturns(nil) }) It("returns 200", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) }) Context("when pausing the pipeline 
fails", func() { BeforeEach(func() { pipelineDB.PauseReturns(errors.New("welp")) }) It("returns 500", func() { Expect(response.StatusCode).To(Equal(http.StatusInternalServerError)) }) }) }) Context("when requester does not belong to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("another-team", 42, true, true) }) It("returns 403 Forbidden", func() { Expect(response.StatusCode).To(Equal(http.StatusForbidden)) }) }) }) Context("when not authenticated", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) }) It("returns 401 Unauthorized", func() { Expect(response.StatusCode).To(Equal(http.StatusUnauthorized)) }) }) }) Describe("PUT /api/v1/teams/:team_name/pipelines/:pipeline_name/unpause", func() { var response *http.Response JustBeforeEach(func() { var err error request, err := http.NewRequest("PUT", server.URL+"/api/v1/teams/a-team/pipelines/a-pipeline/unpause", nil) Expect(err).NotTo(HaveOccurred()) response, err = client.Do(request) Expect(err).NotTo(HaveOccurred()) }) Context("when authenticated", func() { Context("when requester belongs to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("a-team", 42, true, true) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("a-team")) }) It("injects the proper pipelineDB", func() { pipelineName := teamDB.GetPipelineByNameArgsForCall(0) Expect(pipelineName).To(Equal("a-pipeline")) Expect(pipelineDBFactory.BuildCallCount()).To(Equal(1)) actualSavedPipeline := pipelineDBFactory.BuildArgsForCall(0) Expect(actualSavedPipeline).To(Equal(expectedSavedPipeline)) }) Context("when unpausing the pipeline succeeds", func() { BeforeEach(func() { pipelineDB.UnpauseReturns(nil) }) It("returns 200", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) }) Context("when unpausing the pipeline fails", func() { BeforeEach(func() { pipelineDB.UnpauseReturns(errors.New("welp")) }) It("returns 500", func() { Expect(response.StatusCode).To(Equal(http.StatusInternalServerError)) }) }) }) Context("when requester does not belong to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("another-team", 42, true, true) }) It("returns 403 Forbidden", func() { Expect(response.StatusCode).To(Equal(http.StatusForbidden)) }) }) }) Context("when not authenticated", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) }) It("returns 401 Unauthorized", func() { Expect(response.StatusCode).To(Equal(http.StatusUnauthorized)) }) }) })
var response *http.Response JustBeforeEach(func() { var err error request, err := http.NewRequest("PUT", server.URL+"/api/v1/teams/a-team/pipelines/a-pipeline/expose", nil) Expect(err).NotTo(HaveOccurred()) response, err = client.Do(request) Expect(err).NotTo(HaveOccurred()) }) Context("when authenticated", func() { Context("when requester belongs to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("a-team", 42, true, true) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("a-team")) }) It("injects the proper pipelineDB", func() { pipelineName := teamDB.GetPipelineByNameArgsForCall(0) Expect(pipelineName).To(Equal("a-pipeline")) Expect(pipelineDBFactory.BuildCallCount()).To(Equal(1)) actualSavedPipeline := pipelineDBFactory.BuildArgsForCall(0) Expect(actualSavedPipeline).To(Equal(expectedSavedPipeline)) }) Context("when exposing the pipeline succeeds", func() { BeforeEach(func() { pipelineDB.ExposeReturns(nil) }) It("returns 200", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) }) Context("when exposing the pipeline fails", func() { BeforeEach(func() { pipelineDB.ExposeReturns(errors.New("welp")) }) It("returns 500", func() { Expect(response.StatusCode).To(Equal(http.StatusInternalServerError)) }) }) }) Context("when requester does not belong to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("another-team", 42, true, true) }) It("returns 403 Forbidden", func() { Expect(response.StatusCode).To(Equal(http.StatusForbidden)) }) }) }) Context("when not authenticated", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) }) It("returns 401 Unauthorized", func() { Expect(response.StatusCode).To(Equal(http.StatusUnauthorized)) }) }) }) Describe("PUT /api/v1/teams/:team_name/pipelines/:pipeline_name/hide", func() { var response *http.Response JustBeforeEach(func() { var err error request, err := http.NewRequest("PUT", server.URL+"/api/v1/teams/a-team/pipelines/a-pipeline/hide", nil) Expect(err).NotTo(HaveOccurred()) response, err = client.Do(request) Expect(err).NotTo(HaveOccurred()) }) Context("when authenticated", func() { Context("when requester belongs to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("a-team", 42, true, true) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("a-team")) }) It("injects the proper pipelineDB", func() { pipelineName := teamDB.GetPipelineByNameArgsForCall(0) Expect(pipelineName).To(Equal("a-pipeline")) Expect(pipelineDBFactory.BuildCallCount()).To(Equal(1)) actualSavedPipeline := pipelineDBFactory.BuildArgsForCall(0) Expect(actualSavedPipeline).To(Equal(expectedSavedPipeline)) }) Context("when hiding the pipeline succeeds", func() { BeforeEach(func() { pipelineDB.HideReturns(nil) }) It("returns 200", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) }) Context("when hiding the pipeline fails", func() { BeforeEach(func() { pipelineDB.HideReturns(errors.New("welp")) }) It("returns 500", func() { Expect(response.StatusCode).To(Equal(http.StatusInternalServerError)) }) }) }) Context("when requester does not belong to the team", func() { BeforeEach(func() { 
				authValidator.IsAuthenticatedReturns(true)
				userContextReader.GetTeamReturns("another-team", 42, true, true)
			})

			It("returns 403 Forbidden", func() {
				Expect(response.StatusCode).To(Equal(http.StatusForbidden))
			})
		})
	})

	Context("when not authenticated", func() {
		BeforeEach(func() {
			authValidator.IsAuthenticatedReturns(false)
		})

		It("returns 401 Unauthorized", func() {
			Expect(response.StatusCode).To(Equal(http.StatusUnauthorized))
		})
	})
})

Describe("PUT /api/v1/teams/:team_name/pipelines/ordering", func() {
	var response *http.Response
	var body io.Reader

	BeforeEach(func() {
		body = bytes.NewBufferString(`
			[
				"a-pipeline",
				"another-pipeline",
				"yet-another-pipeline",
				"one-final-pipeline",
				"just-kidding"
			]
		`)
	})

	JustBeforeEach(func() {
		var err error
		request, err := http.NewRequest("PUT", server.URL+"/api/v1/teams/a-team/pipelines/ordering", body)
		Expect(err).NotTo(HaveOccurred())

		response, err = client.Do(request)
		Expect(err).NotTo(HaveOccurred())
	})

	Context("when authenticated", func() {
		Context("when requester belongs to the team", func() {
			BeforeEach(func() {
				authValidator.IsAuthenticatedReturns(true)
				userContextReader.GetTeamReturns("a-team", 42, true, true)
			})

			Context("with invalid json", func() {
				BeforeEach(func() {
					body = bytes.NewBufferString(`{}`)
				})

				It("returns 400", func() {
					Expect(response.StatusCode).To(Equal(http.StatusBadRequest))
				})
			})

			It("constructs teamDB with provided team name", func() {
				Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1))
				Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("a-team"))
			})

			Context("when ordering the pipelines succeeds", func() {
				BeforeEach(func() {
					teamDB.OrderPipelinesReturns(nil)
				})

				It("orders the pipelines", func() {
					Expect(teamDB.OrderPipelinesCallCount()).To(Equal(1))
					pipelineNames := teamDB.OrderPipelinesArgsForCall(0)
					Expect(pipelineNames).To(Equal(
						[]string{
							"a-pipeline",
							"another-pipeline",
							"yet-another-pipeline",
							"one-final-pipeline",
							"just-kidding",
						},
					))
				})

				It("returns 200", func() {
					Expect(response.StatusCode).To(Equal(http.StatusOK))
				})
			})

			Context("when ordering the pipelines fails", func() {
				BeforeEach(func() {
					teamDB.OrderPipelinesReturns(errors.New("welp"))
				})

				It("returns 500", func() {
					Expect(response.StatusCode).To(Equal(http.StatusInternalServerError))
				})
			})
		})

		Context("when requester does not belong to the team", func() {
			BeforeEach(func() {
				authValidator.IsAuthenticatedReturns(true)
				userContextReader.GetTeamReturns("another-team", 42, true, true)
			})

			It("returns 403 Forbidden", func() {
				Expect(response.StatusCode).To(Equal(http.StatusForbidden))
			})
		})
	})

	Context("when not authenticated", func() {
		BeforeEach(func() {
			authValidator.IsAuthenticatedReturns(false)
		})

		It("returns 401 Unauthorized", func() {
			Expect(response.StatusCode).To(Equal(http.StatusUnauthorized))
		})
	})
})

Describe("GET /api/v1/teams/:team_name/pipelines/:pipeline_name/versions-db", func() {
	var response *http.Response

	JustBeforeEach(func() {
		var err error
		request, err := http.NewRequest("GET", server.URL+"/api/v1/teams/a-team/pipelines/a-pipeline/versions-db", nil)
		Expect(err).NotTo(HaveOccurred())

		response, err = client.Do(request)
		Expect(err).NotTo(HaveOccurred())
	})

	Context("when authenticated", func() {
		BeforeEach(func() {
			authValidator.IsAuthenticatedReturns(true)
			userContextReader.GetTeamReturns("a-team", 42, true, true)

			//construct Version db
			pipelineDB.LoadVersionsDBReturns(
				&algorithm.VersionsDB{
					ResourceVersions: []algorithm.ResourceVersion{
						{
							VersionID:  73,
							ResourceID: 127,
							CheckOrder: 123,
						},
					},
					BuildOutputs: []algorithm.BuildOutput{
						{
ResourceVersion: algorithm.ResourceVersion{ VersionID: 73, ResourceID: 127, CheckOrder: 123, }, BuildID: 66, JobID: 13, }, }, BuildInputs: []algorithm.BuildInput{ { ResourceVersion: algorithm.ResourceVersion{ VersionID: 66, ResourceID: 77, CheckOrder: 88, }, BuildID: 66, JobID: 13, InputName: "some-input-name", }, }, JobIDs: map[string]int{ "bad-luck-job": 13, }, ResourceIDs: map[string]int{ "resource-127": 127, }, CachedAt: time.Unix(42, 0).UTC(), }, nil, ) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("a-team")) }) It("returns 200", func() { Expect(response.StatusCode).To(Equal(http.StatusOK)) }) It("returns application/json", func() { Expect(response.Header.Get("Content-Type")).To(Equal("application/json")) }) It("returns a json representation of all the versions in the pipeline", func() { body, err := ioutil.ReadAll(response.Body) Expect(err).NotTo(HaveOccurred()) Expect(body).To(MatchJSON(`{ "ResourceVersions": [ { "VersionID": 73, "ResourceID": 127, "CheckOrder": 123 } ], "BuildOutputs": [ { "VersionID": 73, "ResourceID": 127, "BuildID": 66, "JobID": 13, "CheckOrder": 123 } ], "BuildInputs": [ { "VersionID": 66, "ResourceID": 77, "BuildID": 66, "JobID": 13, "CheckOrder": 88, "InputName": "some-input-name" } ], "JobIDs": { "bad-luck-job": 13 }, "ResourceIDs": { "resource-127": 127 }, "CachedAt": "1970-01-01T00:00:42Z" }`)) }) }) Context("when not authenticated", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) }) It("returns 401 Unauthorized", func() { Expect(response.StatusCode).To(Equal(http.StatusUnauthorized)) }) }) }) Describe("PUT /api/v1/teams/:team_name/pipelines/:pipeline_name/rename", func() { var response *http.Response JustBeforeEach(func() { var err error request, err := http.NewRequest("PUT", server.URL+"/api/v1/teams/a-team/pipelines/a-pipeline/rename", bytes.NewBufferString(`{"name":"some-new-name"}`)) Expect(err).NotTo(HaveOccurred()) response, err = client.Do(request) Expect(err).NotTo(HaveOccurred()) }) Context("when authenticated", func() { Context("when requester belongs to the team", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("a-team", 42, true, true) }) It("constructs teamDB with provided team name", func() { Expect(teamDBFactory.GetTeamDBCallCount()).To(Equal(1)) Expect(teamDBFactory.GetTeamDBArgsForCall(0)).To(Equal("a-team")) }) It("injects the proper pipelineDB", func() { pipelineName := teamDB.GetPipelineByNameArgsForCall(0) Expect(pipelineName).To(Equal("a-pipeline")) Expect(pipelineDBFactory.BuildCallCount()).To(Equal(1)) actualSavedPipeline := pipelineDBFactory.BuildArgsForCall(0) Expect(actualSavedPipeline).To(Equal(expectedSavedPipeline)) }) It("returns 204", func() { Expect(response.StatusCode).To(Equal(http.StatusNoContent)) }) It("renames the pipeline to the name provided", func() { Expect(pipelineDB.UpdateNameCallCount()).To(Equal(1)) Expect(pipelineDB.UpdateNameArgsForCall(0)).To(Equal("some-new-name")) }) Context("when an error occurs on update", func() { BeforeEach(func() { pipelineDB.UpdateNameReturns(errors.New("whoops")) }) It("returns a 500 internal server error", func() { Expect(response.StatusCode).To(Equal(http.StatusInternalServerError)) Expect(logger.LogMessages()).To(ContainElement("callbacks.call-to-update-pipeline-name-failed")) }) }) }) Context("when requester does not belong to the team", func() { BeforeEach(func() { 
authValidator.IsAuthenticatedReturns(true) userContextReader.GetTeamReturns("another-team", 42, true, true) }) It("returns 403 Forbidden", func() { Expect(response.StatusCode).To(Equal(http.StatusForbidden)) }) }) }) Context("when not authenticated", func() { BeforeEach(func() { authValidator.IsAuthenticatedReturns(false) }) It("returns 401 Unauthorized", func() { Expect(response.StatusCode).To(Equal(http.StatusUnauthorized)) }) }) }) })
Describe("PUT /api/v1/teams/:team_name/pipelines/:pipeline_name/expose", func() {
svc.rs
// Possibly unused, but useful during development. pub use crate::proxy::http; use crate::transport::Connect; use crate::{cache, Error}; pub use linkerd2_buffer as buffer; use linkerd2_concurrency_limit as concurrency_limit; pub use linkerd2_stack::{self as stack, layer, NewService}; pub use linkerd2_stack_tracing::{InstrumentMake, InstrumentMakeLayer}; pub use linkerd2_timeout as timeout; use std::task::{Context, Poll}; use std::time::Duration; use tower::layer::util::{Identity, Stack as Pair}; pub use tower::layer::Layer; pub use tower::make::MakeService; pub use tower::spawn_ready::SpawnReady; pub use tower::util::{Either, Oneshot}; pub use tower::{service_fn as mk, Service, ServiceExt}; #[derive(Clone, Debug)] pub struct Layers<L>(L); #[derive(Clone, Debug)] pub struct Stack<S>(S); pub fn layers() -> Layers<Identity> { Layers(Identity::new()) } pub fn stack<S>(inner: S) -> Stack<S> { Stack(inner) } pub fn connect(keepalive: Option<Duration>) -> Stack<Connect> { Stack(Connect::new(keepalive)) } pub fn proxies() -> Stack<IdentityProxy> { Stack(IdentityProxy(())) } #[derive(Copy, Clone, Debug)] pub struct IdentityProxy(()); impl<T> NewService<T> for IdentityProxy { type Service = (); fn new_service(&mut self, _: T) -> Self::Service { () } } #[allow(dead_code)] impl<L> Layers<L> { pub fn push<O>(self, outer: O) -> Layers<Pair<L, O>> { Layers(Pair::new(self.0, outer)) } pub fn push_map_target<M>(self, map_target: M) -> Layers<Pair<L, stack::MapTargetLayer<M>>> { self.push(stack::MapTargetLayer::new(map_target)) } /// Wraps an inner `MakeService` to be a `NewService`. pub fn push_into_new_service( self, ) -> Layers<Pair<L, stack::new_service::FromMakeServiceLayer>> { self.push(stack::new_service::FromMakeServiceLayer::default()) } /// Buffers requests in an mpsc, spawning the inner service onto a dedicated task. pub fn push_spawn_buffer<Req, Rsp>( self, capacity: usize, ) -> Layers<Pair<L, buffer::SpawnBufferLayer<Req, Rsp>>> where Req: Send + 'static, Rsp: Send + 'static, { self.push(buffer::SpawnBufferLayer::new(capacity)) } pub fn push_spawn_buffer_with_idle_timeout<Req, Rsp>( self, capacity: usize, idle_timeout: Duration, ) -> Layers<Pair<L, buffer::SpawnBufferLayer<Req, Rsp>>> where Req: Send + 'static, Rsp: Send + 'static, { self.push(buffer::SpawnBufferLayer::new(capacity).with_idle_timeout(idle_timeout)) } // Makes the service eagerly process and fail requests after the given timeout. 
pub fn push_failfast(self, timeout: Duration) -> Layers<Pair<L, timeout::FailFastLayer>> { self.push(timeout::FailFastLayer::new(timeout)) } pub fn push_on_response<U>(self, layer: U) -> Layers<Pair<L, stack::OnResponseLayer<U>>> { self.push(stack::OnResponseLayer::new(layer)) } pub fn push_concurrency_limit(self, max: usize) -> Layers<Pair<L, concurrency_limit::Layer>> { self.push(concurrency_limit::Layer::new(max)) } pub fn push_make_ready<Req>(self) -> Layers<Pair<L, stack::MakeReadyLayer<Req>>> { self.push(stack::MakeReadyLayer::new()) } pub fn push_map_response<R: Clone>( self, map_response: R, ) -> Layers<Pair<L, stack::MapResponseLayer<R>>> { self.push(stack::MapResponseLayer::new(map_response)) } pub fn box_http_request<B>(self) -> Layers<Pair<L, http::boxed::request::Layer<B>>> where B: hyper::body::HttpBody + 'static, { self.push(http::boxed::request::Layer::new()) } pub fn box_http_response(self) -> Layers<Pair<L, http::boxed::response::Layer>> { self.push(http::boxed::response::Layer::new()) } pub fn push_oneshot(self) -> Layers<Pair<L, stack::OneshotLayer>> { self.push(stack::OneshotLayer::new()) }
    }
}

impl<M, L: Layer<M>> Layer<M> for Layers<L> {
    type Service = L::Service;

    fn layer(&self, inner: M) -> Self::Service {
        self.0.layer(inner)
    }
}

#[allow(dead_code)]
impl<S> Stack<S> {
    pub fn push<L: Layer<S>>(self, layer: L) -> Stack<L::Service> {
        Stack(layer.layer(self.0))
    }

    pub fn push_map_target<M: Clone>(
        self,
        map_target: M,
    ) -> Stack<stack::map_target::MapTargetService<S, M>> {
        self.push(stack::map_target::MapTargetLayer::new(map_target))
    }

    pub fn push_request_filter<F: Clone>(self, filter: F) -> Stack<stack::RequestFilter<F, S>> {
        self.push(layer::mk(|inner| {
            stack::RequestFilter::new(filter.clone(), inner)
        }))
    }

    /// Wraps a `Service<T>` as a `Service<()>`.
    ///
    /// Each time the service is called, the `T`-typed request is cloned and
    /// issued into the inner service.
    pub fn push_make_thunk(self) -> Stack<stack::make_thunk::MakeThunk<S>> {
        self.push(layer::mk(stack::make_thunk::MakeThunk::new))
    }

    pub fn instrument<G: Clone>(self, get_span: G) -> Stack<InstrumentMake<G, S>> {
        self.push(InstrumentMakeLayer::new(get_span))
    }

    pub fn instrument_from_target(self) -> Stack<InstrumentMake<(), S>> {
        self.push(InstrumentMakeLayer::from_target())
    }

    /// Wraps an inner `MakeService` to be a `NewService`.
    pub fn into_new_service(self) -> Stack<stack::new_service::FromMakeService<S>> {
        self.push(stack::new_service::FromMakeServiceLayer::default())
    }

    pub fn into_make_service<T>(self) -> Stack<stack::new_service::IntoMakeService<S>>
    where
        S: NewService<T>,
    {
        Stack(stack::new_service::IntoMakeService::new(self.0))
    }

    pub fn push_make_ready<Req>(self) -> Stack<stack::MakeReady<S, Req>> {
        self.push(stack::MakeReadyLayer::new())
    }

    /// Buffers requests when the next layer is out of capacity.
    pub fn spawn_buffer<Req, Rsp>(self, capacity: usize) -> Stack<buffer::Buffer<Req, Rsp>>
    where
        Req: Send + 'static,
        Rsp: Send + 'static,
        S: Service<Req, Response = Rsp> + Send + 'static,
        S::Error: Into<Error> + Send + Sync,
        S::Future: Send,
    {
        self.push(buffer::SpawnBufferLayer::new(capacity))
    }

    /// Assuming `S` implements `NewService` or `MakeService`, applies the given
    /// `L`-typed layer on each service produced by `S`.
    pub fn push_on_response<L: Clone>(self, layer: L) -> Stack<stack::OnResponse<L, S>> {
        self.push(stack::OnResponseLayer::new(layer))
    }

    pub fn push_concurrency_limit(
        self,
        max: usize,
    ) -> Stack<concurrency_limit::ConcurrencyLimit<S>> {
        self.push(concurrency_limit::Layer::new(max))
    }

    pub fn push_timeout(self, timeout: Duration) -> Stack<tower::timeout::Timeout<S>> {
        self.push(tower::timeout::TimeoutLayer::new(timeout))
    }

    // Makes the service eagerly process and fail requests after the given timeout.
pub fn push_failfast(self, timeout: Duration) -> Stack<timeout::FailFast<S>> { self.push(timeout::FailFastLayer::new(timeout)) } pub fn push_oneshot(self) -> Stack<stack::Oneshot<S>> { self.push(stack::OneshotLayer::new()) } pub fn push_map_response<R: Clone>(self, map_response: R) -> Stack<stack::MapResponse<S, R>> { self.push(stack::MapResponseLayer::new(map_response)) } pub fn push_http_insert_target(self) -> Stack<http::insert::target::NewService<S>> { self.push(http::insert::target::layer()) } pub fn cache<T, L, U>(self, track: L) -> Stack<cache::Cache<T, cache::layer::NewTrack<L, S>>> where T: Eq + std::hash::Hash + Send + 'static, S: NewService<T> + Clone, L: tower::layer::Layer<cache::layer::Track<S>> + Clone, L::Service: NewService<T, Service = U>, { self.push(cache::CacheLayer::new(track)) } /// Push a service that either calls the inner service if it is ready, or /// calls a `secondary` service if the inner service fails to become ready /// for the `skip_after` duration. pub fn push_when_unready<B: Clone>( self, secondary: B, skip_after: Duration, ) -> Stack<stack::NewSwitchReady<S, B>> { self.push(layer::mk(|inner: S| { stack::NewSwitchReady::new(inner, secondary.clone(), skip_after) })) } pub fn push_switch<T: Clone, U: Clone>( self, switch: T, other: U, ) -> Stack<stack::MakeSwitch<T, S, U>> { self.push(layer::mk(|inner: S| { stack::MakeSwitch::new(switch.clone(), inner, other.clone()) })) } // pub fn box_http_request<B>(self) -> Stack<http::boxed::BoxRequest<S, B>> // where // B: hyper::body::HttpBody<Data = http::boxed::Data, Error = Error> + 'static, // S: tower::Service<http::Request<http::boxed::BoxBody>>, // { // self.push(http::boxed::request::Layer::new()) // } pub fn box_http_response(self) -> Stack<http::boxed::BoxResponse<S>> { self.push(http::boxed::response::Layer::new()) } pub fn box_new_service<T>(self) -> Stack<stack::BoxNewService<T, S::Service>> where S: NewService<T> + Clone + Send + Sync + 'static, S::Service: Send + 'static, { self.push(layer::mk(stack::BoxNewService::new)) } /// Validates that this stack serves T-typed targets. pub fn check_new<T>(self) -> Self where S: NewService<T>, { self } pub fn check_new_clone<T>(self) -> Self where S: NewService<T>, S::Service: Clone, { self } /// Validates that this stack serves T-typed targets. pub fn check_new_service<T, Req>(self) -> Self where S: NewService<T>, S::Service: Service<Req>, { self } /// Validates that this stack serves T-typed targets. pub fn check_clone_new_service<T, Req>(self) -> Self where S: NewService<T> + Clone, S::Service: Service<Req>, { self } /// Validates that this stack can be cloned pub fn check_clone(self) -> Self where S: Clone, { self } /// Validates that this stack serves T-typed targets. pub fn check_service<T>(self) -> Self where S: Service<T>, { self } /// Validates that this stack serves T-typed targets with `Unpin` futures. pub fn check_service_unpin<T>(self) -> Self where S: Service<T>, S::Future: Unpin, { self } pub fn check_service_response<T, U>(self) -> Self where S: Service<T, Response = U>, { self } /// Validates that this stack serves T-typed targets. pub fn check_make_service<T, U>(self) -> Self where S: MakeService<T, U>, { self } /// Validates that this stack serves T-typed targets. 
pub fn check_make_service_clone<T, U>(self) -> Self where S: MakeService<T, U> + Clone, S::Service: Clone, { self } pub fn check_new_send_and_static<M, T, Req>(self) -> Self where S: NewService<T, Service = M>, M: Service<Req> + Send + 'static, M::Response: Send + 'static, M::Error: Into<Error> + Send + Sync, M::Future: Send, { self } pub fn into_inner(self) -> S { self.0 } } impl<T, N> NewService<T> for Stack<N> where N: NewService<T>, { type Service = N::Service; fn new_service(&mut self, t: T) -> Self::Service { self.0.new_service(t) } } impl<T, S> Service<T> for Stack<S> where S: Service<T>, { type Response = S::Response; type Error = S::Error; type Future = S::Future; fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { self.0.poll_ready(cx) } fn call(&mut self, t: T) -> Self::Future { self.0.call(t) } } pub mod make_response { use super::Oneshot; use crate::Error; use futures::TryFuture; use pin_project::pin_project; use std::future::Future; use std::pin::Pin; use std::task::{Context, Poll}; #[derive(Copy, Clone, Debug)] pub struct Layer; #[derive(Clone, Debug)] pub struct MakeResponse<M>(M); #[pin_project] pub struct ResponseFuture<F, S: tower::Service<()>> { #[pin] state: State<F, S>, } #[pin_project(project = StateProj)] enum State<F, S: tower::Service<()>> { Make(#[pin] F), Respond(#[pin] Oneshot<S, ()>), } impl<S> super::Layer<S> for Layer { type Service = MakeResponse<S>; fn layer(&self, inner: S) -> Self::Service { MakeResponse(inner) } } impl<T, M> tower::Service<T> for MakeResponse<M> where M: tower::make::MakeService<T, ()>, M::MakeError: Into<Error>, M::Error: Into<Error>, { type Response = M::Response; type Error = Error; type Future = ResponseFuture<M::Future, M::Service>; fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { self.0.poll_ready(cx).map_err(Into::into) } fn call(&mut self, req: T) -> Self::Future { ResponseFuture { state: State::Make(self.0.make_service(req)), } } } impl<F, S> Future for ResponseFuture<F, S> where F: TryFuture<Ok = S>, F::Error: Into<Error>, S: tower::Service<()>, S::Error: Into<Error>, { type Output = Result<S::Response, Error>; fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let mut this = self.project(); loop { match this.state.as_mut().project() { StateProj::Make(fut) => { let svc = futures::ready!(fut.try_poll(cx)).map_err(Into::into)?; this.state.set(State::Respond(Oneshot::new(svc, ()))) } StateProj::Respond(fut) => return fut.poll(cx).map_err(Into::into), } } } } }
pub fn push_instrument<G: Clone>(self, get_span: G) -> Layers<Pair<L, InstrumentMakeLayer<G>>> { self.push(InstrumentMakeLayer::new(get_span))
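The `Layers`/`Stack` helpers above express tower-style onion composition: every `push` wraps the current service in one more layer, so the layer pushed last sits outermost. A minimal sketch of that idea in plain Python follows; it illustrates the pattern only and is not linkerd's API.

# A "layer" is a function from an inner service (a callable) to a wrapped one.
def logging_layer(inner):
    def svc(req):
        print("request:", req)
        return inner(req)
    return svc

class PyStack:
    def __init__(self, inner):
        self.inner = inner

    def push(self, layer):
        # Like Stack::push: wrap the current service, keep the builder style.
        return PyStack(layer(self.inner))

stack = PyStack(lambda req: req.upper()).push(logging_layer)
print(stack.inner("hello"))  # prints the request, then "HELLO"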
time_space_diagram.py
"""Generate a time space diagram for some networks. This method accepts as input a csv file containing the sumo-formatted emission file, and then uses this data to generate a time-space diagram, with the x-axis being the time (in seconds), the y-axis being the position of a vehicle, and color representing the speed of te vehicles. If the number of simulation steps is too dense, you can plot every nth step in the plot by setting the input `--steps=n`. Note: This script assumes that the provided network has only one lane on the each edge, or one lane on the main highway in the case of MergeNetwork. Usage ----- :: python time_space_diagram.py </path/to/emission>.csv </path/to/params>.json """ from flow.utils.rllib import get_flow_params from flow.networks import RingNetwork, FigureEightNetwork, MergeNetwork, I210SubNetwork, HighwayNetwork import argparse import csv try: from matplotlib import pyplot as plt except ImportError: import matplotlib matplotlib.use('TkAgg') from matplotlib import pyplot as plt from matplotlib.collections import LineCollection import matplotlib.colors as colors import numpy as np # networks that can be plotted by this method ACCEPTABLE_NETWORKS = [ RingNetwork, FigureEightNetwork, MergeNetwork, I210SubNetwork, HighwayNetwork ] def import_data_from_emission(fp): r"""Import relevant data from the predefined emission (.csv) file. Parameters ---------- fp : str file path (for the .csv formatted file) Returns ------- dict of dict Key = "veh_id": name of the vehicle \n Elements: * "time": time step at every sample * "edge": edge ID at every sample * "pos": relative position at every sample * "vel": speed at every sample """ # initialize all output variables veh_id, t, edge, rel_pos, vel, lane = [], [], [], [], [], [] # import relevant data from emission file for record in csv.DictReader(open(fp)): veh_id.append(record['id']) t.append(record['time']) edge.append(record['edge_id']) rel_pos.append(record['relative_position']) vel.append(record['speed']) lane.append(record['lane_number']) # we now want to separate data by vehicle ID ret = {key: {'time': [], 'edge': [], 'pos': [], 'vel': [], 'lane': []} for key in np.unique(veh_id)} for i in range(len(veh_id)): ret[veh_id[i]]['time'].append(float(t[i])) ret[veh_id[i]]['edge'].append(edge[i]) ret[veh_id[i]]['pos'].append(float(rel_pos[i])) ret[veh_id[i]]['vel'].append(float(vel[i])) ret[veh_id[i]]['lane'].append(float(lane[i])) return ret def get_time_space_data(data, params): r"""Compute the unique inflows and subsequent outflow statistics. Parameters ---------- data : dict of dict Key = "veh_id": name of the vehicle \n Elements: * "time": time step at every sample * "edge": edge ID at every sample * "pos": relative position at every sample * "vel": speed at every sample params : dict flow-specific parameters, including: * "network" (str): name of the network that was used when generating the emission file. Must be one of the network names mentioned in ACCEPTABLE_NETWORKS, * "net_params" (flow.core.params.NetParams): network-specific parameters. This is used to collect the lengths of various network links. Returns ------- as_array n_steps x n_veh matrix specifying the absolute position of every vehicle at every time step. Set to zero if the vehicle is not present in the network at that time step. as_array n_steps x n_veh matrix specifying the speed of every vehicle at every time step. Set to zero if the vehicle is not present in the network at that time step. 
as_array a (n_steps,) vector representing the unique time steps in the simulation Raises ------ AssertionError if the specified network is not supported by this method """ # check that the network is appropriate assert params['network'] in ACCEPTABLE_NETWORKS, \ 'Network must be one of: ' + ', '.join(ACCEPTABLE_NETWORKS) # switcher used to compute the positions based on the type of network # switcher used to compute the positions based on the type of network switcher = { RingNetwork: _ring_road, MergeNetwork: _merge, FigureEightNetwork: _figure_eight, I210SubNetwork: _i210_subnetwork, HighwayNetwork: _highway, } # Collect a list of all the unique times. all_time = [] for veh_id in data.keys(): all_time.extend(data[veh_id]['time']) all_time = np.sort(np.unique(all_time)) # Get the function from switcher dictionary func = switcher[params['network']] # Execute the function pos, speed, all_time = func(data, params, all_time) return pos, speed, all_time def _merge(data, params, all_time): r"""Generate position and speed data for the merge. This only include vehicles on the main highway, and not on the adjacent on-ramp. Parameters ---------- data : dict of dict Key = "veh_id": name of the vehicle \n Elements: * "time": time step at every sample * "edge": edge ID at every sample * "pos": relative position at every sample * "vel": speed at every sample params : dict flow-specific parameters all_time : array_like a (n_steps,) vector representing the unique time steps in the simulation Returns ------- as_array n_steps x n_veh matrix specifying the absolute position of every vehicle at every time step. Set to zero if the vehicle is not present in the network at that time step. as_array n_steps x n_veh matrix specifying the speed of every vehicle at every time step. Set to zero if the vehicle is not present in the network at that time step. """ # import network data from flow params inflow_edge_len = 100 premerge = params['net'].additional_params['pre_merge_length'] postmerge = params['net'].additional_params['post_merge_length'] # generate edge starts edgestarts = { 'inflow_highway': 0, 'left': inflow_edge_len + 0.1, 'center': inflow_edge_len + premerge + 22.6, 'inflow_merge': inflow_edge_len + premerge + postmerge + 22.6, 'bottom': 2 * inflow_edge_len + premerge + postmerge + 22.7, ':left_0': inflow_edge_len,
    # compute the absolute position
    for veh_id in data.keys():
        data[veh_id]['abs_pos'] = _get_abs_pos(
            data[veh_id]['edge'], data[veh_id]['pos'], edgestarts)

    # prepare the speed and absolute position in a way that is compatible with
    # the space-time diagram, and compute the number of vehicles at each step
    pos = np.zeros((all_time.shape[0], len(data.keys())))
    speed = np.zeros((all_time.shape[0], len(data.keys())))
    for i, veh_id in enumerate(sorted(data.keys())):
        for spd, abs_pos, ti, edge in zip(data[veh_id]['vel'],
                                          data[veh_id]['abs_pos'],
                                          data[veh_id]['time'],
                                          data[veh_id]['edge']):
            # avoid vehicles outside the main highway
            if edge in ['inflow_merge', 'bottom', ':bottom_0']:
                continue
            ind = np.where(ti == all_time)[0]
            pos[ind, i] = abs_pos
            speed[ind, i] = spd

    return pos, speed, all_time


def _highway(data, params, all_time):
    r"""Generate position and speed data for the highway subnetwork.

    Parameters
    ----------
    data : dict of dict
        Key = "veh_id": name of the vehicle \n Elements:

        * "time": time step at every sample
        * "edge": edge ID at every sample
        * "pos": relative position at every sample
        * "vel": speed at every sample
    params : dict
        flow-specific parameters
    all_time : array_like
        a (n_steps,) vector representing the unique time steps in the
        simulation

    Returns
    -------
    as_array
        n_steps x n_veh matrix specifying the absolute position of every
        vehicle at every time step. Set to zero if the vehicle is not present
        in the network at that time step.
    as_array
        n_steps x n_veh matrix specifying the speed of every vehicle at every
        time step. Set to zero if the vehicle is not present in the network
        at that time step.
    """
    length = params['net'].additional_params['length']
    num_edges = params['net'].additional_params['num_edges']
    edge_len = length / num_edges
    edge_starts = {}
    for i in range(num_edges):
        edge_starts.update({
            "highway_{}".format(i): i * edge_len,
            ":edge_{}_0".format(i): i * edge_len
        })

    # compute the absolute position
    for veh_id in data.keys():
        data[veh_id]['abs_pos'] = _get_abs_pos_1_edge(
            data[veh_id]['edge'], data[veh_id]['pos'], edge_starts)

    # track only vehicles that were around during this time period
    # create the output variables
    pos = np.zeros((all_time.shape[0], len(data.keys())))
    speed = np.zeros((all_time.shape[0], len(data.keys())))
    observed_row_list = []
    for i, veh_id in enumerate(sorted(data.keys())):
        for spd, abs_pos, ti, edge, lane in zip(data[veh_id]['vel'],
                                                data[veh_id]['abs_pos'],
                                                data[veh_id]['time'],
                                                data[veh_id]['edge'],
                                                data[veh_id]['lane']):
            # avoid vehicles not on the relevant edges
            if edge not in edge_starts.keys() or ti not in all_time:
                continue
            else:
                if i not in observed_row_list:
                    observed_row_list.append(i)
            ind = np.where(ti == all_time)[0]
            pos[ind, i] = abs_pos
            speed[ind, i] = spd

    pos = pos[:, observed_row_list]
    speed = speed[:, observed_row_list]

    return pos, speed, all_time


def _ring_road(data, params, all_time):
    r"""Generate position and speed data for the ring road.

    Vehicles that reach the top of the plot simply return to the bottom and
    continue.

    Parameters
    ----------
    data : dict of dict
        Key = "veh_id": name of the vehicle \n Elements:

        * "time": time step at every sample
        * "edge": edge ID at every sample
        * "pos": relative position at every sample
        * "vel": speed at every sample
    params : dict
        flow-specific parameters
    all_time : array_like
        a (n_steps,) vector representing the unique time steps in the
        simulation

    Returns
    -------
    as_array
        n_steps x n_veh matrix specifying the absolute position of every
        vehicle at every time step. Set to zero if the vehicle is not present
        in the network at that time step.
    as_array
        n_steps x n_veh matrix specifying the speed of every vehicle at every
        time step. Set to zero if the vehicle is not present in the network
        at that time step.
    """
    # import network data from flow params
    ring_length = params['net'].additional_params["length"]
    junction_length = 0.1  # length of inter-edge junctions

    edgestarts = {
        "bottom": 0,
        ":right_0": 0.25 * ring_length,
        "right": 0.25 * ring_length + junction_length,
        ":top_0": 0.5 * ring_length + junction_length,
        "top": 0.5 * ring_length + 2 * junction_length,
        ":left_0": 0.75 * ring_length + 2 * junction_length,
        "left": 0.75 * ring_length + 3 * junction_length,
        ":bottom_0": ring_length + 3 * junction_length
    }

    # compute the absolute position
    for veh_id in data.keys():
        data[veh_id]['abs_pos'] = _get_abs_pos(
            data[veh_id]['edge'], data[veh_id]['pos'], edgestarts)

    # create the output variables
    pos = np.zeros((all_time.shape[0], len(data.keys())))
    speed = np.zeros((all_time.shape[0], len(data.keys())))
    for i, veh_id in enumerate(sorted(data.keys())):
        for spd, abs_pos, ti in zip(data[veh_id]['vel'],
                                    data[veh_id]['abs_pos'],
                                    data[veh_id]['time']):
            ind = np.where(ti == all_time)[0]
            pos[ind, i] = abs_pos
            speed[ind, i] = spd

    return pos, speed, all_time


def _i210_subnetwork(data, params, all_time):
    r"""Generate position and speed data for the i210 subnetwork.

    We only look at the second to last lane of edge
    119257908#1-AddedOnRampEdge.

    Parameters
    ----------
    data : dict of dict
        Key = "veh_id": name of the vehicle \n Elements:

        * "time": time step at every sample
        * "edge": edge ID at every sample
        * "pos": relative position at every sample
        * "vel": speed at every sample
    params : dict
        flow-specific parameters
    all_time : array_like
        a (n_steps,) vector representing the unique time steps in the
        simulation

    Returns
    -------
    as_array
        n_steps x n_veh matrix specifying the absolute position of every
        vehicle at every time step. Set to zero if the vehicle is not present
        in the network at that time step.
    as_array
        n_steps x n_veh matrix specifying the speed of every vehicle at every
        time step. Set to zero if the vehicle is not present in the network
        at that time step.
    """
    # import network data from flow params
    #
    # edge_starts = {"119257908#0": 0,
    #                "119257908#1-AddedOnRampEdge": 686.98}
    desired_lane = 1
    edge_starts = {"119257914": 0,
                   "119257908#0": 61.58,
                   "119257908#1-AddedOnRampEdge": 686.98 + 61.58}
    # edge_starts = {"119257908#0": 0}
    # edge_starts = {"119257908#1-AddedOnRampEdge": 0}
    # desired_lane = 5

    # compute the absolute position
    for veh_id in data.keys():
        data[veh_id]['abs_pos'] = _get_abs_pos_1_edge(
            data[veh_id]['edge'], data[veh_id]['pos'], edge_starts)

    # create the output variables
    # TODO(@ev) handle subsampling better than this
    low_time = int(0 / params['sim'].sim_step)
    high_time = int(1600 / params['sim'].sim_step)
    all_time = all_time[low_time:high_time]

    # track only vehicles that were around during this time period
    observed_row_list = []
    pos = np.zeros((all_time.shape[0], len(data.keys())))
    speed = np.zeros((all_time.shape[0], len(data.keys())))
    for i, veh_id in enumerate(sorted(data.keys())):
        for spd, abs_pos, ti, edge, lane in zip(data[veh_id]['vel'],
                                                data[veh_id]['abs_pos'],
                                                data[veh_id]['time'],
                                                data[veh_id]['edge'],
                                                data[veh_id]['lane']):
            # avoid vehicles not on the relevant edges. Also only check the
            # second to last lane
            if edge not in edge_starts.keys() or ti not in all_time \
                    or lane != desired_lane:
                continue
            else:
                if i not in observed_row_list:
                    observed_row_list.append(i)
            ind = np.where(ti == all_time)[0]
            pos[ind, i] = abs_pos
            speed[ind, i] = spd

    pos = pos[:, observed_row_list]
    speed = speed[:, observed_row_list]

    return pos, speed, all_time


def _figure_eight(data, params, all_time):
    r"""Generate position and speed data for the figure eight.

    The vehicles traveling towards the intersection from one side will be
    plotted from the top downward, while the vehicles from the other side
    will be plotted from the bottom upward.

    Parameters
    ----------
    data : dict of dict
        Key = "veh_id": name of the vehicle \n Elements:

        * "time": time step at every sample
        * "edge": edge ID at every sample
        * "pos": relative position at every sample
        * "vel": speed at every sample
    params : dict
        flow-specific parameters
    all_time : array_like
        a (n_steps,) vector representing the unique time steps in the
        simulation

    Returns
    -------
    as_array
        n_steps x n_veh matrix specifying the absolute position of every
        vehicle at every time step. Set to zero if the vehicle is not present
        in the network at that time step.
    as_array
        n_steps x n_veh matrix specifying the speed of every vehicle at every
        time step. Set to zero if the vehicle is not present in the network
        at that time step.
    """
    # import network data from flow params
    net_params = params['net']
    ring_radius = net_params.additional_params['radius_ring']
    ring_edgelen = ring_radius * np.pi / 2.
    intersection = 2 * ring_radius
    junction = 2.9 + 3.3 * net_params.additional_params['lanes']
    inner = 0.28

    # generate edge starts
    edgestarts = {
        'bottom': inner,
        'top': intersection / 2 + junction + inner,
        'upper_ring': intersection + junction + 2 * inner,
        'right': intersection + 3 * ring_edgelen + junction + 3 * inner,
        'left': 1.5 * intersection + 3 * ring_edgelen + 2 * junction + 3 * inner,
        'lower_ring': 2 * intersection + 3 * ring_edgelen + 2 * junction + 4 * inner,
        ':bottom_0': 0,
        ':center_1': intersection / 2 + inner,
        ':top_0': intersection + junction + inner,
        ':right_0': intersection + 3 * ring_edgelen + junction + 2 * inner,
        ':center_0': 1.5 * intersection + 3 * ring_edgelen + junction + 3 * inner,
        ':left_0': 2 * intersection + 3 * ring_edgelen + 2 * junction + 3 * inner,
        # for aimsun
        'bottom_to_top': intersection / 2 + inner,
        'right_to_left': junction + 3 * inner,
    }

    # compute the absolute position
    for veh_id in data.keys():
        data[veh_id]['abs_pos'] = _get_abs_pos(
            data[veh_id]['edge'], data[veh_id]['pos'], edgestarts)

    # create the output variables
    pos = np.zeros((all_time.shape[0], len(data.keys())))
    speed = np.zeros((all_time.shape[0], len(data.keys())))
    for i, veh_id in enumerate(sorted(data.keys())):
        for spd, abs_pos, ti in zip(data[veh_id]['vel'],
                                    data[veh_id]['abs_pos'],
                                    data[veh_id]['time']):
            ind = np.where(ti == all_time)[0]
            pos[ind, i] = abs_pos
            speed[ind, i] = spd

    # reorganize data for space-time plot
    figure_eight_len = 6 * ring_edgelen + 2 * intersection + 2 * junction + 10 * inner
    intersection_loc = [edgestarts[':center_1'] + intersection / 2,
                        edgestarts[':center_0'] + intersection / 2]
    pos[pos < intersection_loc[0]] += figure_eight_len
    pos[np.logical_and(pos > intersection_loc[0], pos < intersection_loc[1])] \
        += - intersection_loc[1]
    pos[pos > intersection_loc[1]] = \
        - pos[pos > intersection_loc[1]] + figure_eight_len + intersection_loc[0]

    return pos, speed, all_time


def _get_abs_pos(edge, rel_pos, edgestarts):
    """Compute the absolute positions from edges and relative positions.

    This is the variable we will ultimately use to plot individual vehicles.

    Parameters
    ----------
    edge : list of str
        list of edges at every time step
    rel_pos : list of float
        list of relative positions at every time step
    edgestarts : dict
        the absolute starting position of every edge

    Returns
    -------
    list of float
        the absolute position for every sample
    """
    ret = []
    for edge_i, pos_i in zip(edge, rel_pos):
        ret.append(pos_i + edgestarts[edge_i])
    return ret


def _get_abs_pos_1_edge(edges, rel_pos, edge_starts):
    """Compute the absolute positions from a subset of edges.

    This is the variable we will ultimately use to plot individual vehicles.

    Parameters
    ----------
    edges : list of str
        list of edges at every time step
    rel_pos : list of float
        list of relative positions at every time step
    edge_starts : dict
        the absolute starting position of every edge

    Returns
    -------
    list of float
        the absolute position for every sample
    """
    ret = []
    for edge_i, pos_i in zip(edges, rel_pos):
        if edge_i in edge_starts.keys():
            ret.append(pos_i + edge_starts[edge_i])
        else:
            ret.append(-1)
    return ret


def make_ts_diagram(flow_params, emission_path, min_speed, max_speed,
                    start, stop, title):
    # flow_params is imported as a dictionary
    if '.json' in flow_params:
        flow_params = get_flow_params(flow_params)
    else:
        module = __import__("examples.exp_configs.non_rl",
                            fromlist=[flow_params])
        flow_params = getattr(module, flow_params).flow_params

    # import data from the emission.csv file
    emission_data = import_data_from_emission(emission_path)

    # compute the position and speed for all vehicles at all times
    pos, speed, time = get_time_space_data(emission_data, flow_params)

    # some plotting parameters
    cdict = {
        'red': ((0, 0, 0), (0.2, 1, 1), (0.6, 1, 1), (1, 0, 0)),
        'green': ((0, 0, 0), (0.2, 0, 0), (0.6, 1, 1), (1, 1, 1)),
        'blue': ((0, 0, 0), (0.2, 0, 0), (0.6, 0, 0), (1, 0, 0))
    }
    my_cmap = colors.LinearSegmentedColormap('my_colormap', cdict, 1024)

    # perform plotting operation
    fig = plt.figure(figsize=(16, 9))
    ax = plt.axes()
    norm = plt.Normalize(min_speed, max_speed)
    cols = []

    xmin = max(time[0], start)
    xmax = min(time[-1], stop)
    xbuffer = (xmax - xmin) * 0.025  # 2.5% of range
    ymin, ymax = np.amin(pos), np.amax(pos)
    ybuffer = (ymax - ymin) * 0.025  # 2.5% of range

    ax.set_xlim(xmin - xbuffer, xmax + xbuffer)
    ax.set_ylim(ymin - ybuffer, ymax + ybuffer)

    for indx_car in range(pos.shape[1]):
        unique_car_pos = pos[:, indx_car]

        if flow_params['network'] == I210SubNetwork or \
                flow_params['network'] == HighwayNetwork:
            indices = np.where(pos[:, indx_car] != 0)[0]
            unique_car_speed = speed[indices, indx_car]
            points = np.array(
                [time[indices], pos[indices, indx_car]]).T.reshape(-1, 1, 2)
        else:
            # discontinuity from wraparound
            disc = np.where(np.abs(np.diff(unique_car_pos)) >= 10)[0] + 1
            unique_car_time = np.insert(time, disc, np.nan)
            unique_car_pos = np.insert(unique_car_pos, disc, np.nan)
            unique_car_speed = np.insert(speed[:, indx_car], disc, np.nan)
            points = np.array(
                [unique_car_time, unique_car_pos]).T.reshape(-1, 1, 2)

        segments = np.concatenate([points[:-1], points[1:]], axis=1)
        lc = LineCollection(segments, cmap=my_cmap, norm=norm)

        # Set the values used for color mapping
        lc.set_array(unique_car_speed)
        lc.set_linewidth(1.75)
        cols.append(lc)

    plt.title(title, fontsize=25)
    plt.ylabel('Position (m)', fontsize=20)
    plt.xlabel('Time (s)', fontsize=20)

    for col in cols:
        line = ax.add_collection(col)
    cbar = plt.colorbar(line, ax=ax, norm=norm)
    cbar.set_label('Velocity (m/s)', fontsize=20)
    cbar.ax.tick_params(labelsize=18)

    plt.xticks(fontsize=18)
    plt.yticks(fontsize=18)

    ###########################################################################
    #                      Note: For MergeNetwork only                        #
    # if flow_params['network'] == 'MergeNetwork':                            #
    #     plt.plot(time, [0] * pos.shape[0], linewidth=3, color="white")      #
    #     plt.plot(time, [-0.1] * pos.shape[0], linewidth=3, color="white")   #
    ###########################################################################

    plt.show()


if __name__ == '__main__':
    # create the parser
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='[Flow] Generates time space diagrams for flow networks.',
        epilog='python time_space_diagram.py </path/to/emission>.csv '
               '</path/to/flow_params>.json')

    # required arguments
    parser.add_argument('emission_path', type=str,
                        help='path to the csv file.')
    parser.add_argument('flow_params', type=str,
                        help='path to the flow_params json file.')

    # optional arguments
    parser.add_argument('--steps', type=int, default=1,
                        help='rate at which steps are plotted.')
    parser.add_argument('--title', type=str, default='Time Space Diagram',
                        help='title of the plot.')
    parser.add_argument('--max_speed', type=int, default=8,
                        help='The maximum speed in the color range.')
    parser.add_argument('--min_speed', type=int, default=0,
                        help='The minimum speed in the color range.')
    parser.add_argument('--start', type=float, default=0,
                        help='initial time (in sec) in the plot.')
    parser.add_argument('--stop', type=float, default=float('inf'),
                        help='final time (in sec) in the plot.')

    args = parser.parse_args()

    make_ts_diagram(args.flow_params, args.emission_path, args.min_speed,
                    args.max_speed, args.start, args.stop, args.title)
        ':center_0': inflow_edge_len + premerge + 0.1,
        ':center_1': inflow_edge_len + premerge + 0.1,
        ':bottom_0': 2 * inflow_edge_len + premerge + postmerge + 22.6
    }
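To make the edge-start bookkeeping above concrete, here is a tiny worked example of the `_get_abs_pos` logic: each absolute position is simply the start of the vehicle's current edge plus its relative position on that edge. The numbers are made up and do not correspond to any network above.

# Toy illustration of _get_abs_pos: absolute position = edge start + offset.
edgestarts = {'bottom': 0.0, 'right': 57.6}
edges = ['bottom', 'bottom', 'right']
rel_pos = [0.0, 12.3, 5.0]

abs_pos = [p + edgestarts[e] for e, p in zip(edges, rel_pos)]
print(abs_pos)  # [0.0, 12.3, 62.6]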
pyscan.py
#! /usr/bin/python

# pyscan.py - Python based SYN scanner. A SYN packet is sent through a raw socket.
# If a SYN/ACK is received, the Linux TCP stack sends a RST/ACK, and an open
# port is assumed. Otherwise, a closed or filtered port is assumed.
# Does not rely on scapy, rather creates its own packets and raw sockets.
# Must be run as root.
#
# Thanks to Silver Moon for some example code in his post "Syn flood and
# raw sockets" at http://www.binarytides.com/python-syn-flood-program-raw-sockets
#

import socket, sys, random
from struct import *


# checksum functions - direct from Silver Moon
def checksum(msg):
    s = 0
    # loop taking 2 characters at a time
    for i in range(0, len(msg), 2):
        w = (ord(msg[i]) << 8) + ord(msg[i+1])
        s = s + w

    s = (s >> 16) + (s & 0xffff)

    # complement and mask to a 16-bit value
    s = ~s & 0xffff

    return s


def create_socket(source_ip, dest_ip):
    # create a raw socket

def create_ip_header(source_ip, dest_ip):
    packet = ''

    # ip header fields
    headerlen = 5
    version = 4
    tos = 0
    tot_len = 20 + 20
    id = random.randrange(18000, 65535, 1)
    frag_off = 0
    ttl = 255
    protocol = socket.IPPROTO_TCP
    check = 10
    saddr = socket.inet_aton(source_ip)
    daddr = socket.inet_aton(dest_ip)
    hl_version = (version << 4) + headerlen

    ip_header = pack('!BBHHHBBH4s4s', hl_version, tos, tot_len, id, frag_off,
                     ttl, protocol, check, saddr, daddr)
    return ip_header
''' create_ip_header '''


def create_tcp_syn_header(source_ip, dest_ip, dest_port):
    # tcp header fields
    source = random.randrange(32000, 62000, 1)  # source port
    seq = 0
    ack_seq = 0
    doff = 5

    # tcp flags
    fin = 0
    syn = 1
    rst = 0
    psh = 0
    ack = 0
    urg = 0
    window = socket.htons(8192)  # maximum window size
    check = 0
    urg_ptr = 0

    offset_res = (doff << 4) + 0
    tcp_flags = fin + (syn << 1) + (rst << 2) + (psh << 3) + (ack << 4) + (urg << 5)

    tcp_header = pack('!HHLLBBHHH', source, dest_port, seq, ack_seq,
                      offset_res, tcp_flags, window, check, urg_ptr)

    # pseudo header fields
    source_address = socket.inet_aton(source_ip)
    dest_address = socket.inet_aton(dest_ip)
    placeholder = 0
    protocol = socket.IPPROTO_TCP
    tcp_length = len(tcp_header)

    psh = pack('!4s4sBBH', source_address, dest_address, placeholder,
               protocol, tcp_length)
    psh = psh + tcp_header

    tcp_checksum = checksum(psh)

    # make the tcp header again and fill in the correct checksum
    tcp_header = pack('!HHLLBBHHH', source, dest_port, seq, ack_seq,
                      offset_res, tcp_flags, window, tcp_checksum, urg_ptr)
    return tcp_header
''' create_tcp_syn_header ends '''


def range_scan(source_ip, dest_ip, start_port, end_port):
    syn_ack_received = []  # store the list of open ports here

    # final full packet - syn packets don't have any data
    for j in range(start_port, end_port):
        s = create_socket(source_ip, dest_ip)
        ip_header = create_ip_header(source_ip, dest_ip)
        tcp_header = create_tcp_syn_header(source_ip, dest_ip, j)

        packet = ip_header + tcp_header
        s.sendto(packet, (dest_ip, 0))

        data = s.recvfrom(1024)[0]
        ip_header_len = (ord(data[0]) & 0x0f) * 4
        ip_header_ret = data[0:ip_header_len]
        tcp_header_len = (ord(data[32]) & 0xf0) >> 2
        tcp_header_ret = data[ip_header_len:ip_header_len + tcp_header_len]

        if ord(tcp_header_ret[13]) == 0x12:  # SYN/ACK flags set
            syn_ack_received.append(j)

    return syn_ack_received
''' range_scan ends '''


# Here's the program stub to test the code:
open_port_list = []
ipsource = '10.1.134.33'
ipdest = '10.1.134.33'
start = 100
stop = 450
step = (stop - start) / 10

scan_ports = range(start, stop, step)
if scan_ports[len(scan_ports) - 1] < stop:
    scan_ports.append(stop)

for i in range(len(scan_ports) - 1):
    opl = range_scan(ipsource, ipdest, scan_ports[i], scan_ports[i + 1])
    open_port_list.append(opl)

for i in range(len(open_port_list)):
    print 'Process #: ', i, ' Open ports: ', open_port_list[i]

print 'A list of all open ports found: '
for i in range(len(open_port_list)):
    for j in range(len(open_port_list[i])):
        print open_port_list[i][j], ', '
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_TCP)
    except socket.error, msg:
        print 'Socket could not be created. Error: ', str(msg[0]), ' Message: ', msg[1]
        sys.exit()

    # tell kernel not to put in headers since we are providing it
    s.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
    return s
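The `checksum` routine above is the standard 16-bit ones'-complement Internet checksum used for the TCP pseudo-header. A small self-contained rendition with a worked example follows; it is written with Python 3 bytes for clarity (unlike the Python 2 script above), and a single carry fold suffices for a short message like this.

def inet_checksum(msg):
    # Sum 16-bit big-endian words, fold the carry, complement to 16 bits.
    s = 0
    for i in range(0, len(msg), 2):
        s += (msg[i] << 8) + msg[i + 1]
    s = (s >> 16) + (s & 0xffff)
    return ~s & 0xffff

print(hex(inet_checksum(b'\x01\x02\x03\x04')))  # 0xfbf9, i.e. ~0x0406 & 0xffff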
notify.pb.go
// Copyright 2021 Paingha Joe Alagoa. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0 // protoc v3.14.0 // source: notify.proto package notify import ( proto "github.com/golang/protobuf/proto" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // This is a compile-time assertion that a sufficiently up-to-date version // of the legacy proto package is being used. const _ = proto.ProtoPackageIsVersion4 type SendNotificationRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Content string `protobuf:"bytes,1,opt,name=Content,proto3" json:"Content,omitempty"` } func (x *SendNotificationRequest) Reset() { *x = SendNotificationRequest{} if protoimpl.UnsafeEnabled { mi := &file_notify_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *SendNotificationRequest) String() string { return protoimpl.X.MessageStringOf(x) } func (*SendNotificationRequest) ProtoMessage() {} func (x *SendNotificationRequest) ProtoReflect() protoreflect.Message { mi := &file_notify_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use SendNotificationRequest.ProtoReflect.Descriptor instead. func (*SendNotificationRequest) Descriptor() ([]byte, []int) { return file_notify_proto_rawDescGZIP(), []int{0} } func (x *SendNotificationRequest) GetContent() string { if x != nil { return x.Content } return "" } type SendNotificationResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields Content string `protobuf:"bytes,1,opt,name=Content,proto3" json:"Content,omitempty"` } func (x *SendNotificationResponse) Reset() { *x = SendNotificationResponse{} if protoimpl.UnsafeEnabled { mi := &file_notify_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *SendNotificationResponse) String() string { return protoimpl.X.MessageStringOf(x) } func (*SendNotificationResponse) ProtoMessage() {} func (x *SendNotificationResponse) ProtoReflect() protoreflect.Message { mi := &file_notify_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use SendNotificationResponse.ProtoReflect.Descriptor instead. 
func (*SendNotificationResponse) Descriptor() ([]byte, []int) { return file_notify_proto_rawDescGZIP(), []int{1} } func (x *SendNotificationResponse) GetContent() string { if x != nil { return x.Content } return "" } var File_notify_proto protoreflect.FileDescriptor var file_notify_proto_rawDesc = []byte{ 0x0a, 0x0c, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x33, 0x0a, 0x17, 0x53, 0x65, 0x6e, 0x64, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x34, 0x0a, 0x18, 0x53, 0x65, 0x6e, 0x64, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x32, 0x53, 0x0a, 0x06, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x79, 0x12, 0x49, 0x0a, 0x10, 0x53, 0x65, 0x6e, 0x64, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x2e, 0x53, 0x65, 0x6e, 0x64, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x53, 0x65, 0x6e, 0x64, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x08, 0x5a, 0x06, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_notify_proto_rawDescOnce sync.Once file_notify_proto_rawDescData = file_notify_proto_rawDesc ) func
() []byte { file_notify_proto_rawDescOnce.Do(func() { file_notify_proto_rawDescData = protoimpl.X.CompressGZIP(file_notify_proto_rawDescData) }) return file_notify_proto_rawDescData } var file_notify_proto_msgTypes = make([]protoimpl.MessageInfo, 2) var file_notify_proto_goTypes = []interface{}{ (*SendNotificationRequest)(nil), // 0: SendNotificationRequest (*SendNotificationResponse)(nil), // 1: SendNotificationResponse } var file_notify_proto_depIdxs = []int32{ 0, // 0: Notify.SendNotification:input_type -> SendNotificationRequest 1, // 1: Notify.SendNotification:output_type -> SendNotificationResponse 1, // [1:2] is the sub-list for method output_type 0, // [0:1] is the sub-list for method input_type 0, // [0:0] is the sub-list for extension type_name 0, // [0:0] is the sub-list for extension extendee 0, // [0:0] is the sub-list for field type_name } func init() { file_notify_proto_init() } func file_notify_proto_init() { if File_notify_proto != nil { return } if !protoimpl.UnsafeEnabled { file_notify_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SendNotificationRequest); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } file_notify_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SendNotificationResponse); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_notify_proto_rawDesc, NumEnums: 0, NumMessages: 2, NumExtensions: 0, NumServices: 1, }, GoTypes: file_notify_proto_goTypes, DependencyIndexes: file_notify_proto_depIdxs, MessageInfos: file_notify_proto_msgTypes, }.Build() File_notify_proto = out.File file_notify_proto_rawDesc = nil file_notify_proto_goTypes = nil file_notify_proto_depIdxs = nil }
file_notify_proto_rawDescGZIP
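// Hypothetical usage sketch (not part of the generated file): constructing the
// messages defined above. Only the message API visible in this file is assumed;
// the gRPC client/server stubs would live in a separate *_grpc.pb.go file.
//
//	req := &SendNotificationRequest{Content: "deploy finished"}
//	res := &SendNotificationResponse{Content: req.GetContent()}
//	fmt.Println(res.GetContent()) // "deploy finished"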
start_habittracker.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from habittracker.utils import connect_to_database, init_sqlite_table, check_file_existing from habittracker.commands import get_main_user_choice, display_title_bar, evaluate_main_user_choice def
():
    database_name = "habits.db"

    # Initialize the database if it doesn't exist yet
    if not check_file_existing(database_name):
        init_sqlite_table(database_name)

    # Get DB connection
    connection = connect_to_database(database_name)

    # Start program
    display_title_bar()
    while True:
        user_choice = get_main_user_choice()
        if user_choice != "exit":
            evaluate_main_user_choice(user_choice, connection)
        else:
            break


if __name__ == "__main__":
    main()
main
cli.py
#!/usr/bin/env python import argparse from .database import YamlDatabase as DB from . import utils def cli():
    parser = argparse.ArgumentParser()
    parser.add_argument('-S', '--scope', default='directory',
                        help="flag scope")
    parser.add_argument('-F', '--output-format', default='yaml', dest='format',
                        help="output format")
    parser.add_argument('-g', '--get', help="get a value")
    parser.add_argument('-s', '--set', help="set a value")
    parser.add_argument('-v', '--value', help="the value to store with --set")
    parser.add_argument('-d', '--dump', action="store_true",
                        help="dump the database")
    args = parser.parse_args()

    db = DB(scope=args.scope)
    if args.get:
        utils.print_formatted_message(db.get(query=args.get), format=args.format)
    elif args.set:
        utils.print_formatted_message(db.set(query=args.set, value=args.value), format=args.format)
    elif args.dump:
        utils.print_formatted_message(db.dump(), format=args.format)
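# Hypothetical usage sketch, assuming this `cli` function is wired up as a
# console-script entry point (the command name "yamldb" below is illustrative,
# not defined in this file):
#
#   yamldb --get some.key --output-format json
#   yamldb --set some.key --value 42
#   yamldb --dump
#
# --scope defaults to "directory" and --output-format to "yaml".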
test-checkbox.py
import twilltestlib import twill from twill import namespaces, commands from twill.errors import TwillAssertionError from mechanize import BrowserStateError def setup_module(): global url url = twilltestlib.get_url() def
():
    namespaces.new_local_dict()
    twill.commands.reset_browser()
    browser = twill.get_browser()
    try:
        browser.get_title()
        assert 0, "should never get here"
    except BrowserStateError:
        pass

    commands.go(url)
    commands.go('/test_checkboxes')

    commands.fv('1', 'checkboxtest', 'one')
    commands.fv('1', 'checkboxtest', 'two')
    commands.fv('1', 'checkboxtest', 'three')

    commands.fv('1', 'checkboxtest', '-one')
    commands.fv('1', 'checkboxtest', '-two')
    commands.fv('1', 'checkboxtest', '-three')

    commands.submit()
    assert 'CHECKBOXTEST' not in browser.get_html()

    commands.fv('1', 'checkboxtest', '+one')
    commands.fv('1', 'checkboxtest', '+two')
    commands.fv('1', 'checkboxtest', '+three')

    commands.submit()
    assert 'CHECKBOXTEST: ==one,two,three==' in browser.get_html()

    commands.fv('1', 'checkboxtest', '-one')
    commands.fv('1', 'checkboxtest', '-two')
    commands.fv('1', 'checkboxtest', '-three')

    commands.submit()
    assert 'CHECKBOXTEST' not in browser.get_html()

def test_select_single():
    namespaces.new_local_dict()
    twill.commands.reset_browser()
    browser = twill.get_browser()
    try:
        browser.get_title()
        assert 0, "should never get here"
    except BrowserStateError:
        pass

    commands.go(url)
    commands.go('/test_checkboxes')

    for x in ('1', '0', 'True', 'False'):
        try:
            commands.fv('1', 'checkboxtest', x)
            assert False, ("Should not be able to use a bool style when "
                           "there are multiple checkboxes")
        except AssertionError:
            # A bare `except` here would swallow the assert above and let the
            # test pass silently; re-raise assertion failures explicitly.
            raise
        except Exception:
            pass
test_select_multiple
files.go
// Package files contains functions and structs related to files
package files

import (
	"bufio"
	"fmt"
	"net/http"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"strings"

	"github.com/editorconfig/editorconfig-core-go/v2"
	"github.com/editorconfig-checker/editorconfig-checker/pkg/config"
	"github.com/editorconfig-checker/editorconfig-checker/pkg/utils"
)

// FileInformation is a struct which represents information about a single file
type FileInformation struct {
	Line         string
	Content      string
	FilePath     string
	LineNumber   int
	Editorconfig *editorconfig.Definition
}

// IsExcluded returns whether the file is excluded via arguments or config file
func IsExcluded(filePath string, config config.Config) (bool, error) {
	if len(config.Exclude) == 0 && config.IgnoreDefaults {
		return false, nil
	}

	relativeFilePath, err := GetRelativePath(filePath)
	if err != nil {
		return true, err
	}

	result, err := regexp.MatchString(config.GetExcludesAsRegularExpression(), relativeFilePath)
	if err != nil {
		return true, err
	}

	return result, nil
}

// AddToFiles adds a file to a slice if it isn't already in there
// and meets the requirements and returns the new slice
func AddToFiles(filePaths []string, filePath string, config config.Config) []string {
	contentType, err := GetContentType(filePath)

	config.Logger.Debug("AddToFiles: filePath: %s, contentType: %s", filePath, contentType)

	if err != nil {
		config.Logger.Error("Could not get the ContentType of file: %s", filePath)
		config.Logger.Error(err.Error())
	}

	isExcluded, err := IsExcluded(filePath, config)

	if err == nil && !isExcluded && IsAllowedContentType(contentType, config) {
		config.Logger.Verbose("Add %s to be checked", filePath)
		return append(filePaths, filePath)
	}

	config.Logger.Verbose("Don't add %s to be checked", filePath)
	return filePaths
}

// GetFiles returns all files which should be checked
func GetFiles(config config.Config) ([]string, error) {
	var filePaths []string

	// Handle explicit passed files
	if len(config.PassedFiles) != 0 {
		for _, passedFile := range config.PassedFiles {
			if utils.IsDirectory(passedFile) {
				_ = filepath.Walk(passedFile, func(path string, fi os.FileInfo, err error) error {
					if fi.Mode().IsRegular() {
						filePaths = AddToFiles(filePaths, path, config)
					}

					return nil
				})
			} else {
				filePaths = AddToFiles(filePaths, passedFile, config)
			}
		}

		return filePaths, nil
	}

	byteArray, err := exec.Command("git", "ls-tree", "-r", "--name-only", "HEAD").Output()
	if err != nil {
		// It is not a git repository.
		cwd, err := os.Getwd()
		if err != nil {
			return filePaths, err
		}
		_ = filepath.Walk(cwd, func(path string, fi os.FileInfo, err error) error {
			if fi.Mode().IsRegular() {
				filePaths = AddToFiles(filePaths, path, config)
			}

			return nil
		})
	}

	filesSlice := strings.Split(string(byteArray[:]), "\n")
	for _, filePath := range filesSlice {
		if len(filePath) > 0 {
			fi, err := os.Stat(filePath)

			// The err would be a broken symlink for example,
			// so we want the program to continue but the file should not be checked
			if err == nil && fi.Mode().IsRegular() {
				filePaths = AddToFiles(filePaths, filePath, config)
			}
		}
	}

	return filePaths, nil
}

// ReadLines returns the lines from a file as a slice
func ReadLines(filePath string) []string {
	var lines []string

	fileHandle, _ := os.Open(filePath)
	defer fileHandle.Close()

	fileScanner := bufio.NewScanner(fileHandle)
	for fileScanner.Scan() {
		lines = append(lines, fileScanner.Text())
	}

	return lines
}

// GetContentType returns the content type of a file
func GetContentType(path string) (string, error) {
	fileStat, err := os.Stat(path)
	if err != nil {
		return "", err
	}

	if fileStat.Size() == 0 {
		return "", nil
	}

	file, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer file.Close()

	// Only the first 512 bytes are used to sniff the content type.
	buffer := make([]byte, 512)
	_, err = file.Read(buffer)
	if err != nil {
		return "", err
	}

	// Reset the read pointer if necessary.
	_, err = file.Seek(0, 0)
	if err != nil {
		panic(err)
	}

	// Always returns a valid content-type and "application/octet-stream" if no others seemed to match.
	return http.DetectContentType(buffer), nil
}

// PathExists checks whether a path of a file or directory exists or not
func PathExists(filePath string) bool
// GetRelativePath returns the relative path of a file from the current working directory
func GetRelativePath(filePath string) (string, error) {
	if !filepath.IsAbs(filePath) {
		// Path is already relative. No changes needed
		return filePath, nil
	}

	cwd, err := os.Getwd()
	if err != nil {
		return "", fmt.Errorf("Could not get the current working directory")
	}

	return filepath.Rel(cwd, filePath)
}

// IsAllowedContentType returns whether the contentType is
// an allowed content type to check or not
func IsAllowedContentType(contentType string, config config.Config) bool {
	result := false

	for _, allowedContentType := range config.AllowedContentTypes {
		result = result || strings.Contains(contentType, allowedContentType)
	}

	return result
}
{
	absolutePath, _ := filepath.Abs(filePath)
	_, err := os.Stat(absolutePath)

	return err == nil
}
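// Hypothetical usage sketch (a fully populated config.Config — including its
// Logger — would normally come from the CLI layer; only identifiers defined in
// this file are assumed):
//
//	paths, err := GetFiles(cfg)
//	if err == nil {
//		for _, p := range paths {
//			for i, line := range ReadLines(p) {
//				_ = FileInformation{Line: line, FilePath: p, LineNumber: i + 1}
//			}
//		}
//	}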
market_test.go
package market import ( "testing" "github.com/server-may-cry/bubble-go/models" ) func getMarket() *Market { return NewMarket(Config{ "increase_pack": &Pack{ Reward: RewardStruct{ Increase: map[string]int64{ "credits": 50, }, }, }, "set_pack": &Pack{ Reward: RewardStruct{ Set: map[string]int64{ "credits": 800, }, }, }, }, "cdn://cdn.cdn/") } func TestMarketIncrease(t *testing.T) { market := getMarket() user := models.User{ Credits: 100, } err := market.Buy(&user, "increase_pack") if err != nil { t.Errorf("market.Buy error: %s", err) } if user.Credits != 150 { t.Errorf("Buy(user, \"increase_pack\"): expected %d, actual %d", 150, user.Credits) } } func TestMarketSet(t *testing.T) { market := getMarket() user := models.User{ Credits: 100, } err := market.Buy(&user, "set_pack") if err != nil { t.Errorf("market.Buy error: %s", err) } if user.Credits != 800 { t.Errorf("Buy(user, \"set_pack\"): expected %d, actual %d", 800, user.Credits) } } func TestMarketBuyNotExistPack(t *testing.T) { market := getMarket() user := models.User{ Credits: 100, } err := market.Buy(&user, "pack_not_exist") if err == nil { t.Errorf("error expected on pack %s", "pack_not_exist") } } func
(t *testing.T) {
	market := getMarket()
	pack, err := market.GetPack("increase_pack")
	if pack == nil {
		t.Error("expected to find pack increase_pack")
	}
	if err != nil {
		t.Errorf("no error expected on increase_pack, got %s", err.Error())
	}
}

func TestMarketGetNotExistPack(t *testing.T) {
	market := getMarket()
	pack, err := market.GetPack("pack_not_exist")
	if pack != nil {
		t.Error("no pack expected on pack_not_exist")
	}
	if err == nil {
		t.Error("error expected on pack_not_exist")
	}
}

func TestMarketValidate(t *testing.T) {
	market := getMarket()
	user := models.User{}
	err := market.Validate(&user)
	if err != nil {
		t.Errorf("no error expected on validation, got %s", err.Error())
	}
}
TestMarketGetPack
crc_dat.rs
#[doc = "Register `CRC_DAT` reader"] pub struct R(crate::R<CRC_DAT_SPEC>); impl core::ops::Deref for R { type Target = crate::R<CRC_DAT_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<CRC_DAT_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<CRC_DAT_SPEC>) -> Self { R(reader) } } #[doc = "Register `CRC_DAT` writer"] pub struct W(crate::W<CRC_DAT_SPEC>); impl core::ops::Deref for W { type Target = crate::W<CRC_DAT_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<CRC_DAT_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<CRC_DAT_SPEC>) -> Self { W(writer) } } #[doc = "Field `DATA` reader - CRC Write Data Bits\nUser can write data directly by CPU mode or use PDMA function to write data to this field to perform CRC operation.\nNote: When the write data length is 8-bit mode, the valid data in CRC_DAT register is only DATA\\[7:0\\] bits; if the write data length is 16-bit mode, the valid data in CRC_DAT register is only DATA\\[15:0\\]."] pub struct DATA_R(crate::FieldReader<u32, u32>); impl DATA_R { pub(crate) fn new(bits: u32) -> Self { DATA_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for DATA_R { type Target = crate::FieldReader<u32, u32>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `DATA` writer - CRC Write Data Bits\nUser can write data directly by CPU mode or use PDMA function to write data to this field to perform CRC operation.\nNote: When the write data length is 8-bit mode, the valid data in CRC_DAT register is only DATA\\[7:0\\] bits; if the write data length is 16-bit mode, the valid data in CRC_DAT register is only DATA\\[15:0\\]."] pub struct DATA_W<'a> { w: &'a mut W, } impl<'a> DATA_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W {
impl R { #[doc = "Bits 0:31 - CRC Write Data Bits User can write data directly by CPU mode or use PDMA function to write data to this field to perform CRC operation. Note: When the write data length is 8-bit mode, the valid data in CRC_DAT register is only DATA\\[7:0\\] bits; if the write data length is 16-bit mode, the valid data in CRC_DAT register is only DATA\\[15:0\\]."] #[inline(always)] pub fn data(&self) -> DATA_R { DATA_R::new((self.bits & 0xffff_ffff) as u32) } } impl W { #[doc = "Bits 0:31 - CRC Write Data Bits User can write data directly by CPU mode or use PDMA function to write data to this field to perform CRC operation. Note: When the write data length is 8-bit mode, the valid data in CRC_DAT register is only DATA\\[7:0\\] bits; if the write data length is 16-bit mode, the valid data in CRC_DAT register is only DATA\\[15:0\\]."] #[inline(always)] pub fn data(&mut self) -> DATA_W { DATA_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "CRC Write Data Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [crc_dat](index.html) module"] pub struct CRC_DAT_SPEC; impl crate::RegisterSpec for CRC_DAT_SPEC { type Ux = u32; } #[doc = "`read()` method returns [crc_dat::R](R) reader structure"] impl crate::Readable for CRC_DAT_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [crc_dat::W](W) writer structure"] impl crate::Writable for CRC_DAT_SPEC { type Writer = W; } #[doc = "`reset()` method sets CRC_DAT to value 0"] impl crate::Resettable for CRC_DAT_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
self.w.bits = (self.w.bits & !0xffff_ffff) | (value as u32 & 0xffff_ffff); self.w } }
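// Hypothetical usage sketch (assumes a PAC `Peripherals` struct exposing this
// CRC peripheral; the `pac` path and peripheral field name are illustrative):
//
//     let dp = pac::Peripherals::take().unwrap();
//     // Feed one word into the CRC engine; in 8-/16-bit write-length modes
//     // only DATA[7:0] / DATA[15:0] are considered valid.
//     dp.CRC.crc_dat.write(|w| unsafe { w.data().bits(0xDEAD_BEEF) });
//     let fed = dp.CRC.crc_dat.read().data().bits();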
repository.ts
import { EOL } from 'os'; import * as events from '@aws-cdk/aws-events'; import * as iam from '@aws-cdk/aws-iam'; import * as kms from '@aws-cdk/aws-kms'; import { ArnFormat, IResource, Lazy, RemovalPolicy, Resource, Stack, Token } from '@aws-cdk/core'; import { IConstruct, Construct } from 'constructs'; import { CfnRepository } from './ecr.generated'; import { LifecycleRule, TagStatus } from './lifecycle'; /** * Represents an ECR repository. */ export interface IRepository extends IResource { /** * The name of the repository * @attribute */ readonly repositoryName: string; /** * The ARN of the repository * @attribute */ readonly repositoryArn: string; /** * The URI of this repository (represents the latest image): * * ACCOUNT.dkr.ecr.REGION.amazonaws.com/REPOSITORY * * @attribute */ readonly repositoryUri: string; /** * Returns the URI of the repository for a certain tag. Can be used in `docker push/pull`. * * ACCOUNT.dkr.ecr.REGION.amazonaws.com/REPOSITORY[:TAG] * * @param tag Image tag to use (tools usually default to "latest" if omitted) */ repositoryUriForTag(tag?: string): string; /** * Returns the URI of the repository for a certain tag. Can be used in `docker push/pull`. * * ACCOUNT.dkr.ecr.REGION.amazonaws.com/REPOSITORY[@DIGEST] * * @param digest Image digest to use (tools usually default to the image with the "latest" tag if omitted) */ repositoryUriForDigest(digest?: string): string; /** * Add a policy statement to the repository's resource policy */ addToResourcePolicy(statement: iam.PolicyStatement): iam.AddToResourcePolicyResult; /** * Grant the given principal identity permissions to perform the actions on this repository */ grant(grantee: iam.IGrantable, ...actions: string[]): iam.Grant; /** * Grant the given identity permissions to pull images in this repository. */ grantPull(grantee: iam.IGrantable): iam.Grant; /** * Grant the given identity permissions to pull and push images to this repository. */ grantPullPush(grantee: iam.IGrantable): iam.Grant; /** * Define a CloudWatch event that triggers when something happens to this repository * * Requires that there exists at least one CloudTrail Trail in your account * that captures the event. This method will not create the Trail. * * @param id The id of the rule * @param options Options for adding the rule */ onCloudTrailEvent(id: string, options?: events.OnEventOptions): events.Rule; /** * Defines an AWS CloudWatch event rule that can trigger a target when an image is pushed to this * repository. * * Requires that there exists at least one CloudTrail Trail in your account * that captures the event. This method will not create the Trail. * * @param id The id of the rule * @param options Options for adding the rule */ onCloudTrailImagePushed(id: string, options?: OnCloudTrailImagePushedOptions): events.Rule; /** * Defines an AWS CloudWatch event rule that can trigger a target when the image scan is completed * * * @param id The id of the rule * @param options Options for adding the rule */ onImageScanCompleted(id: string, options?: OnImageScanCompletedOptions): events.Rule; /** * Defines a CloudWatch event rule which triggers for repository events. Use * `rule.addEventPattern(pattern)` to specify a filter. */ onEvent(id: string, options?: events.OnEventOptions): events.Rule; } /** * Base class for ECR repository. Reused between imported repositories and owned repositories. 
*/ export abstract class RepositoryBase extends Resource implements IRepository { /** * The name of the repository */ public abstract readonly repositoryName: string; /** * The ARN of the repository */ public abstract readonly repositoryArn: string; /** * Add a policy statement to the repository's resource policy */ public abstract addToResourcePolicy(statement: iam.PolicyStatement): iam.AddToResourcePolicyResult; /** * The URI of this repository (represents the latest image): * * ACCOUNT.dkr.ecr.REGION.amazonaws.com/REPOSITORY * */ public get repositoryUri() { return this.repositoryUriForTag(); } /** * Returns the URL of the repository. Can be used in `docker push/pull`. * * ACCOUNT.dkr.ecr.REGION.amazonaws.com/REPOSITORY[:TAG] * * @param tag Optional image tag */ public repositoryUriForTag(tag?: string): string { const tagSuffix = tag ? `:${tag}` : ''; return this.repositoryUriWithSuffix(tagSuffix); } /** * Returns the URL of the repository. Can be used in `docker push/pull`. * * ACCOUNT.dkr.ecr.REGION.amazonaws.com/REPOSITORY[@DIGEST] * * @param digest Optional image digest */ public repositoryUriForDigest(digest?: string): string { const digestSuffix = digest ? `@${digest}` : ''; return this.repositoryUriWithSuffix(digestSuffix); } /** * Returns the repository URI, with an appended suffix, if provided. * @param suffix An image tag or an image digest. * @private */ private repositoryUriWithSuffix(suffix?: string): string { const parts = this.stack.splitArn(this.repositoryArn, ArnFormat.SLASH_RESOURCE_NAME); return `${parts.account}.dkr.ecr.${parts.region}.${this.stack.urlSuffix}/${this.repositoryName}${suffix}`; } /** * Define a CloudWatch event that triggers when something happens to this repository * * Requires that there exists at least one CloudTrail Trail in your account * that captures the event. This method will not create the Trail. * * @param id The id of the rule * @param options Options for adding the rule */ public onCloudTrailEvent(id: string, options: events.OnEventOptions = {}): events.Rule { const rule = new events.Rule(this, id, options); rule.addTarget(options.target); rule.addEventPattern({ source: ['aws.ecr'], detailType: ['AWS API Call via CloudTrail'], detail: { requestParameters: { repositoryName: [this.repositoryName], }, }, }); return rule; } /** * Defines an AWS CloudWatch event rule that can trigger a target when an image is pushed to this * repository. * * Requires that there exists at least one CloudTrail Trail in your account * that captures the event. This method will not create the Trail. * * @param id The id of the rule * @param options Options for adding the rule */ public onCloudTrailImagePushed(id: string, options: OnCloudTrailImagePushedOptions = {}): events.Rule { const rule = this.onCloudTrailEvent(id, options); rule.addEventPattern({ detail: { eventName: ['PutImage'], requestParameters: { imageTag: options.imageTag ? [options.imageTag] : undefined, }, }, }); return rule; } /** * Defines an AWS CloudWatch event rule that can trigger a target when an image scan is completed * * * @param id The id of the rule * @param options Options for adding the rule */ public onImageScanCompleted(id: string, options: OnImageScanCompletedOptions = {}): events.Rule { const rule = new events.Rule(this, id, options); rule.addTarget(options.target); rule.addEventPattern({ source: ['aws.ecr'], detailType: ['ECR Image Scan'], detail: { 'repository-name': [this.repositoryName], 'scan-status': ['COMPLETE'], 'image-tags': options.imageTags ?? 
undefined,
      },
    });
    return rule;
  }

  /**
   * Defines a CloudWatch event rule which triggers for repository events. Use
   * `rule.addEventPattern(pattern)` to specify a filter.
   */
  public onEvent(id: string, options: events.OnEventOptions = {}) {
    const rule = new events.Rule(this, id, options);
    rule.addEventPattern({
      source: ['aws.ecr'],
      resources: [this.repositoryArn],
    });
    rule.addTarget(options.target);
    return rule;
  }

  /**
   * Grant the given principal identity permissions to perform the actions on this repository
   */
  public grant(grantee: iam.IGrantable, ...actions: string[]) {
    return iam.Grant.addToPrincipalOrResource({
      grantee,
      actions,
      resourceArns: [this.repositoryArn],
      resourceSelfArns: [],
      resource: this,
    });
  }

  /**
   * Grant the given identity permissions to use the images in this repository
   */
  public grantPull(grantee: iam.IGrantable) {
    const ret = this.grant(grantee, 'ecr:BatchCheckLayerAvailability', 'ecr:GetDownloadUrlForLayer', 'ecr:BatchGetImage');

    iam.Grant.addToPrincipal({
      grantee,
      actions: ['ecr:GetAuthorizationToken'],
      resourceArns: ['*'],
      scope: this,
    });

    return ret;
  }

  /**
   * Grant the given identity permissions to pull and push images to this repository.
   */
  public grantPullPush(grantee: iam.IGrantable) {
    this.grantPull(grantee);
    return this.grant(grantee,
      'ecr:PutImage',
      'ecr:InitiateLayerUpload',
      'ecr:UploadLayerPart',
      'ecr:CompleteLayerUpload');
  }
}

/**
 * Options for the onCloudTrailImagePushed method
 */
export interface OnCloudTrailImagePushedOptions extends events.OnEventOptions {
  /**
   * Only watch changes to this image tag
   *
   * @default - Watch changes to all tags
   */
  readonly imageTag?: string;
}

/**
 * Options for the OnImageScanCompleted method
 */
export interface OnImageScanCompletedOptions extends events.OnEventOptions {
  /**
   * Only watch changes to the image tags specified.
   * Leave it undefined to watch the full repository.
   *
   * @default - Watch the changes to the repository with all image tags
   */
  readonly imageTags?: string[];
}

export interface RepositoryProps {
  /**
   * Name for this repository
   *
   * @default Automatically generated name.
   */
  readonly repositoryName?: string;

  /**
   * The kind of server-side encryption to apply to this repository.
   *
   * If you choose KMS, you can specify a KMS key via `encryptionKey`. If
   * encryptionKey is not specified, an AWS managed KMS key is used.
   *
   * @default - `KMS` if `encryptionKey` is specified, or `AES256` otherwise.
   */
  readonly encryption?: RepositoryEncryption;

  /**
   * External KMS key to use for repository encryption.
   *
   * The 'encryption' property must be either not specified or set to "KMS".
   * An error will be emitted if encryption is set to "AES256".
   *
   * @default - If encryption is set to `KMS` and this property is undefined,
   * an AWS managed KMS key is used.
   */
  readonly encryptionKey?: kms.IKey;

  /**
   * Life cycle rules to apply to this registry
   *
   * @default No life cycle rules
   */
  readonly lifecycleRules?: LifecycleRule[];

  /**
   * The AWS account ID associated with the registry that contains the repository.
   *
   * @see https://docs.aws.amazon.com/AmazonECR/latest/APIReference/API_PutLifecyclePolicy.html
   * @default The default registry is assumed.
   */
  readonly lifecycleRegistryId?: string;

  /**
   * Determine what happens to the repository when the resource/stack is deleted.
   *
   * @default RemovalPolicy.Retain
   */
  readonly removalPolicy?: RemovalPolicy;

  /**
   * Enable the scan on push when creating the repository
   *
   * @default false
   */
  readonly imageScanOnPush?: boolean;

  /**
   * The tag mutability setting for the repository.
If this parameter is omitted, the default setting of MUTABLE will be used which will allow image tags to be overwritten. * * @default TagMutability.MUTABLE */ readonly imageTagMutability?: TagMutability; } export interface RepositoryAttributes { readonly repositoryName: string; readonly repositoryArn: string; } /** * Define an ECR repository */ export class Repository extends RepositoryBase { /** * Import a repository */ public static fromRepositoryAttributes(scope: Construct, id: string, attrs: RepositoryAttributes): IRepository { class Import extends RepositoryBase { public readonly repositoryName = attrs.repositoryName; public readonly repositoryArn = attrs.repositoryArn; public addToResourcePolicy(_statement: iam.PolicyStatement): iam.AddToResourcePolicyResult { // dropped return { statementAdded: false }; } } return new Import(scope, id); } public static fromRepositoryArn(scope: Construct, id: string, repositoryArn: string): IRepository { // if repositoryArn is a token, the repository name is also required. this is because // repository names can include "/" (e.g. foo/bar/myrepo) and it is impossible to // parse the name from an ARN using CloudFormation's split/select. if (Token.isUnresolved(repositoryArn)) { throw new Error('"repositoryArn" is a late-bound value, and therefore "repositoryName" is required. Use `fromRepositoryAttributes` instead'); } const repositoryName = repositoryArn.split('/').slice(1).join('/'); class Import extends RepositoryBase { public repositoryName = repositoryName; public repositoryArn = repositoryArn; public addToResourcePolicy(_statement: iam.PolicyStatement): iam.AddToResourcePolicyResult { // dropped return { statementAdded: false }; } } return new Import(scope, id, { environmentFromArn: repositoryArn, }); } public static fromRepositoryName(scope: Construct, id: string, repositoryName: string): IRepository { class Import extends RepositoryBase { public repositoryName = repositoryName; public repositoryArn = Repository.arnForLocalRepository(repositoryName, scope); public addToResourcePolicy(_statement: iam.PolicyStatement): iam.AddToResourcePolicyResult { // dropped return { statementAdded: false }; } } return new Import(scope, id); } /** * Returns an ECR ARN for a repository that resides in the same account/region * as the current stack. 
   */
  public static arnForLocalRepository(repositoryName: string, scope: IConstruct, account?: string): string {
    return Stack.of(scope).formatArn({
      account,
      service: 'ecr',
      resource: 'repository',
      resourceName: repositoryName,
    });
  }

  private static validateRepositoryName(physicalName: string) {
    const repositoryName = physicalName;
    if (!repositoryName || Token.isUnresolved(repositoryName)) {
      // the name is a late-bound value, not a defined string,
      // so skip validation
      return;
    }

    const errors: string[] = [];

    // Rules codified from https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecr-repository.html
    if (repositoryName.length < 2 || repositoryName.length > 256) {
      errors.push('Repository name must be at least 2 and no more than 256 characters');
    }
    const isPatternMatch = /^(?:[a-z0-9]+(?:[._-][a-z0-9]+)*\/)*[a-z0-9]+(?:[._-][a-z0-9]+)*$/.test(repositoryName);
    if (!isPatternMatch) {
      errors.push('Repository name must follow the specified pattern: (?:[a-z0-9]+(?:[._-][a-z0-9]+)*/)*[a-z0-9]+(?:[._-][a-z0-9]+)*');
    }

    if (errors.length > 0) {
      throw new Error(`Invalid ECR repository name (value: ${repositoryName})${EOL}${errors.join(EOL)}`);
    }
  }

  public readonly repositoryName: string;
  public readonly repositoryArn: string;
  private readonly lifecycleRules = new Array<LifecycleRule>();
  private readonly registryId?: string;
  private policyDocument?: iam.PolicyDocument;

  constructor(scope: Construct, id: string, props: RepositoryProps = {}) {
    super(scope, id, {
      physicalName: props.repositoryName,
    });

    Repository.validateRepositoryName(this.physicalName);

    const resource = new CfnRepository(this, 'Resource', {
      repositoryName: this.physicalName,
      // It says "Text", but they actually mean "Object".
      repositoryPolicyText: Lazy.any({ produce: () => this.policyDocument }),
      lifecyclePolicy: Lazy.any({ produce: () => this.renderLifecyclePolicy() }),
      imageScanningConfiguration: props.imageScanOnPush ? { scanOnPush: true } : { scanOnPush: false },
      imageTagMutability: props.imageTagMutability || undefined,
      encryptionConfiguration: this.parseEncryption(props),
    });

    resource.applyRemovalPolicy(props.removalPolicy);

    this.registryId = props.lifecycleRegistryId;
    if (props.lifecycleRules) {
      props.lifecycleRules.forEach(this.addLifecycleRule.bind(this));
    }

    this.repositoryName = this.getResourceNameAttribute(resource.ref);
    this.repositoryArn = this.getResourceArnAttribute(resource.attrArn, {
      service: 'ecr',
      resource: 'repository',
      resourceName: this.physicalName,
    });
  }

  public addToResourcePolicy(statement: iam.PolicyStatement): iam.AddToResourcePolicyResult {
    if (this.policyDocument === undefined) {
      this.policyDocument = new iam.PolicyDocument();
    }
    this.policyDocument.addStatements(statement);
    // The statement has been added to the (lazily rendered) policy document.
    return { statementAdded: true, policyDependable: this.policyDocument };
  }

  protected validate(): string[] {
    const errors = super.validate();
    errors.push(...this.policyDocument?.validateForResourcePolicy() || []);
    return errors;
  }

  /**
   * Add a life cycle rule to the repository
   *
   * Life cycle rules automatically expire images from the repository that match
   * certain conditions.
   */
  public addLifecycleRule(rule: LifecycleRule) {
    // Validate rule here so users get errors at the expected location
    if (rule.tagStatus === undefined) {
      rule = { ...rule, tagStatus: rule.tagPrefixList === undefined ?
TagStatus.ANY : TagStatus.TAGGED }; } if (rule.tagStatus === TagStatus.TAGGED && (rule.tagPrefixList === undefined || rule.tagPrefixList.length === 0)) { throw new Error('TagStatus.Tagged requires the specification of a tagPrefixList'); } if (rule.tagStatus !== TagStatus.TAGGED && rule.tagPrefixList !== undefined) { throw new Error('tagPrefixList can only be specified when tagStatus is set to Tagged'); } if ((rule.maxImageAge !== undefined) === (rule.maxImageCount !== undefined)) { throw new Error(`Life cycle rule must contain exactly one of 'maxImageAge' and 'maxImageCount', got: ${JSON.stringify(rule)}`); } if (rule.tagStatus === TagStatus.ANY && this.lifecycleRules.filter(r => r.tagStatus === TagStatus.ANY).length > 0) { throw new Error('Life cycle can only have one TagStatus.Any rule'); } this.lifecycleRules.push({ ...rule }); } /** * Render the life cycle policy object */ private renderLifecyclePolicy(): CfnRepository.LifecyclePolicyProperty | undefined { const stack = Stack.of(this); let lifecyclePolicyText: any; if (this.lifecycleRules.length === 0 && !this.registryId) { return undefined; } if (this.lifecycleRules.length > 0) { lifecyclePolicyText = JSON.stringify(stack.resolve({ rules: this.orderedLifecycleRules().map(renderLifecycleRule), })); } return { lifecyclePolicyText, registryId: this.registryId, }; } /** * Return life cycle rules with automatic ordering applied. * * Also applies validation of the 'any' rule. */ private orderedLifecycleRules(): LifecycleRule[] { if (this.lifecycleRules.length === 0) { return []; }
const prioritizedRules = this.lifecycleRules.filter(r => r.rulePriority !== undefined && r.tagStatus !== TagStatus.ANY); const autoPrioritizedRules = this.lifecycleRules.filter(r => r.rulePriority === undefined && r.tagStatus !== TagStatus.ANY); const anyRules = this.lifecycleRules.filter(r => r.tagStatus === TagStatus.ANY); if (anyRules.length > 0 && anyRules[0].rulePriority !== undefined && autoPrioritizedRules.length > 0) { // Supporting this is too complex for very little value. We just prohibit it. throw new Error("Cannot combine prioritized TagStatus.Any rule with unprioritized rules. Remove rulePriority from the 'Any' rule."); } const prios = prioritizedRules.map(r => r.rulePriority!); let autoPrio = (prios.length > 0 ? Math.max(...prios) : 0) + 1; const ret = new Array<LifecycleRule>(); for (const rule of prioritizedRules.concat(autoPrioritizedRules).concat(anyRules)) { ret.push({ ...rule, rulePriority: rule.rulePriority ?? autoPrio++, }); } // Do validation on the final array--might still be wrong because the user supplied all prios, but incorrectly. validateAnyRuleLast(ret); return ret; } /** * Set up key properties and return the Repository encryption property from the * user's configuration. */ private parseEncryption(props: RepositoryProps): CfnRepository.EncryptionConfigurationProperty | undefined { // default based on whether encryptionKey is specified const encryptionType = props.encryption ?? (props.encryptionKey ? RepositoryEncryption.KMS : RepositoryEncryption.AES_256); // if encryption key is set, encryption must be set to KMS. if (encryptionType !== RepositoryEncryption.KMS && props.encryptionKey) { throw new Error(`encryptionKey is specified, so 'encryption' must be set to KMS (value: ${encryptionType.value})`); } if (encryptionType === RepositoryEncryption.AES_256) { return undefined; } if (encryptionType === RepositoryEncryption.KMS) { return { encryptionType: 'KMS', kmsKey: props.encryptionKey?.keyArn, }; } throw new Error(`Unexpected 'encryptionType': ${encryptionType}`); } } function validateAnyRuleLast(rules: LifecycleRule[]) { const anyRules = rules.filter(r => r.tagStatus === TagStatus.ANY); if (anyRules.length === 1) { const maxPrio = Math.max(...rules.map(r => r.rulePriority!)); if (anyRules[0].rulePriority !== maxPrio) { throw new Error(`TagStatus.Any rule must have highest priority, has ${anyRules[0].rulePriority} which is smaller than ${maxPrio}`); } } } /** * Render the lifecycle rule to JSON */ function renderLifecycleRule(rule: LifecycleRule) { return { rulePriority: rule.rulePriority, description: rule.description, selection: { tagStatus: rule.tagStatus || TagStatus.ANY, tagPrefixList: rule.tagPrefixList, countType: rule.maxImageAge !== undefined ? CountType.SINCE_IMAGE_PUSHED : CountType.IMAGE_COUNT_MORE_THAN, countNumber: rule.maxImageAge?.toDays() ?? rule.maxImageCount, countUnit: rule.maxImageAge !== undefined ? 'days' : undefined, }, action: { type: 'expire', }, }; } /** * Select images based on counts */ const enum CountType { /** * Set a limit on the number of images in your repository */ IMAGE_COUNT_MORE_THAN = 'imageCountMoreThan', /** * Set an age limit on the images in your repository */ SINCE_IMAGE_PUSHED = 'sinceImagePushed', } /** * The tag mutability setting for your repository. */ export enum TagMutability { /** * allow image tags to be overwritten. */ MUTABLE = 'MUTABLE', /** * all image tags within the repository will be immutable which will prevent them from being overwritten. 
*/ IMMUTABLE = 'IMMUTABLE', } /** * Indicates whether server-side encryption is enabled for the object, and whether that encryption is * from the AWS Key Management Service (AWS KMS) or from Amazon S3 managed encryption (SSE-S3). * @see https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html#SysMetadata */ export class RepositoryEncryption { /** * 'AES256' */ public static readonly AES_256 = new RepositoryEncryption('AES256'); /** * 'KMS' */ public static readonly KMS = new RepositoryEncryption('KMS'); /** * @param value the string value of the encryption */ protected constructor(public readonly value: string) { } }
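// Hypothetical usage sketch (`stack`, `someRole` and `Duration` come from the
// surrounding CDK app and @aws-cdk/core; everything else matches the constructs
// defined above):
//
//   const repo = new Repository(stack, 'Repo', {
//     imageScanOnPush: true,
//     imageTagMutability: TagMutability.IMMUTABLE,
//   });
//   // Exactly one of maxImageAge / maxImageCount per rule (see addLifecycleRule).
//   repo.addLifecycleRule({ tagPrefixList: ['prod'], maxImageCount: 10 });
//   repo.addLifecycleRule({ maxImageAge: Duration.days(30) }); // becomes the TagStatus.ANY rule
//   repo.grantPull(someRole);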
App.test.tsx
import React from "react"; import ReactDOM from "react-dom"; import App from "./projects/App"; it("renders without crashing", () => { const div = document.createElement("div"); ReactDOM.render(<App />, div);
  ReactDOM.unmountComponentAtNode(div);
});
laplace.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # @File : fedhf\api\dpm\laplace_noise.py # @Time : 2022-05-02 22:39:42 # @Author : Bingjie Yan # @Email : [email protected] # @License : Apache License 2.0 import numpy as np import torch def laplace_noise(sensitivity, size, epsilon, **kwargs): """ Generate Laplace noise with the given sensitivity. :param sensitivity: the sensitivity of the privacy mechanism :param size: the size of the noise :param epsilon: the privacy parameter :param kwargs: other parameters :return: the generated noise """ noise_scale = sensitivity / epsilon return np.random.laplace(0, noise_scale, size)
""" Clip the model parameters. :param model: the model :param clip: the clipping bound :return: None """ for k, v in model.named_parameters(): v.grad /= max(1, v.grad.norm(1) / clip)
def laplace_clip(model: torch.nn.Module, clip: float):
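# Hypothetical usage sketch of a differentially private gradient step that
# combines the two helpers above (`model`, `loss` and all hyper-parameters are
# placeholders, not part of this module):
#
#   loss.backward()
#   laplace_clip(model, clip=1.0)            # bound each grad's L1 norm by `clip`
#   for _, param in model.named_parameters():
#       noise = laplace_noise(sensitivity=1.0, size=param.grad.shape, epsilon=0.5)
#       param.grad += torch.from_numpy(noise).to(param.grad)  # scale b = sensitivity / epsilon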
init.go
// Copyright 2017 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package compare import ( "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/tpl/internal" ) const name = "compare" func init() { f := func(d *deps.Deps) *internal.TemplateFuncsNamespace { ctx := New() ns := &internal.TemplateFuncsNamespace{ Name: name, Context: func(args ...interface{}) interface{} { return ctx }, } ns.AddMethodMapping(ctx.Default, []string{"default"}, [][2]string{ {`{{ "Hugo Rocks!" | default "Hugo Rules!" }}`, `Hugo Rocks!`}, {`{{ "" | default "Hugo Rules!" }}`, `Hugo Rules!`}, }, ) ns.AddMethodMapping(ctx.Eq, []string{"eq"}, [][2]string{ {`{{ if eq .Section "blog" }}current{{ end }}`, `current`}, }, ) ns.AddMethodMapping(ctx.Ge, []string{"ge"}, [][2]string{ {`{{ if ge .Hugo.Version "0.36" }}Reasonable new Hugo version!{{ end }}`, `Reasonable new Hugo version!`}, }, ) ns.AddMethodMapping(ctx.Gt, []string{"gt"}, [][2]string{}, ) ns.AddMethodMapping(ctx.Le, []string{"le"}, [][2]string{}, )
[]string{"lt"}, [][2]string{}, ) ns.AddMethodMapping(ctx.Ne, []string{"ne"}, [][2]string{}, ) ns.AddMethodMapping(ctx.Conditional, []string{"cond"}, [][2]string{ {`{{ cond (eq (add 2 2) 4) "2+2 is 4" "what?" | safeHTML }}`, `2+2 is 4`}, }, ) return ns } internal.AddTemplateFuncsNamespace(f) }
ns.AddMethodMapping(ctx.Lt,
adapter.py
"""This module provides different adapter classes that allow for a smoother combination of Qt and the Deep Learning ToolBox. """ # standard imports from typing import Iterator, Iterable, Any, Callable import logging # Qt imports from PyQt5.QtCore import Qt from PyQt5.QtGui import QKeyEvent from PyQt5.QtWidgets import QComboBox, QListWidget, QListWidgetItem # GUI imports from .utils import qtName, protect, QDebug # logging LOG = logging.getLogger(__name__) class ItemAdapter(QDebug): """This class provides functionality that can be used by QWidgets that allow to choose from lists of items, like `QComboBox` and `QListWidget`. It acts as a translator mapping between the data structures used in the Deep Learning ToolBox and the Qt widgets. The QWidgets allow to store items and associated data in different ways: * The `QListWidget` uses `QListWidgetItem`s to represent the list items. Such an item is not a QWidget, but holds some information specifying display properties (like foreground and background color or icons), the text value of the item and it allows to store additional associated user date by introducing specific roles. * The `QComboBox` does not use an explict class to represent list items, but it also allows to set display properties and to store associated information for each item using roles. Both Widgets have the following comonalities: * New items can be registered with `QComboBox.addItem(text, [icon], [userData])` and `QListWidget.addItem(label=text)` * Items can be accessed by index: `QComboBox.itemText(index)` and `QListWidget.item(row).text()` * Items can be accessed by text: `QComboBox.findText(text)` gives a single index while `QList.findItems(text)` returns a list of item objects. * Items can be removed: `QComboBox.removeItem(index)` and `QListWidget.takeItem(QListWidget.item(index)) * There may be a current item (selected item). The numerical index can be obtained by `QComboBox.currentIndex()` and `QListWidget.currentRow()` * The text of the current item can be obtained by `QComboBox.currentText()` and `QListWidget.currentItem().text()` * data associated with the current item can be obtained by `QComboBox.currentData(role)` and `QListWidget.currentItem().data(role)` """ _itemToText: Callable[[Any], str] = str def __init_subclass__(cls, itemType: type = None, itemToText: Callable[[Any], str] = None, **kwargs) -> None: super().__init_subclass__(**kwargs) if itemType is not None: setattr(cls, qtName(itemType.__name__), cls._currentItem) setattr(cls, qtName('set_' + itemType.__name__), cls._currentItem) if itemToText is not None: cls._itemToText = staticmethod(itemToText) print(f"DEBUG1[{cls.__name__}]: itemToText:", itemToText, cls._itemToText) def __init__(self, itemToText: Callable[[Any], str] = None, **kwargs) -> None: super().__init__(**kwargs) self.setItemToText(itemToText) # # methods to be implemented by subclasses # def _items(self) -> Iterator[Any]: """An iterator for the items in this :py:class:`ItemAdapter`. """ raise NotImplementedError("A 'ItemAdapter' has to implement " "the _items() method") def _addItem(self, item: Any) -> None: """Add an item to this :py:class:`ItemAdapter`. It is assumed that the item is not yet contained in this :py:class:`ItemAdapter`. """ raise NotImplementedError("A 'ItemAdapter' has to implement " "the _addItem() method") def _removeItem(self, item: Any) -> None: """Remove an item from this :py:class:`ItemAdapter`. It is assumed that the item is contained in this :py:class:`ItemAdapter`, otherwise a :py:class:`ValueError` is raised. 
""" raise NotImplementedError("A 'ItemAdapter' has to implement " "the _removeElement() method") def _currentItem(self) -> Any: """Get the currently selected item. This may be `None` if no itm is selected. """ raise NotImplementedError("A 'ItemAdapter' has to implement " "the _currentItem() method") def _setCurrentItem(self, item: Any) -> None: """Select the given entry in this :py:class:`ItemAdapter`. Arguments --------- item: Any The item to become the current item. If the item is not contained in this :py:class:`ItemAdapter` (e.g. if `item` is `None`), the current will be set to `None`. """ raise NotImplementedError("A 'ItemAdapter' has to implement " "the _setCurrentItem() method") # # Implemented methods # def _countItems(self) -> int: """Get the number of items in this :py:class:`ItemAdapter`. """ return sum(1 for _ in self._items()) def _textForItem(self, item: Any) -> str: """Get the text to be display from a given item. """ return self._itemToText(item) def _formatItem(self, item: Any) -> None: """May be implemented by a subclass to format an item. This method is only called if the item is currently displayed by this :py:class:`ItemAdapter` (has been added and was not removed), but it may be called several times for the same item (to trigger an update of this item). The base implementation does nothing, but derived classes may overwrite this method to allow for fancy formating. """ def _getItemAt(self, index: int) -> Any: """ Raises ------ IndexError: The index provided is invalid. """ try: return next((x for i, x in enumerate(self._items()) if i == index)) except StopIteration: raise IndexError(f"Index {index} beyond end of items.") def _getTextAt(self, index: int) -> str: """ Raises ------ IndexError: The index provided is invalid. """ return self._textForItem(self._getItemAt(index)) def _indexOfItem(self, item: Any) -> int: """ Raises ------ LookupError: The given item is not found in this :py:class:`ItemAdapter`. """ try: return next(i for i, x in enumerate(self._items()) if x == item) except StopIteration: raise LookupError(f"Item {item} not found.") def _indexOfText(self, text: str) -> int: """ Raises ------ LookupError: The given text is not found in this :py:class:`ItemAdapter`. """ try: return next(i for i, t in enumerate(self._texts()) if t == text) except StopIteration: raise LookupError(f"Item with text '{text}' not found") def _findItem(self, text: str) -> Any: """ Raises ------ LookupError: The given text is not found in this :py:class:`ItemAdapter`. """ try: return next(item for item in self._items() if self._textForItem(item) == text) except StopIteration: raise LookupError(f"Item with text '{text}' not found.") def _setCurrentText(self, text: str) -> None: """ """ self._setCurrentItem(self._findItem(text)) def _texts(self) -> Iterator[str]: """An iterator for the texts presented by this :py:class:`ItemAdapter`. """ for item in self._items(): yield self._textForItem(item) def _removeText(self, text: str) -> None: """Remove the item with the given text. This may be overwritten by subclasses when a more efficient implementation is possible. """ self._removeItem(self._findItem(text)) def _removeItemAt(self, index: int) -> None: """Remove the item at the given index. Raises ------ IndexError: The index provided is invalid. """ self._removeItem(self._getItemAt(index)) def _removeAllItems(self) -> None: """Remove all items in this :py:class:`ItemAdapter`. """ try: self._removeItemAt(0) except IndexError: pass # no item left to remove
    def _formatAllItems(self) -> None:
        """
        """
        for item in self._items():
            self._formatItem(item)

    def _updateAllItems(self) -> None:
        """Update the display of the list elements. This may be implemented
        by subclasses that would like to adapt the style of display
        depending on the state of the element.

        This method will be called when the list has been updated
        (e.g. by directly adding or removing elements, or by filling
        the list from some iterable), but subclasses may also call this
        method proactively in response to notifications.
        """

    #
    # public interface
    #

    def setFromIterable(self, iterable: Iterable) -> None:
        """Set the items in this :py:class:`ItemAdapter` from an
        iterable. This will first remove the old items and then add
        the new items.
        """
        self._removeAllItems()
        for item in iterable:
            self._addItem(item)

    def updateFromIterable(self, iterable: Iterable) -> None:
        """Update the items in this :py:class:`ItemAdapter` from an
        iterable. Items from the iterable, that are not yet contained
        in the list are added, while items originally contained in
        this :py:class:`ItemAdapter`, that are not iterated by the
        iterable, are removed.
        """
        # 1. Create a set containing the texts for items already contained
        #    in this list (this is used for bookkeeping).
        bookkeeping = set(self._texts())

        # 2. Iterate over entries from the iterable and add entries
        #    missing in this list.
        for item in iterable:
            text = self._textForItem(item)
            if text in bookkeeping:
                bookkeeping.remove(text)
            else:
                self._addItem(item)

        # 3. Remove items from this list that are no longer present
        for text in bookkeeping:
            self._removeText(text)

    def setItemToText(self, itemToText: Callable[[Any], str]) -> None:
        """Set the function to be used when converting items to their
        textual presentation.
        """
        if itemToText is None:
            self.__dict__.pop('_itemToText', None)
        else:
            self._itemToText = itemToText
        self._formatAllItems()

    @protect
    def keyPressEvent(self, event: QKeyEvent) -> None:
        """Process key events. The :py:class:`ItemAdapter` supports
        the following keys:

        C: clear the currently selected entry

        Note: in a :py:class:`QComboBox` this event is only received
        if the combobox is closed (not while currently selecting an
        entry).
        """
        key = event.key()
        LOG.debug("ItemAdapter[%s].keyPressEvent: key=%d",
                  type(self).__name__, key)
        if key == Qt.Key_C:  # clear
            self._setCurrentItem(None)
        elif key == Qt.Key_Y:  # no itemToText function (inherit from super)
            self.setItemToText(None)
        elif key == Qt.Key_Z:  # simple str() as itemToText function (debug)
            self.setItemToText(str)
        elif hasattr(super(), 'keyPressEvent'):
            super().keyPressEvent(event)
        else:
            event.ignore()

    def debug(self) -> None:
        """Output debug information for this :py:class:`ItemAdapter`.
        """
        if hasattr(super(), 'debug'):
            super().debug()
        print(f"debug: ItemAdapter[{type(self).__name__}]: "
              f"with {self._countItems()} entries:")
        for index, item in enumerate(self._items()):
            print(f"debug:{'**' if item is self._currentItem() else ' '}"
                  f"({index+1}) {self._textForItem(item)} "
                  f"[{repr(item)}]")


class QAdaptedComboBox(ItemAdapter, QComboBox):
    """A :py:class:`QComboBox` implementing the
    :py:class:`ItemAdapter` interface.
    """

    #
    # methods to be implemented by subclasses
    #

    def _countItems(self) -> int:
        """Get the number of items in this :py:class:`QAdaptedComboBox`.
        """
        return self.count()

    def _items(self) -> Iterator[Any]:
        """An iterator for the items in this
        :py:class:`QAdaptedComboBox`.
""" for index in range(self.count()): yield self.itemData(index) def _texts(self) -> Iterator[str]: """An iterator for the texts presented by this :py:class:`QAdaptedComboBox`. """ for index in range(self.count()): yield self.itemText(index) def _addItem(self, item: Any) -> None: """Add an item to this :py:class:`QAdaptedComboBox`. It is assumed that the item is not yet contained in this :py:class:`QAdaptedComboBox`. """ self.addItem(self._textForItem(item), item) self._formatItem(item) def _removeItem(self, item: Any) -> None: """Remove an item from this :py:class:`QAdaptedComboBox`. It is assumed that the item is contained in this :py:class:`QAdaptedComboBox`, otherwise a :py:class:`ValueError` is raised. """ self._removeItemAt(self._indexOfItem(item)) def _removeItemAt(self, index: int) -> None: """Remove the item at the given index. """ self.removeItem(index) def _removeText(self, text: str) -> None: """Remove the item with the given text. This may be overwritten by subclasses when a more efficient implementation is possible. """ self._removeItemAt(self._indexOfText(text)) def _formatItemAt(self, index: int) -> None: """Format the item at the given index to reflect the state of the underlying item. This method may be extended by subclasses. """ self.setItemText(index, self._textForItem(self.itemData(index))) def _formatItem(self, item: Any) -> None: """Update the format of the item's presentation in this :py:class:`QAdaptedComboBox` to reflect its state. """ self._formatItemAt(self._indexOfItem(item)) def _formatAllItems(self) -> None: """Format all items in this :py:class:`QAdaptedComboBox`. """ for index in range(self.count()): self._formatItemAt(index) def _currentItem(self) -> Any: """Get the currently selected item. This may be `None` if no itm is selected. """ return self.currentData() def _setCurrentItem(self, item: Any) -> None: """Select the given entry in this :py:class:`QAdaptedComboBox`. Arguments --------- item: Any The item to become the current item. If the item is not contained in this :py:class:`QAdaptedComboBox` (e.g. if `item` is `None`), the current will be set to `None`. """ try: self.setCurrentIndex(self._indexOfItem(item)) except LookupError: # For an empty QComboBox or a QComboBox in which no # current entry is set, the index is -1 (which is also # returned by QComboBox.findText if the entry is not found). self.setCurrentIndex(-1) class QAdaptedListWidget(ItemAdapter, QListWidget): """A :py:class:`QListWidget` implementing the :py:class:`ItemAdapter` interface. """ def __init__(self, **kwargs) -> None: super().__init__(**kwargs) self._formater = None def setListWidgetItemFormater(self, formater: Callable[[QListWidgetItem], None]) -> None: """Set a formater for the list items. """ self._formater = formater self._formatAllItems() def updateFormat(self) -> None: """Update the format of all items in this :py:class:`QAdaptedListWidget`. """ self._formatAllItems() # # methods to be implemented by subclasses # def _countItems(self) -> int: """Get the number of items in this :py:class:`QAdaptedListWidget`. """ return self.count() def _qitem(self, item: Any) -> QListWidgetItem: """Get the :py:class:`QListWidgetItem` that holds the given item. """ return next((qitem for qitem in self._qitems() if qitem.data(Qt.UserRole) is item), None) def _qitems(self) -> Iterator[QListWidgetItem]: """An :py:class:`Iterator` for the :py:class:`QListWidgetItem` in this :py:class:`QAdaptedListWidget`. 
""" for index in range(self.count()): yield self.item(index) def _formatQItem(self, qitem: QListWidgetItem) -> None: """Format the given :py:class:`QListWidgetItem` to reflect the state of the underlying item. This method may be extended by subclasses. """ qitem.setText(self._textForItem(qitem.data(Qt.UserRole))) if self._formater is not None: self._formater(qitem) def _items(self) -> Iterator[Any]: """An iterator for the items in this :py:class:`QAdaptedComboBox`. """ for qitem in self._qitems(): yield qitem.data(Qt.UserRole) def _texts(self) -> Iterator[str]: """An iterator for the texts presented by this :py:class:`QAdaptedListWidget`. """ for qitem in self._qitems(): yield qitem.text() def _addItem(self, item: Any) -> None: """Add an item to this :py:class:`QAdaptedComboBox`. It is assumed that the item is not yet contained in this :py:class:`QAdaptedListWidget`. """ qitem = QListWidgetItem(self._textForItem(item)) qitem.setData(Qt.UserRole, item) self.addItem(qitem) self._formatQItem(qitem) def _formatItem(self, item: Any) -> None: """Update the format of the item's presentation in this :py:class:`QAdaptedListWidget` to reflect its state. """ self._formatQItem(self._qitem(item)) def _formatAllItems(self) -> None: """Format all items in this :py:class:`QAdaptedListWidget`. """ for qitem in self._qitems(): self._formatQItem(qitem) def _removeItem(self, item: Any) -> None: """Remove an item from this :py:class:`QAdaptedListWidget`. It is assumed that the item is contained in this :py:class:`QAdaptedComboBox`, otherwise a :py:class:`ValueError` is raised. """ qitem = self.takeItem(self._indexOfItem(item)) del qitem def _currentItem(self) -> Any: """Get the currently selected item. This may be `None` if no itm is selected. """ qitem = self.currentItem() return None if qitem is None else qitem.data(Qt.UserRole) def _setCurrentItem(self, item: Any) -> None: """Select the given entry in this :py:class:`QAdaptedListWidget`. Arguments --------- item: Any The item to become the current item. If the item is not contained in this :py:class:`QAdaptedListWidget` (e.g. if `item` is `None`), the current will be set to `None`. """ try: self.setCurrentRow(self._indexOfItem(item)) except LookupError: self.setCurrentRow(-1)
callback_declaration_bindings.rs
use super::{ ast::AttachFunction, dependency_resolver::DependenciesResolver, types::StructFieldType, utils::{filter_dependencies, insert_function_pointer_type_callback_name_map, try_to_extract_function_type}, }; use crate::ast::{FieldType, FunctionDeclaration, FunctionType, SourceFile, StructDeclaration, TypeDeclaration}; use tap::TapOps; pub(super) fn insert_callback_declaration_bindings( source_file: &SourceFile, dependency_resolver: &mut DependenciesResolver, ) { source_file.type_declarations().iter().for_each(|type_declaration| { if let TypeDeclaration::Struct(s) = type_declaration { for_struct_node(s, dependency_resolver); } }); source_file .function_declarations() .iter() .for_each(|function_declaration| { for_function_node(function_declaration, dependency_resolver); }); } fn for_struct_node(struct_declaration: &StructDeclaration, dependency_resolver: &mut DependenciesResolver) { struct_declaration .fields() .iter() .filter_map(|field| { if let FieldType::NamedType(named_type) = field.field_type() { try_to_extract_function_type(&named_type).map(|function_type| (field.name(), function_type)) } else { None } }) .for_each(|(s, ft)| { let callback_name = format!( "{}_{}_callback", struct_declaration .struct_name() .as_ref() .or_else(|| struct_declaration.typedef_name().as_ref()) .unwrap(), s.as_ref().unwrap(), ); insert_callback_node(&callback_name, ft, dependency_resolver); insert_function_pointer_type_callback_name_map(ft, callback_name); }); } fn for_function_node(function_declaration: &FunctionDeclaration, dependency_resolver: &mut DependenciesResolver) { function_declaration .parameters() .iter() .filter_map(|parameter| { try_to_extract_function_type(parameter.parameter_type()) .map(|function_type| (function_declaration.name(), function_type)) }) .for_each(|(s, ft)| { let callback_name = format!("{}_{}_callback", function_declaration.name(), s); insert_callback_node(&callback_name, ft, dependency_resolver); insert_function_pointer_type_callback_name_map(ft, callback_name); }); } fn insert_callback_node( name: impl Into<String>, function_type: &FunctionType, dependency_resolver: &mut DependenciesResolver, ) { let name = name.into(); let new_callback_node = Box::new(AttachFunction::new( name.to_owned(), StructFieldType::from(function_type.return_type().to_owned(), false), true, )) .tap(|new_callback_node| { *new_callback_node.parameters_mut() = function_type .parameter_types() .iter() .map(|parameter_type| StructFieldType::from(parameter_type.to_owned(), false)) .collect(); }); let mut dependencies = new_callback_node .parameters() .iter() .filter_map(filter_dependencies) .collect::<Vec<_>>(); if let Some(return_type_dependency) = filter_dependencies(new_callback_node.return_value()) {
}
dependencies.push(return_type_dependency); } dependency_resolver.insert(name, new_callback_node, dependencies);
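
The binding pass above derives a callback name from the owning struct or function plus the field or parameter that holds the function pointer. A minimal sketch of that naming rule, in Python for illustration (the names used are hypothetical, not from the source):

def callback_name(owner, member):
    # e.g. struct Foo { void (*on_event)(int); } -> "Foo_on_event_callback"
    return "%s_%s_callback" % (owner, member)

assert callback_name("Foo", "on_event") == "Foo_on_event_callback"
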
ImageLoader.min.js
/** * Hilo 1.3.0 for cmd * Copyright 2016 alibaba.com * Licensed under the MIT License */
define(function(r,o,n){var a=r("hilo/core/Class"),e=a.create({load:function(r){var o=this,n=new Image;r.crossOrigin&&(n.crossOrigin=r.crossOrigin),n.onload=function(){o.onLoad(n)},n.onerror=n.onabort=o.onError.bind(n),n.src=r.src+(r.noCache?(r.src.indexOf("?")==-1?"?":"&")+"t="+ +new Date:"")},onLoad:function(r){return r.onload=r.onerror=r.onabort=null,r},onError:function(r){var o=r.target;return o.onload=o.onerror=o.onabort=null,r}});return e});
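
The minified loader above appends a timestamp when `noCache` is set, choosing '?' or '&' depending on whether the URL already carries a query string. A sketch of the same cache-busting rule (illustrative, not taken from the library):

import time

def bust_cache(src):
    # mirror ImageLoader: '?' starts the query string, '&' extends an existing one
    sep = "&" if "?" in src else "?"
    return src + sep + "t=" + str(int(time.time() * 1000))

print(bust_cache("img/logo.png"))      # img/logo.png?t=...
print(bust_cache("img/logo.png?v=2"))  # img/logo.png?v=2&t=...
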
leo_cloud.py
#@+leo-ver=5-thin
#@+node:ekr.20170925083314.1: * @file ../plugins/leo_cloud.py
#@+<< docstring >>
#@+node:ekr.20210518113636.1: ** << docstring >>
"""
leo_cloud.py - synchronize Leo subtrees with remote central server

Terry N. Brown, [email protected], Fri Sep 22 10:34:10 2017

This plugin allows subtrees within a .leo file to be stored in the cloud.
It should be possible to support various cloud platforms; currently git and
systems like DropBox are supported (i.e. you can use GitLab or GitHub or
your own remote git server).

A leo_cloud subtree has a top node with a headline that starts with
'@leo_cloud'. The rest of the headline is ignored. The body of this top
node is used to describe the cloud service, e.g.:

type: Git
remote: [email protected]:tnbrown/leo_cloud_storage.git
local: ~/.leo/leo_cloud/gitlab_leo_cloud_storage
ID: shortcuts
read_on_load: ask
write_on_save: ask

The first three lines can be repeated with different IDs to store
different subtrees at the same remote cloud location.

read_on_load: / write_on_save: can be yes, no, ask, or background
(read_on_load only). If it's not one of those values, there's a warning
dialog. `background` performs a check against the cloud in the background,
and then behaves like `ask` if a difference is detected.

There's also a file system backend, which would look like this:

type: FileSystem
root: ~/DropBox/leo_cloud
ID: my_notes
read_on_load: ask
write_on_save: ask

If you point the FileSystem backend at a folder that is synced
externally, as shown above, it can serve as a cloud adapter for services
like DropBox, Google Drive, OneDrive, etc. etc.

In addition to the Git and FileSystem cloud types it should be possible
to add many others - AWS, WebDAV, sFTP, whatever.

FYI: https://gitlab.com/ gives you free private repos.

The plugin stores headline, body, and uA (unknown attributes). The caveat
is that it must be JSON serializable; this is to avoid pickle flavor
issues. I don't think this will cause problems except for legacy datetime
objects from the todo.py plugin and set()s in the tags plugin. I think
both can be fixed easily - a custom JSON writer can write datetime as iso
string time and sets as lists, and the tags plugin can coerce lists to
sets. I think the todo.py plugin already reads iso string time values.

My intended use was a common synchronized todo list across machines,
which this achieves.

An unintended bonus is that you can use it to sync your settings across
machines easily too. Like this:

@settings
  @keys
    @leo_cloud
      @shortcuts

"just works", so now your shortcuts etc. can be stored on a central
server.

"""
#@-<< docstring >>
#@+<< imports >>
#@+node:ekr.20210518113710.1: ** << imports >>
import json
import os
import re
import shlex
import subprocess
import tempfile
import threading
from copy import deepcopy
from datetime import date, datetime
from hashlib import sha1

from leo.core import leoGlobals as g
from leo.core.leoNodes import vnode
from leo.core.leoQt import QtCore  # see QTimer in LeoCloud.__init__
#
# Fail fast, right after all imports.
g.assertUi('qt')  # May raise g.UiTypeException, caught by the plugins manager.
#@-<< imports >>

# for 'key: value' lines in body text
KWARG_RE = re.compile(r"^([A-Za-z][A-Za-z0-9_]*): (.*)")

#@+others
#@+node:ekr.20201012111338.3: ** init (leo_cloud.py)
def init():
    g.registerHandler(('new', 'open2'), onCreate)
    g.registerHandler(('save1'), onSave)
    g.plugin_signon(__name__)
    return True
#@+node:ekr.20201012111338.4: ** onCreate (leo_cloud.py)
def onCreate(tag, keys):
    c = keys.get('c')
    if not c:
        return
    c._leo_cloud = LeoCloud(c)
#@+node:ekr.20201012111338.5: ** onSave (leo_cloud.py)
def onSave(tag, keys):
    c = keys.get('c')
    if not c:
        return None
    if getattr(c, '_leo_cloud', None):
        c._leo_cloud.save_clouds()
    return None  # explicitly not stopping save1 hook
#@+node:ekr.20201012111338.6: ** lc_read_current (leo_cloud.py)
@g.command("lc-read-current")
def lc_read_current(event):
    """read current Leo Cloud subtree from cloud"""
    c = event.get('c')
    if not c or not hasattr(c, '_leo_cloud'):
        return
    c._leo_cloud.read_current()
#@+node:ekr.20201012111338.7: ** lc_write_current (leo_cloud.py)
@g.command("lc-write-current")
def lc_write_current(event):
    """write current Leo Cloud subtree to cloud"""
    c = event.get('c')
    if not c or not hasattr(c, '_leo_cloud'):
        return
    c._leo_cloud.write_current()
#@+node:ekr.20201012111338.8: ** class LeoCloudIOBase
class LeoCloudIOBase:
    """Leo Cloud IO layer Base Class

    LeoCloudIO layer sits between LeoCloud plugin and backends,
    which might be leo_cloud_server.py or Google Drive etc. etc.
    """
    #@+others
    #@+node:ekr.20201012111338.9: *3* LeoCloudIOBase.__init__
    def __init__(self, c, p, kwargs):
        """
        Args:
            c (context): Leo outline
            p (position): @leo_cloud position
            kwargs (dict): key word args from p.b
        """
        self.v = p.v
        self.c = c
        self.lc_id = kwargs['ID']
    #@+node:ekr.20201012111338.10: *3* LeoCloudIOBase.get_subtree
    def get_subtree(self, lc_id):
        """get_subtree - get a Leo subtree from the cloud

        Args:
            lc_id (str(?)): resource to get
        :returns: vnode built from lc_id
        """
        # pylint: disable=no-member
        # self.get_data
        return self.c._leo_cloud.from_dict(self.get_data(lc_id))
    #@+node:ekr.20201012111338.11: *3* LeoCloudIOBase.put_subtree
    def put_subtree(self, lc_id, v):
        """put - put a subtree into the Leo Cloud

        Args:
            lc_id (str(?)): place to put it
            v (vnode): subtree to put
        """
        # pylint: disable=no-member
        # self.put_data
        self.put_data(lc_id, LeoCloud.to_dict(v))
    #@-others
#@+node:ekr.20201012111338.12: ** class LeoCloudIOFileSystem(LeoCloudIOBase)
class LeoCloudIOFileSystem(LeoCloudIOBase):
    """Leo Cloud IO layer that just loads / saves local files.
    i.e. it's just for development / testing
    """
    #@+others
    #@+node:ekr.20201012111338.13: *3* LeoCloudIOFileSystem(LeoCloudIOBase).__init__
    def __init__(self, c, p, kwargs):
        """
        Args:
            kwargs (dict): key word args from p.b; 'root' is the base folder for data
        """
        LeoCloudIOBase.__init__(self, c, p, kwargs)
        self.basepath = os.path.expanduser(kwargs['root'])
        if not os.path.exists(self.basepath):
            os.makedirs(self.basepath)
    #@+node:ekr.20201012111338.14: *3* LeoCloudIOFileSystem(LeoCloudIOBase).get_data
    def get_data(self, lc_id):
        """get_data - get a Leo Cloud resource

        Args:
            lc_id (str(?)): resource to get
        Returns: object loaded from JSON
        """
        filepath = os.path.join(self.basepath, lc_id + '.json')
        with open(filepath) as data:
            return json.load(data)
    #@+node:ekr.20201012111338.15: *3* LeoCloudIOFileSystem(LeoCloudIOBase).put_data
    def put_data(self, lc_id, data):
        """put - store data in the Leo Cloud

        Args:
            lc_id (str(?)): place to put it
            data (obj): data to store
        """
        filepath = os.path.join(self.basepath, lc_id + '.json')
        with open(filepath, 'w') as out:
            return out.write(LeoCloud.to_json(data))
    #@-others
#@+node:ekr.20201012111338.16: ** class LeoCloudIOGit(LeoCloudIOBase)
class LeoCloudIOGit(LeoCloudIOBase):
    """Leo Cloud IO layer that stores data in a remote git repository,
    cloned to (and pushed from) a local working copy.
    """
    #@+others
    #@+node:ekr.20201012111338.17: *3* LeoCloudIOGit(LeoCloudIOBase).__init__
    def __init__(self, c, p, kwargs):
        """
        Args:
            kwargs (dict): key word args from p.b; 'remote' / 'local' give the repo locations
        """
        # if p.v._leo_cloud_io was used, we'd probably also need to pull
        # in get_data(), so don't bother with p.v._leo_cloud_io
        # p.v._leo_cloud_io = self
        LeoCloudIOBase.__init__(self, c, p, kwargs)
        self.remote = kwargs['remote']
        self.local = os.path.expanduser(kwargs['local'])
        if not os.path.exists(self.local):
            os.makedirs(self.local)
        if not os.listdir(self.local):
            self._run_git('git clone "%s" "%s"' % (self.remote, self.local))
        self._run_git('git -C "%s" pull' % self.local)
    #@+node:ekr.20201012111338.18: *3* LeoCloudIOGit(LeoCloudIOBase)._run_git
    def _run_git(self, text):
        """_run_git - run a git command

        Args:
            text (str): command to run
        """
        subprocess.Popen(shlex.split(text)).wait()
    #@+node:ekr.20201012111338.19: *3* LeoCloudIOGit(LeoCloudIOBase).get_data
    def get_data(self, lc_id):
        """get_data - get a Leo Cloud resource

        Args:
            lc_id (str(?)): resource to get
        :returns: object loaded from JSON
        """
        filepath = os.path.join(self.local, lc_id + '.json')
        with open(filepath) as data:
            return json.load(data)
    #@+node:ekr.20201012111338.20: *3* LeoCloudIOGit(LeoCloudIOBase).put_data
    def put_data(self, lc_id, data):
        """put - store data in the Leo Cloud

        Args:
            lc_id (str(?)): place to put it
            data (obj): data to store
        """
        filepath = os.path.join(self.local, lc_id + '.json')
        with open(filepath, 'w') as out:
            out.write(LeoCloud.to_json(data))
        self._run_git('git -C "%s" add "%s"' % (self.local, lc_id + '.json'))
        self._run_git('git -C "%s" commit -mupdates' % self.local)
        self._run_git('git -C "%s" push' % self.local)
    #@-others
#@+node:ekr.20201012111338.21: ** class LeoCloud
class LeoCloud:
    #@+others
    #@+node:ekr.20201012111338.22: *3* LeoCloud.__init__
    def __init__(self, c):
        """
        Args:
            c (context): Leo context
        """
        self.c = c
        self.bg_finished = False  # used for background thread
        self.bg_results = []  # results from background thread

        # we're here via open2 hook, but too soon to load from cloud,
        # so defer
        QtCore.QTimer.singleShot(0, self.load_clouds)
    #@+node:ekr.20201012111338.23: *3* LeoCloud.bg_check
    def bg_check(self, to_check):
        """
        bg_check - run from load_clouds() to look for changes in cloud in
background. WARNING: no gui impacting calls allowed here (g.es() etc.) Args: to_check (list): list of (vnode, kwargs, hash) tuples to check This (background) thread can't handle any changes found, because it would have to interact with the user and GUI code can only be called from the main thread. We don't want to use QThread, to allow this to work without Qt. So we just collect results and set self.bg_finished = True, which the main thread watches using g.IdleTime() """ for v, kwargs, local_hash in to_check: c = v.context p = c.vnode2position(v) lc_io = getattr(v, '_leo_cloud_io', None) or self.io_from_node(p) subtree = lc_io.get_subtree(lc_io.lc_id) remote_hash = self.recursive_hash(subtree, [], include_current=False) self.bg_results.append((v, local_hash == remote_hash)) if False and local_hash != remote_hash: # disabled dev. / debug code # record difference for inspection tmpdir = tempfile.mkdtemp() with open(os.path.join(tmpdir, 'leo_cloug_local.json'), 'w') as out: out.write(self.to_json(self.to_dict(v))) with open(os.path.join(tmpdir, 'leo_cloug_remote.json'), 'w') as out: out.write(self.to_json(self.to_dict(subtree))) self.bg_finished = True #@+node:ekr.20201012111338.24: *3* LeoCloud.bg_post_process def bg_post_process(self, timer): """ bg_post_process - check to see if background checking is finished, handle any changed cloud trees found Args: timer (leo-idle-timer): Leo idle timer """ if not self.bg_finished: return timer.stop() from_background = set() for v, unchanged in self.bg_results: kwargs = self.kw_from_node(v) if unchanged: g.es("Cloud tree '%s' unchanged" % kwargs['ID']) else: from_background.add((kwargs['remote'], kwargs['ID'])) g.es("Cloud tree '%s' DOES NOT MATCH" % kwargs['ID']) if from_background: self.load_clouds(from_background=from_background) #@+node:ekr.20201012111338.25: *3* LeoCloud.find_at_leo_cloud def find_at_leo_cloud(self, p): """find_at_leo_cloud - find @leo_cloud node Args: p (position): start from here, work up Returns: position or None """ while not p.h.startswith("@leo_cloud") and p.parent(): p = p.parent() if not p.h.startswith("@leo_cloud"): g.es("No @leo_cloud node found", color='red') return None return p #@+node:ekr.20201012111338.26: *3* LeoCloud._find_clouds_recursive def _find_clouds_recursive(self, v, found): """see find_clouds()""" if v.h.startswith('@ignore'): return if v.h.startswith('@leo_cloud'): found.add(v) return for child in v.children: self._find_clouds_recursive(child, found) #@+node:ekr.20201012111338.27: *3* LeoCloud.find_clouds def find_clouds(self): """find_clouds - return a list of @leo_cloud nodes respects @ignore in headlines, doesn't recurse into @leo_cloud nodes """ found = set() self._find_clouds_recursive(self.c.hiddenRootNode, found) valid = [] for lc in found: if 'ID' in self.kw_from_node(lc): valid.append(lc) else: g.es('%s - no ID: line' % lc.h, color='red') return valid #@+node:ekr.20201012111338.28: *3* LeoCloud._from_dict_recursive def _from_dict_recursive(self, top, d): """see from_dict()""" top.h = d['h'] top.b = d['b'] top.u = d['u'] top.children[:] = [] for child in d['children']: top.children.append(self._from_dict_recursive(vnode(self.c), child)) return top #@+node:ekr.20201012111338.29: *3* LeoCloud.from_dict def from_dict(self, d): """from_dict - make a Leo subtree from a dict Args: d (dict): input dict Returns: vnode """ return self._from_dict_recursive(vnode(self.c), d) #@+node:ekr.20201012111338.30: *3* LeoCloud.io_from_node def io_from_node(self, p): """io_from_node - create LeoCloudIO instance from 
body text Args: p (position): node containing text Returns: LeoCloudIO instance """ kwargs = self.kw_from_node(p) # pylint: disable=eval-used lc_io_class = eval("LeoCloudIO%s" % kwargs['type']) return lc_io_class(self.c, p, kwargs) #@+node:ekr.20201012111338.31: *3* LeoCloud.kw_from_node def
(self, p): """kw_from_node - read keywords from body text Args: p (position): node containing text Returns: dict """ kwargs = {'remote': None} # some methods assume 'remote' exists, but it's absent in LeoCloudIOFileSystem for line in p.b.split('\n'): kwarg = KWARG_RE.match(line) if kwarg: kwargs[kwarg.group(1)] = kwarg.group(2) return kwargs #@+node:ekr.20201012111338.32: *3* LeoCloud.load_clouds def load_clouds(self, from_background=None): """ load_clouds - Handle loading from cloud on startup and after background checking for changes. Args: from_background (set): set of (remote, ID) str tuples if we're called after a background check process finds changes. """ if from_background is None: from_background = set() skipped = [] background = [] # things to check in background for lc_v in self.find_clouds(): kwargs = self.kw_from_node(lc_v) if from_background and \ (kwargs['remote'], kwargs['ID']) not in from_background: # only process nodes from the background checking continue read = False read_on_load = kwargs.get('read_on_load', '').lower() if from_background: # was 'background', changes found, so now treat as 'ask' read_on_load = 'ask' if read_on_load == 'yes': read = True elif read_on_load == 'ask': try: last_read = datetime.strptime( lc_v.u['_leo_cloud']['last_read'], "%Y-%m-%dT%H:%M:%S.%f") except KeyError: last_read = None message = "Read cloud data '%s', overwriting local nodes?" % kwargs['ID'] if last_read: delta = datetime.now() - last_read message = "%s\n%s, %sh:%sm:%ss ago" % ( message, last_read.strftime("%a %b %d %H:%M"), 24*delta.days+int(delta.seconds / 3600), int(delta.seconds / 60) % 60, delta.seconds % 60) read = g.app.gui.runAskYesNoCancelDialog(self.c, "Read cloud data?", message=message) read = str(read).lower() == 'yes' if read: self.read_current(p=self.c.vnode2position(lc_v)) elif read_on_load == 'background': # second time round, with from_background data, this will # have been changed to 'ask' (above), so no infinite loop background.append((lc_v, kwargs, self.recursive_hash(lc_v, [], include_current=False))) elif read_on_load == 'no': g.es("NOTE: not reading '%s' from cloud" % kwargs['ID']) elif read_on_load != 'ask': skipped.append(kwargs['ID']) if skipped: g.app.gui.runAskOkDialog(self.c, "Unloaded cloud data", message="There is unloaded (possibly stale) cloud data, use\nread_on_load: yes|no|ask\n" "in @leo_cloud nodes to avoid this message.\nUnloaded data:\n%s" % ', '.join(skipped)) if background: # send to background thread for checking names = ', '.join([i[1]['ID'] for i in background]) g.es("Checking cloud trees in background:\n%s" % names) thread = threading.Thread(target=self.bg_check, args=(background,)) thread.start() # start watching for results g.IdleTime(self.bg_post_process).start() #@+node:ekr.20201012111338.33: *3* LeoCloud.read_current def read_current(self, p=None): """read_current - read current tree from cloud """ if p is None: p = self.find_at_leo_cloud(self.c.p) if not p: return old_p = self.c.p.copy() g.es("Reading from cloud...") # some io's as slow to init. 
- reassure user # io's can cache themselves on the vnode, but should think hard # about whether they want to lc_io = getattr(p.v, '_leo_cloud_io', None) or self.io_from_node(p) v = lc_io.get_subtree(lc_io.lc_id) p.deleteAllChildren() for child_n, child in enumerate(v.children): child._addLink(child_n, p.v) if hasattr(self.c, 'cleo'): self.c.cleo.loadAllIcons() self.c.redraw(p=old_p if self.c.positionExists(old_p) else p) g.es("Read %s" % lc_io.lc_id) # set c changed but don't dirty tree, which would cause # write to cloud prompt on save # but... (a) top node is ending up dirty anyway, and (b) this is ok # because we want the user to understand why the outline's changed, # so just ignore top node dirtiness in self.subtree_changed() self.c.setChanged() p.v.u.setdefault('_leo_cloud', {})['last_read'] = datetime.now().isoformat() #@+node:ekr.20201012111338.34: *3* LeoCloud.recursive_hash @staticmethod def recursive_hash(nd, tree, include_current=True): """ recursive_hash - recursively hash a tree Args: nd (vnode): node to hash tree (list): recursive list of hashes include_current (bool): include h/b/u of current node in hash? Returns: str: sha1 hash of tree Calling with include_current=False ignores the h/b/u of the top node To hash a dict, need a string representation that sorts keys, i.e. json.dumps(s, sort_keys=True) Trailing newlines are ignored in body text. """ childs = [] hashes = [LeoCloud.recursive_hash(child, childs) for child in nd.children] if include_current: hashes.extend([nd.h + nd.b.rstrip('\n') + json.dumps(LeoCloud._ua_clean(nd.u), sort_keys=True)]) whole_hash = sha1(''.join(hashes).encode('utf-8')).hexdigest() tree.append([whole_hash, childs]) return whole_hash #@+node:ekr.20201012111338.35: *3* LeoCloud.save_clouds def save_clouds(self): """check for clouds to save when outline is saved""" skipped = [] no = [] unchanged = [] for lc_v in self.find_clouds(): kwargs = self.kw_from_node(lc_v) write = False write_on_save = kwargs.get('write_on_save', '').lower() if not self.subtree_changed(lc_v): write_on_save = 'unchanged' if write_on_save == 'yes': write = True elif write_on_save == 'ask': write = g.app.gui.runAskYesNoCancelDialog(self.c, "Write cloud data?", message="Write cloud data '%s', overwriting remote version?" 
% kwargs['ID']) write = str(write).lower() == 'yes' if write: self.write_current(p=self.c.vnode2position(lc_v)) elif write_on_save == 'no': no.append(kwargs['ID']) elif write_on_save == 'unchanged': unchanged.append(kwargs['ID']) elif write_on_save != 'ask': skipped.append(kwargs['ID']) if skipped: g.app.gui.runAskOkDialog(self.c, "Unsaved cloud data", message="There is unsaved cloud data, use\nwrite_on_save: yes|no|ask\n" "in @leo_cloud nodes to avoid this message.\nUnsaved data:\n%s" % ', '.join(skipped)) if unchanged: g.es("Unchanged cloud data: %s" % ', '.join(unchanged)) if no: g.es("Cloud data never saved: %s" % ', '.join(no)) #@+node:ekr.20201012111338.36: *3* LeoCloud.subtree_changed def subtree_changed(self, p): """subtree_changed - check if subtree is changed Args: p (position): top of subtree Returns: bool """ if isinstance(p, vnode): p = self.c.vnode2position(p) for nd in p.subtree_iter(): if nd.isDirty(): break else: return False return True #@+node:ekr.20201012111338.37: *3* LeoCloud._to_json_serial @staticmethod def _to_json_serial(obj): """JSON serializer for objects not serializable by default json code""" if isinstance(obj, (datetime, date)): return obj.isoformat() if isinstance(obj, set): return list(obj) raise TypeError ("Type %s not serializable" % type(obj)) #@+node:ekr.20201012111338.38: *3* LeoCloud.to_json @staticmethod def to_json(data): """to_json - convert dict to appropriate JSON Args: data (dict): data to convert Returns: str: json """ return json.dumps( data, sort_keys=True, # prevent unnecessary diffs indent=0, # make json readable on cloud web pages default=LeoCloud._to_json_serial ) #@+node:ekr.20201012111338.39: *3* LeoCloud._to_dict_recursive @staticmethod def _to_dict_recursive(v, d): """_to_dict_recursive - recursively make dictionary representation of v Args: v (vnode): subtree to convert d (dict): dict for results Returns: dict of subtree """ d['b'] = v.b d['h'] = v.h d['u'] = v.u d['children'] = [] for child in v.children: d['children'].append(LeoCloud._to_dict_recursive(child, dict())) return d #@+node:ekr.20201012111338.40: *3* LeoCloud.to_dict @staticmethod def to_dict(v): """to_dict - make dictionary representation of v Args: v (vnode): subtree to convert Returns: dict of subtree """ return LeoCloud._to_dict_recursive(v, dict()) #@+node:ekr.20201012111338.41: *3* LeoCloud._ua_clean @staticmethod def _ua_clean(d): """_ua_clean - strip todo icons from dict Args: d (dict): dict to clean Returns: cleaned dict recursive_hash() to compare trees stumbles on todo icons which are derived information from the todo attribute and include *local* paths to icon images """ d = deepcopy(d) if 'icons' in d: d['icons'] = [i for i in d['icons'] if not i.get('cleoIcon')] return d #@+node:ekr.20201012111338.42: *3* LeoCloud.write_current def write_current(self, p=None): """write_current - write current tree to cloud """ if p is None: p = self.find_at_leo_cloud(self.c.p) if not p: return g.es("Storing to cloud...") # some io's as slow to init. - reassure user lc_io = getattr(p.v, '_leo_cloud_io', None) or self.io_from_node(p) lc_io.put_subtree(lc_io.lc_id, p.v) g.es("Stored %s" % lc_io.lc_id) # writing counts as reading, last read time msg. confusing otherwise p.v.u.setdefault('_leo_cloud', {})['last_read'] = datetime.now().isoformat() #@-others #@-others #@@language python #@@tabwidth -4 #@-leo
kw_from_node
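
The `key: value` configuration lines described in the module docstring are parsed by kw_from_node() using KWARG_RE. A minimal self-contained sketch of that parse; the body text below is the example from the docstring:

import re

KWARG_RE = re.compile(r"^([A-Za-z][A-Za-z0-9_]*): (.*)")

body = """type: Git
remote: [email protected]:tnbrown/leo_cloud_storage.git
local: ~/.leo/leo_cloud/gitlab_leo_cloud_storage
ID: shortcuts
read_on_load: ask
write_on_save: ask"""

kwargs = {'remote': None}  # kw_from_node() pre-seeds 'remote' the same way
for line in body.split('\n'):
    match = KWARG_RE.match(line)
    if match:
        kwargs[match.group(1)] = match.group(2)

assert kwargs['type'] == 'Git' and kwargs['ID'] == 'shortcuts'
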
lex_attrs.py
from ...attrs import LIKE_NUM


# Thirteen, fifteen etc. are written separately: on üç
_num_words = [
    "bir",
    "iki",
    "üç",
    "dört",
    "beş",
    "altı",
    "yedi",
    "sekiz",
    "dokuz",
    "on",
    "yirmi",
    "otuz",
    "kırk",
    "elli",
    "altmış",
    "yetmiş",
    "seksen",
    "doksan",
    "yüz",
    "bin",
    "milyon",
    "milyar",
    "trilyon",
    "katrilyon",
    "kentilyon",
]

_ordinal_words = [
    "birinci",
    "ikinci",
    "üçüncü",
    "dördüncü",
    "beşinci",
    "altıncı",
    "yedinci",
    "sekizinci",
    "dokuzuncu",
    "onuncu",
    "yirminci",
    "otuzuncu",
    "kırkıncı",
    "ellinci",
    "altmışıncı",
    "yetmişinci",
    "sekseninci",
    "doksanıncı",
    "yüzüncü",
    "bininci",
    "milyonuncu",
    "milyarıncı",
    "trilyonuncu",
    "katrilyonuncu",
    "kentilyonuncu",
]

_ordinal_endings = ("inci", "ıncı", "nci", "ncı", "uncu", "üncü")


def like_num(text):
    if text.startswith(("+
"±", "~")): text = text[1:] text = text.replace(",", "").replace(".", "") if text.isdigit(): return True if text.count("/") == 1: num, denom = text.split("/") if num.isdigit() and denom.isdigit(): return True text_lower = text.lower() # Check cardinal number if text_lower in _num_words: return True # Check ordinal number if text_lower in _ordinal_words: return True if text_lower.endswith(_ordinal_endings): if text_lower[:-3].isdigit() or text_lower[:-4].isdigit(): return True return False LEX_ATTRS = {LIKE_NUM: like_num}
", "-",
mod.rs
pub mod cfg;
mod constant_folding;
mod expression;
mod external_functions;
mod reaching_definitions;
mod statements;
mod storage;
mod vector_to_slice;

use self::cfg::{ControlFlowGraph, Instr, Vartable};
use self::expression::expression;
use crate::sema::ast::Namespace;

/// The contracts are fully resolved, but they do not yet have a CFG, which the LLVM code emitter needs.
/// Not all contracts need a CFG; only concrete contracts, for which we actually emit code, get one.
pub fn codegen(contract_no: usize, ns: &mut Namespace)
/// This function will set all contract storage initializers and should be called from the constructor fn storage_initializer(contract_no: usize, ns: &mut Namespace) -> ControlFlowGraph { let mut cfg = ControlFlowGraph::new(String::from("storage_initializer"), None); let mut vartab = Vartable::new(ns.next_id); for layout in &ns.contracts[contract_no].layout { let var = &ns.contracts[layout.contract_no].variables[layout.var_no]; if let Some(init) = &var.initializer { let storage = ns.contracts[contract_no].get_storage_slot(layout.contract_no, layout.var_no, ns); let value = expression(&init, &mut cfg, contract_no, ns, &mut vartab); cfg.add( &mut vartab, Instr::SetStorage { value, ty: var.ty.clone(), storage, }, ); } } cfg.add(&mut vartab, Instr::Return { value: Vec::new() }); cfg.vars = vartab.drain(); reaching_definitions::find(&mut cfg); constant_folding::constant_folding(&mut cfg, ns); vector_to_slice::vector_to_slice(&mut cfg, ns); cfg }
{ if ns.contracts[contract_no].is_concrete() { let mut cfg_no = 0; let mut all_cfg = Vec::new(); external_functions::add_external_functions(contract_no, ns); // all the functions should have a cfg_no assigned, so we can generate call instructions to the correct function for (_, func_cfg) in ns.contracts[contract_no].all_functions.iter_mut() { *func_cfg = cfg_no; cfg_no += 1; } all_cfg.resize(cfg_no, ControlFlowGraph::placeholder()); // clone all_functions so we can pass a mutable reference to generate_cfg for (function_no, cfg_no) in ns.contracts[contract_no] .all_functions .iter() .map(|(function_no, cfg_no)| (*function_no, *cfg_no)) .collect::<Vec<(usize, usize)>>() .into_iter() { cfg::generate_cfg(contract_no, Some(function_no), cfg_no, &mut all_cfg, ns) } // Generate cfg for storage initializers let cfg = storage_initializer(contract_no, ns); let pos = all_cfg.len(); all_cfg.push(cfg); ns.contracts[contract_no].initializer = Some(pos); if !ns.contracts[contract_no].have_constructor(ns) { // generate the default constructor let func = ns.default_constructor(contract_no); let cfg_no = all_cfg.len(); all_cfg.push(ControlFlowGraph::placeholder()); cfg::generate_cfg(contract_no, None, cfg_no, &mut all_cfg, ns); ns.contracts[contract_no].default_constructor = Some((func, cfg_no)); } ns.contracts[contract_no].cfg = all_cfg; } }
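
codegen() above works in two phases: it first assigns every function a stable cfg_no, then generates each CFG into a pre-sized placeholder vector, so call instructions can reference functions whose CFGs have not been generated yet. A language-neutral sketch of that assign-then-fill pattern, in Python with hypothetical names:

functions = ["constructor", "transfer", "balance_of"]   # hypothetical
cfg_no = {name: i for i, name in enumerate(functions)}  # phase 1: number everything
all_cfg = [None] * len(functions)                       # placeholders

for name, i in cfg_no.items():                          # phase 2: fill each slot
    all_cfg[i] = "<cfg for %s>" % name                  # stands in for generate_cfg()

assert all(all_cfg)
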
npy_append_array.py
import numpy as np import os.path from struct import pack, unpack from io import BytesIO def header_tuple_dict(tuple_in): return { 'shape': tuple_in[0], 'fortran_order': tuple_in[1], 'descr': np.lib.format.dtype_to_descr(tuple_in[2]) } def has_fortran_order(arr): return not arr.flags.c_contiguous and arr.flags.f_contiguous def peek(fp, length): pos = fp.tell() tmp = fp.read(length) fp.seek(pos) return tmp class NpyAppendArray: def __init__(self, filename):
def __init(self): self.fp = open(self.filename, mode="rb+") fp = self.fp magic = np.lib.format.read_magic(fp) self.is_version_1 = magic[0] == 1 and magic[1] == 0 self.is_version_2 = magic[0] == 2 and magic[1] == 0 if not self.is_version_1 and not self.is_version_2: raise NotImplementedError( "version (%d, %d) not implemented"%magic ) self.header_length, = unpack("<H", peek(fp, 2)) if self.is_version_1 \ else unpack("<I", peek(fp, 4)) self.header = np.lib.format.read_array_header_1_0(fp) if \ self.is_version_1 else np.lib.format.read_array_header_2_0(fp) if self.header[1] != False: raise NotImplementedError("fortran_order not implemented") fp.seek(0) self.header_bytes = fp.read(self.header_length + ( 10 if self.is_version_1 else 12 )) fp.seek(0, 2) self.__is_init = True def __create_header_bytes(self, header_map, spare_space=False): io = BytesIO() np.lib.format.write_array_header_2_0(io, header_map) if spare_space: io.getbuffer()[8:12] = pack("<I", int( io.getbuffer().nbytes-12+64 )) io.getbuffer()[-1] = 32 io.write(b" "*64) io.getbuffer()[-1] = 10 return io.getbuffer() def append(self, arr): if not arr.flags.c_contiguous: raise NotImplementedError("ndarray needs to be c_contiguous") if has_fortran_order(arr): raise NotImplementedError("fortran_order not implemented") arr_descr = np.lib.format.dtype_to_descr(arr.dtype) if not self.__is_init: with open(self.filename, "wb") as fp0: fp0.write(self.__create_header_bytes({ 'descr': arr_descr, 'fortran_order': False, 'shape': arr.shape }, True)) arr.tofile(fp0) # np.save(self.filename, arr) self.__init() return descr = self.header[2] if arr_descr != descr: raise TypeError("incompatible ndarrays types %s and %s"%( arr_descr, descr )) shape = self.header[0] if len(arr.shape) != len(shape): raise TypeError("incompatible ndarrays shape lengths %s and %s"%( len(arr.shape), len(shape) )) for i, e in enumerate(shape): if i > 0 and e != arr.shape[i]: raise TypeError("ndarray shapes can only differ on zero axis") new_shape = list(shape) new_shape[0] += arr.shape[0] new_shape = tuple(new_shape) self.header = (new_shape, self.header[1], self.header[2]) self.fp.seek(0) new_header_map = header_tuple_dict(self.header) new_header_bytes = self.__create_header_bytes(new_header_map, True) header_length = len(self.header_bytes) if header_length != len(new_header_bytes): new_header_bytes = self.__create_header_bytes(new_header_map) if header_length != len(new_header_bytes): raise TypeError("header length mismatch, old: %d, new: %d"%( header_length, len(new_header_bytes) )) self.header_bytes = new_header_bytes self.fp.write(new_header_bytes) self.fp.seek(0, 2) arr.tofile(self.fp) def __del__(self): if self.fp is not None: self.fp.close()
self.filename = filename self.fp = None self.__is_init = False if os.path.isfile(filename): self.__init()
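
NpyAppendArray above writes the first array with 64 spare bytes in the .npy header so the header can be rewritten in place as shape[0] grows; later appends update the header and write raw bytes with tofile(). A hypothetical usage sketch (the file name is illustrative, and it assumes out.npy does not already exist):

import numpy as np

arr = np.arange(6, dtype=np.int64).reshape(2, 3)

out = NpyAppendArray("out.npy")
out.append(arr)   # first call writes a fresh .npy with spare header space
out.append(arr)   # later calls grow shape[0] in the header and append bytes
del out           # __del__ closes the file handle

assert np.load("out.npy").shape == (4, 3)
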
preprocess.py
#!/usr/bin/env python
#
# (c) 2016 -- onwards Georgios Gousios <[email protected]>, Rik Nijessen <[email protected]>
#

from __future__ import print_function

import pickle
import random
import urllib
import numpy as np
import argparse

from config import *
from code_tokenizer import CodeTokenizer
from my_tokenizer import MyTokenizer

from keras.preprocessing.sequence import pad_sequences


@timeit
def load_pr_csv(file):
    """
    Load a PR dataset, including all engineered features
    :return: A pandas dataframe with all data loaded
    """
    print("Loading pull requests file ", file)
    pullreqs = pd.read_csv(file)
    # set_index returns a new dataframe, so assign the result back;
    # drop=False keeps project_name/github_id as columns for load_data()
    pullreqs = pullreqs.set_index(['project_name', 'github_id'], drop=False)
    return pullreqs


def
(): """ Make sure that the PR diffs have been downloaded in the appropriate dir """ if not os.path.exists(DIFFS_DIR): print("Downloading pull request diffs") import tarfile urllib.urlretrieve(DIFFS_DATA_URL, DIFFS_FILE) tar = tarfile.open(DIFFS_FILE, "r:gz") tar.extractall() tar.close() def read_title_and_comments(file): str = open(file).read() splitted = str.split("\n") title = splitted[0] # remove title and empty space comment = str[2:] return title, comment @timeit def create_code_tokenizer(code, vocabulary_size): tokenizer = CodeTokenizer(nb_words=vocabulary_size) tokenizer.fit_on_texts(code) word_index = tokenizer.word_index print('Found %s unique tokens.' % len(word_index)) return tokenizer def create_text_tokenizer(texts, vocabulary_size): tokenizer = MyTokenizer(nb_words=vocabulary_size) tokenizer.fit_on_texts(texts) word_index = tokenizer.word_index print('Found %s unique tokens.' % len(word_index)) return tokenizer @timeit def tokenize(tokenizer, texts, maxlen): print("Tokenizing") sequences = tokenizer.texts_to_sequences(texts) return pad_sequences(sequences, maxlen=maxlen) def load_data(pullreqs): diffs = [] titles = [] comments = [] labels = [] successful = failed = 0 for i, row in pullreqs.iterrows(): try: name = (row['project_name']).replace('/','@')+"@"+str(row['github_id'])+'.patch' diff_file = os.path.join(DIFFS_DIR, name) comment_file = os.path.join(TXTS_DIR, name.replace(".patch",".txt")) diff = open(diff_file).read() title, comment = read_title_and_comments(comment_file) diffs.append(diff) titles.append(title) comments.append(comment) labels.append(int(row['merged'] * 1)) successful += 1 except: failed += 1 pass print("%s diffs loaded, %s diffs failed" % (successful, failed), end='\r') print("") return diffs, comments, titles, labels @timeit def create_dataset(prefix="default", diff_vocabulary_size=20000, comment_vocabulary_size=20000, title_vocabulary_size=20000, max_diff_length=100, max_comment_length=100, max_title_length=100): """ Create a dataset for further processing :param prefix: Name for the dataset :param balance_ratio: The ratio between merged and unmerged PRs to include :param num_diffs: Total number of diffs to load. Any value below 1 means load all diffs. 
:param langs: Only include PRs for repos whose primary language is within this array :param diff_vocabulary_size: (Max) size of the diff vocabulary to use for tokenizing :param comment_vocabulary_size: (Max) size of the comment vocabulary to use for tokenizing :param title_vocabulary_size: (Max) size of the title vocabulary to use for tokenizing :param max_diff_length: Maximum length of the input diff sequences :param max_comment_length: Maximum length of the input comment sequences :param max_title_length: Maximum length of the input title sequences :return: A training and testing dataset, along with the config used to produce it """ config = locals() pullreqs_train = load_pr_csv(train_csv_file % prefix) pullreqs_test = load_pr_csv(test_csv_file % prefix) pullreqs_validation = load_pr_csv(validation_csv_file % prefix) ensure_diffs() tr_diffs, tr_comments, tr_titles, tr_labels = load_data(pullreqs_train) val_diffs, val_comments, val_titles, val_labels = load_data(pullreqs_validation) te_diffs, te_comments, te_titles, te_labels = load_data(pullreqs_test) code_tokenizer = create_code_tokenizer(tr_diffs+val_diffs, diff_vocabulary_size) diff_train = tokenize(code_tokenizer, tr_diffs, max_diff_length) diff_val = tokenize(code_tokenizer, val_diffs, max_diff_length) diff_test = tokenize(code_tokenizer, te_diffs, max_diff_length) comment_tokenizer = create_text_tokenizer(tr_comments+val_comments, comment_vocabulary_size) comment_train = tokenize(comment_tokenizer, tr_comments, max_comment_length) comment_val = tokenize(code_tokenizer, val_comments, max_comment_length) comment_test = tokenize(comment_tokenizer, te_comments, max_comment_length) title_tokenizer = create_text_tokenizer(tr_titles+val_titles, title_vocabulary_size) title_train = tokenize(title_tokenizer, tr_titles, max_title_length) title_val = tokenize(code_tokenizer, val_titles, max_title_length) title_test = tokenize(title_tokenizer, te_titles, max_title_length) y_train = np.asarray(tr_labels) y_val = np.asarray(val_labels) y_test = np.asarray(te_labels) print('Shape of diff tensor:', diff_train.shape) print('Shape of comment tensor:', comment_train.shape) print('Shape of title tensor:', title_train.shape) print('Shape of label tensor:', y_train.shape) # Save dataset with open(diff_vocab_file % prefix, 'w') as f: pickle.dump(code_tokenizer, f) with open(comment_vocab_file % prefix, 'w') as f: pickle.dump(comment_tokenizer, f) with open(title_vocab_file % prefix, 'w') as f: pickle.dump(title_tokenizer, f) with open(diff_train_file % prefix, 'w') as f: pickle.dump(diff_train, f) with open(comment_train_file % prefix, 'w') as f: pickle.dump(comment_train, f) with open(title_train_file % prefix, 'w') as f: pickle.dump(title_train, f) with open(y_train_file % prefix, 'w') as f: pickle.dump(y_train, f) with open(diff_val_file % prefix, 'w') as f: pickle.dump(diff_val, f) with open(comment_val_file % prefix, 'w') as f: pickle.dump(comment_val, f) with open(title_val_file % prefix, 'w') as f: pickle.dump(title_val, f) with open(y_val_file % prefix, 'w') as f: pickle.dump(y_val, f) # save testdata with open(diff_test_file % prefix, 'w') as f: pickle.dump(diff_test, f) with open(comment_test_file % prefix, 'w') as f: pickle.dump(comment_test, f) with open(title_test_file % prefix, 'w') as f: pickle.dump(title_test, f) with open(y_test_file % prefix, 'w') as f: pickle.dump(y_test, f) with open(config_file % prefix, 'w') as f: pickle.dump(config, f) return diff_train, comment_train, title_train, y_train, diff_val, comment_val, title_val, y_val, 
diff_test, comment_test, title_test, y_test, config parser = argparse.ArgumentParser() parser.add_argument('--prefix', default='default') parser.add_argument('--diff_vocabulary_size', type=int, default=50000) parser.add_argument('--comment_vocabulary_size', type=int, default=50000) parser.add_argument('--title_vocabulary_size', type=int, default=10000) parser.add_argument('--max_diff_sequence_length', type=int, default=150) parser.add_argument('--max_comment_sequence_length', type=int, default=150) parser.add_argument('--max_title_sequence_length', type=int, default=150) args = parser.parse_args() if __name__ == '__main__': create_dataset(args.prefix, args.diff_vocabulary_size, args.comment_vocabulary_size, args.title_vocabulary_size, args.max_diff_sequence_length, args.max_comment_sequence_length, args.max_title_sequence_length)
ensure_diffs
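
create_dataset() above fits each tokenizer on the train+validation texts, converts texts to integer sequences, and pads them to a fixed length. A minimal sketch of that fit-then-pad flow; keras' stock Tokenizer (num_words in current releases, nb_words in the era of this script) stands in for the project's CodeTokenizer/MyTokenizer:

from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences

texts = ["fix crash in parser", "add parser tests"]

tokenizer = Tokenizer(num_words=100)             # cap the vocabulary size
tokenizer.fit_on_texts(texts)                    # build the word index on training data
sequences = tokenizer.texts_to_sequences(texts)  # words -> integer ids
padded = pad_sequences(sequences, maxlen=5)      # left-pad / truncate to maxlen

assert padded.shape == (2, 5)
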
clean_svt_transcript.py
#!/usr/bin/env python3 import argparse import codecs import sys def transform(i,o):
if __name__ == "__main__": parser = argparse.ArgumentParser(description='') parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=codecs.getreader('utf-8')(sys.stdin.buffer)) parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=codecs.getwriter('utf-8')(sys.stdout.buffer)) args = parser.parse_args() transform(args.infile, args.outfile)
for line in i: if len(line.strip()) == 0: continue key, trans = line.strip().split(None, 1) ntrans = [] for t in trans.split(): if t.startswith("<"): continue ntrans.append(t.lower()) print("{} {}".format(key, " ".join(ntrans)), file=o)
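
An illustrative run of transform() on kaldi-style "utt-id transcript" lines: markup tokens in angle brackets are dropped, blank lines are skipped, and the remaining words are lowercased.

import io

src = io.StringIO("utt1 <noise> Hello WORLD\n\nutt2 <sil> OK\n")
dst = io.StringIO()
transform(src, dst)
assert dst.getvalue() == "utt1 hello world\nutt2 ok\n"
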
__init__.py
# defusedxml # # Copyright (c) 2013 by Christian Heimes <[email protected]> # Licensed to PSF under a Contributor Agreement. # See https://www.python.org/psf/license for licensing details. """Defuse XML bomb denial of service vulnerabilities """ from __future__ import print_function, absolute_import from .common import ( DefusedXmlException, DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, NotSupportedError, _apply_defusing, ) def
():
    """Monkey patch and defuse all stdlib packages.

    :warning: The monkey patch is an EXPERIMENTAL feature.
    """
    defused = {}

    from . import cElementTree
    from . import ElementTree
    from . import minidom
    from . import pulldom
    from . import sax
    from . import expatbuilder
    from . import expatreader
    from . import xmlrpc

    xmlrpc.monkey_patch()
    defused[xmlrpc] = None

    for defused_mod in [
        cElementTree,
        ElementTree,
        minidom,
        pulldom,
        sax,
        expatbuilder,
        expatreader,
    ]:
        stdlib_mod = _apply_defusing(defused_mod)
        defused[defused_mod] = stdlib_mod

    return defused


__version__ = "0.6.0rc1"

__all__ = [
    "DefusedXmlException",
    "DTDForbidden",
    "EntitiesForbidden",
    "ExternalReferenceForbidden",
    "NotSupportedError",
]
defuse_stdlib
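
After defuse_stdlib(), the patched stdlib parsers reject markup that could trigger entity-expansion attacks. A sketch of the expected effect (illustrative; a billion-laughs payload is abbreviated here to a single entity declaration):

import defusedxml
defusedxml.defuse_stdlib()

import xml.etree.ElementTree as ET
from defusedxml.common import EntitiesForbidden

bomb = '<!DOCTYPE x [<!ENTITY a "aaaa">]><x>&a;</x>'
try:
    ET.fromstring(bomb)
except EntitiesForbidden:
    print("entity declaration blocked")
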
setup_yukicon2015.py
# encoding: utf-8 from datetime import datetime, timedelta from django.conf import settings from django.core.management.base import BaseCommand from django.utils.timezone import now from dateutil.tz import tzlocal from core.utils import slugify class Setup(object): def setup(self, test=False): self.test = test self.tz = tzlocal() self.setup_core() self.setup_tickets() def setup_core(self): from core.models import Venue, Event self.venue, unused = Venue.objects.get_or_create(name='Espoon kulttuurikeskus', defaults=dict( name_inessive='Espoon kulttuurikeskuksessa', )) self.event, unused = Event.objects.get_or_create(slug='yukicon2015', defaults=dict( name='Yukicon 2.0', name_genitive='Yukicon 2.0 -tapahtuman', name_illative='Yukicon 2.0 -tapahtumaan', name_inessive='Yukicon 2.0 -tapahtumassa', homepage_url='http://www.yukicon.fi', organization_name='Yukitea ry', organization_url='http://www.yukicon.fi', start_time=datetime(2015, 1, 10, 10, 0, tzinfo=self.tz), end_time=datetime(2015, 1, 11, 18, 0, tzinfo=self.tz), venue=self.venue, )) def setup_tickets(self): from tickets.models import TicketsEventMeta, LimitGroup, Product tickets_admin_group, = TicketsEventMeta.get_or_create_groups(self.event, ['admins']) defaults = dict( admin_group=tickets_admin_group, due_days=14, shipping_and_handling_cents=0, reference_number_template="2015{:05d}", contact_email='Yukicon <[email protected]>', plain_contact_email='[email protected]', ticket_free_text=u"Tämä on sähköinen lippusi Yukicon 2.0 -tapahtumaan. Sähköinen lippu vaihdetaan rannekkeeseen\n" u"lipunvaihtopisteessä saapuessasi tapahtumaan. Voit tulostaa tämän lipun tai näyttää sen\n" u"älypuhelimen tai tablettitietokoneen näytöltä. Mikäli kumpikaan näistä ei ole mahdollista, ota ylös\n" u"kunkin viivakoodin alla oleva neljästä tai viidestä sanasta koostuva sanakoodi ja ilmoita se\n" u"lipunvaihtopisteessä.\n\n" u"Tervetuloa Yukiconiin!", front_page_text=u"<h2>Tervetuloa ostamaan pääsylippuja Yukicon 2.0 -tapahtumaan!</h2>" u"<p>Liput maksetaan suomalaisilla verkkopankkitunnuksilla heti tilauksen yhteydessä.</p>" u"<p>Lue lisää tapahtumasta <a href='http://www.yukicon.fi'>Yukiconin kotisivuilta</a>.</p>", ) if self.test: t = now() defaults.update( ticket_sales_starts=t - timedelta(days=60), ticket_sales_ends=t + timedelta(days=60), ) else: defaults.update( ticket_sales_starts=datetime(2014, 11, 20, 18, 0, tzinfo=self.tz), ticket_sales_ends=datetime(2015, 1, 11, 18, 0, tzinfo=self.tz), ) meta, unused = TicketsEventMeta.objects.get_or_create(event=self.event, defaults=defaults) def limit_group(description, limit):
description=description, defaults=dict(limit=limit), ) return limit_group def ordering(): ordering.counter += 10 return ordering.counter ordering.counter = 0 for product_info in [ dict( name=u'Yukicon 2015 -pääsylippu', description=u'Lippu kattaa koko viikonlopun. Maksettuasi sinulle lähetetään PDF-lippu antamaasi sähköpostiin, jota vastaan saat rannekkeen tapahtuman ovelta.', limit_groups=[ limit_group('Pääsyliput', 1450), ], price_cents=1700, requires_shipping=False, electronic_ticket=True, available=True, ordering=ordering(), ), ]: name = product_info.pop('name') limit_groups = product_info.pop('limit_groups') product, unused = Product.objects.get_or_create( event=self.event, name=name, defaults=product_info ) if not product.limit_groups.exists(): product.limit_groups = limit_groups product.save() class Command(BaseCommand): args = '' help = 'Setup yukicon2015 specific stuff' def handle(self, *args, **opts): Setup().setup(test=settings.DEBUG)
limit_group, unused = LimitGroup.objects.get_or_create( event=self.event,
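
The setup above uses a small function-attribute counter so products receive ordering values 10, 20, 30, ... in declaration order. The idiom in isolation:

def ordering():
    ordering.counter += 10
    return ordering.counter
ordering.counter = 0  # state lives on the function object itself

assert [ordering() for _ in range(3)] == [10, 20, 30]
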
codelist-register.ts
/**
 * Code list service registry
 *
 * @export
 * @class CodeListRegister
 */
export class CodeListRegister {
Map of all entity data services
     *
     * @protected
     * @type {*}
     * @memberof CodeListRegister
     */
    protected allCodeList: Map<string, () => Promise<any>> = new Map();

    /**
     * Cache of loaded entity data services
     *
     * @protected
     * @type {Map<string, any>}
     * @memberof CodeListRegister
     */
    protected serviceCache: Map<string, any> = new Map();

    /**
     * Creates an instance of CodeListRegister.
     * @memberof CodeListRegister
     */
    constructor() {
        this.init();
    }

    /**
     * Initialization
     *
     * @protected
     * @memberof CodeListRegister
     */
    protected init(): void {
        this.allCodeList.set('SysOperator', () => import('@/codelist/sys-operator'));
    }

    /**
     * Load an entity data service
     *
     * @protected
     * @param {string} serviceName
     * @returns {Promise<any>}
     * @memberof CodeListRegister
     */
    protected async loadService(serviceName: string): Promise<any> {
        const service = this.allCodeList.get(serviceName);
        if (service) {
            return service();
        }
    }

    /**
     * Get an application entity data service
     *
     * @param {string} name
     * @returns {Promise<any>}
     * @memberof CodeListRegister
     */
    public async getService(name: string): Promise<any> {
        if (this.serviceCache.has(name)) {
            return this.serviceCache.get(name);
        }
        const CodeList: any = await this.loadService(name);
        if (CodeList && CodeList.default) {
            const instance: any = new CodeList.default();
            this.serviceCache.set(name, instance);
            return instance;
        }
    }
}

export const codeListRegister: CodeListRegister = new CodeListRegister();
/** *
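
CodeListRegister above maps a service name to a lazy `import()` thunk and caches the constructed instance on first use. The same lazy-load-and-cache shape, sketched in Python with hypothetical names:

class SysOperatorCodeList:
    """Stand-in for the module loaded by import('@/codelist/sys-operator')."""

class Register:
    def __init__(self):
        # name -> zero-argument loader; the real code defers a module import here
        self._loaders = {"SysOperator": lambda: SysOperatorCodeList}
        self._cache = {}

    def get_service(self, name):
        if name in self._cache:            # construct once, reuse afterwards
            return self._cache[name]
        service_cls = self._loaders[name]()  # load on first request
        instance = service_cls()
        self._cache[name] = instance
        return instance

register = Register()
assert register.get_service("SysOperator") is register.get_service("SysOperator")
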
mod.rs
use crate::scope::{IdentType, ScopeKind};
use std::{cell::RefCell, collections::HashSet};
use swc_atoms::JsWord;
use swc_common::{Mark, SyntaxContext};
use swc_ecma_ast::*;
use swc_ecma_visit::{as_folder, noop_visit_mut_type, Fold, VisitMut, VisitMutWith};

#[cfg(test)]
mod tests;

const LOG: bool = false;

/// See [resolver_with_mark] for docs.
pub fn resolver() -> impl 'static + Fold {
    resolver_with_mark(Mark::fresh(Mark::root()))
}

/// # When to run
///
/// The resolver expects a 'clean' ast. You can get a clean ast by parsing,
/// or by removing all syntax context from the ast nodes.
///
/// # What does it do
///
/// First, each scope (fn, block) gets its own SyntaxContext.
/// The resolver visits all identifiers in the module and looks for binding
/// identifiers in the scope. Those identifiers now have the SyntaxContext of
/// their scope (fn, block). While doing so, the resolver tries to resolve
/// normal identifiers (without hygiene info) as references to identifiers in
/// scope. If the resolver finds a suitable variable, the identifier reference
/// gets the same context as the variable.
///
///
/// # Panics
///
/// `top_level_mark` should not be root.
///
/// # Example
///
/// ```js
/// let a = 1;
/// {
///     let a = 2;
///     use(a);
/// }
/// use(a)
/// ```
///
/// resolver does
///
/// 1. Define `a` with top level context.
///
/// 2. Found a block, so visit the block with a new syntax context.
///
/// 3. Defined `a` with the syntax context of the block statement.
///
/// 4. Found usage of `a`, and determines that it's a reference to `a` in the
/// block. So the reference to `a` will have the same syntax context as `a` in
/// the block.
///
/// 5. Found usage of `a` (last line), and determines that it's a
/// reference to top-level `a`, so the syntax context of `a` on the last line
/// becomes the top-level syntax context.
pub fn resolver_with_mark(top_level_mark: Mark) -> impl 'static + Fold {
    assert_ne!(
        top_level_mark,
        Mark::root(),
        "Marker provided to resolver should not be the root mark"
    );
    as_folder(Resolver::new(
        top_level_mark,
        Scope::new(ScopeKind::Fn, None),
        None,
        false,
    ))
}

/// [resolver_with_mark] with typescript support enabled.
pub fn ts_resolver(top_level_mark: Mark) -> impl 'static + Fold {
    assert_ne!(
        top_level_mark,
        Mark::root(),
        "Marker provided to resolver should not be the root mark"
    );
    as_folder(Resolver::new(
        top_level_mark,
        Scope::new(ScopeKind::Fn, None),
        None,
        true,
    ))
}

#[derive(Debug, Clone)]
struct Scope<'a> {
    /// Parent scope of the scope
    parent: Option<&'a Scope<'a>>,

    /// Kind of the scope.
kind: ScopeKind, /// All declarations in the scope declared_symbols: HashSet<JsWord>, hoisted_symbols: RefCell<HashSet<JsWord>>, /// All types declared in the scope declared_types: HashSet<JsWord>, } impl<'a> Default for Scope<'a> { fn default() -> Self { Scope::new(ScopeKind::Fn, None) } } impl<'a> Scope<'a> { pub fn new(kind: ScopeKind, parent: Option<&'a Scope<'a>>) -> Self { Scope { parent, kind, declared_symbols: Default::default(), hoisted_symbols: Default::default(), declared_types: Default::default(), } } } /// # Phases /// /// ## Hoisting phase /// /// ## Resolving phase struct Resolver<'a> { hoist: bool, mark: Mark, current: Scope<'a>, cur_defining: Option<(JsWord, Mark)>, ident_type: IdentType, handle_types: bool, in_type: bool, } impl<'a> Resolver<'a> { fn new( mark: Mark, current: Scope<'a>, cur_defining: Option<(JsWord, Mark)>, handle_types: bool, ) -> Self { Resolver { hoist: false, mark, current, cur_defining, ident_type: IdentType::Ref, handle_types, in_type: false, } } fn visit_mut_stmt_within_same_scope(&mut self, s: &mut Stmt) { match s { Stmt::Block(s) => { s.visit_mut_children_with(self); } _ => s.visit_mut_with(self), } } /// Returns a [Mark] for an identifier reference. fn mark_for_ref(&self, sym: &JsWord) -> Option<Mark> { if self.handle_types && self.in_type { let mut mark = self.mark; let mut scope = Some(&self.current); while let Some(cur) = scope { // if cur.declared_types.contains(sym) || // cur.hoisted_symbols.borrow().contains(sym) { if cur.declared_types.contains(sym) { if mark == Mark::root() { break; } return Some(mark); } mark = mark.parent(); scope = cur.parent; } } let mut mark = self.mark; let mut scope = Some(&self.current); while let Some(cur) = scope { if cur.declared_symbols.contains(sym) || cur.hoisted_symbols.borrow().contains(sym) { if mark == Mark::root() { return None; } return Some(mark); } mark = mark.parent(); scope = cur.parent; } if let Some((ref c, mark)) = self.cur_defining { if *c == *sym { return Some(mark); } } None } fn visit_mut_binding_ident(&mut self, ident: &mut Ident, kind: Option<VarDeclKind>) { if cfg!(debug_assertions) && LOG { eprintln!( "resolver: Binding {}{:?} {:?}", ident.sym, ident.span.ctxt(), kind ); } if ident.span.ctxt() != SyntaxContext::empty() { return; } if self.in_type { self.current.declared_types.insert(ident.sym.clone()); let mark = self.mark; ident.span = if mark == Mark::root() { ident.span } else { let span = ident.span.apply_mark(mark); if cfg!(debug_assertions) && LOG { eprintln!("\t-> {:?}", span.ctxt()); } span }; return; } if self.hoist { // If there's no binding with same name, it means the code depends on hoisting // // e.g. 
// // function test() { // if (typeof Missing == typeof EXTENDS) { // console.log("missing") // } // var EXTENDS = "test"; // } let val = (|| { let mut cursor = Some(&self.current); let mut mark = self.mark; while let Some(c) = cursor { if c.declared_symbols.contains(&ident.sym) || c.hoisted_symbols.borrow().contains(&ident.sym) { c.hoisted_symbols.borrow_mut().insert(ident.sym.clone()); return None; } cursor = c.parent; let m = mark.parent(); if m == Mark::root() { return Some(mark); } mark = m; } None })(); if let Some(mark) = val { ident.span = ident.span.apply_mark(mark); return; } } let (should_insert, mark) = match kind { None | Some(VarDeclKind::Var) => { if let Some((ref cur, override_mark)) = self.cur_defining { if *cur != ident.sym { (true, self.mark) } else { (false, override_mark) } } else { (true, self.mark) } } _ => (true, self.mark), }; let mut mark = mark; if should_insert { if self.hoist { let mut cursor = Some(&self.current); match kind { Some(VarDeclKind::Var) | None => { while let Some(c) = cursor { if c.kind == ScopeKind::Fn { c.hoisted_symbols.borrow_mut().insert(ident.sym.clone()); break; } cursor = c.parent; mark = mark.parent(); } } Some(VarDeclKind::Let) | Some(VarDeclKind::Const) => { self.current .hoisted_symbols .borrow_mut() .insert(ident.sym.clone()); } } } else { self.current.declared_symbols.insert(ident.sym.clone()); } } ident.span = if mark == Mark::root() { ident.span } else { let span = ident.span.apply_mark(mark); if cfg!(debug_assertions) && LOG { eprintln!("\t-> {:?}", span.ctxt()); } span }; } } macro_rules! typed { ($name:ident, $T:ty) => { fn $name(&mut self, node: &mut $T) { if self.handle_types { self.in_type = true; node.visit_mut_children_with(self) } } }; } macro_rules! typed_ref { ($name:ident, $T:ty) => { fn $name(&mut self, node: &mut $T) { if self.handle_types { self.ident_type = IdentType::Ref; self.in_type = true; node.visit_mut_children_with(self) } } }; } macro_rules! typed_decl { ($name:ident, $T:ty) => { fn $name(&mut self, node: &mut $T) { if self.handle_types { self.ident_type = IdentType::Binding; self.in_type = true; node.visit_mut_children_with(self) } } }; } macro_rules! 
noop { ($name:ident, $T:ty) => { #[inline] fn $name(&mut self, _: &mut $T) {} }; } impl<'a> VisitMut for Resolver<'a> { noop!(visit_mut_accessibility, Accessibility); noop!(visit_mut_true_plus_minus, TruePlusMinus); noop!(visit_mut_ts_keyword_type, TsKeywordType); noop!(visit_mut_ts_keyword_type_kind, TsKeywordTypeKind); noop!(visit_mut_ts_type_operator_op, TsTypeOperatorOp); noop!(visit_mut_ts_enum_member_id, TsEnumMemberId); noop!(visit_mut_ts_external_module_ref, TsExternalModuleRef); noop!(visit_mut_ts_module_name, TsModuleName); noop!(visit_mut_ts_this_type, TsThisType); typed_ref!(visit_mut_ts_array_type, TsArrayType); typed_ref!(visit_mut_ts_conditional_type, TsConditionalType); typed_ref!(visit_mut_ts_entity_name, TsEntityName); typed_ref!( visit_mut_ts_type_param_instantiation, TsTypeParamInstantiation ); typed_ref!(visit_mut_ts_type_query, TsTypeQuery); typed_ref!(visit_mut_ts_type_query_expr, TsTypeQueryExpr); typed_ref!(visit_mut_ts_type_operator, TsTypeOperator); typed_ref!(visit_mut_ts_type_cast_expr, TsTypeCastExpr); typed_ref!(visit_mut_ts_type, TsType); typed_ref!(visit_mut_ts_type_ann, TsTypeAnn); typed_ref!(visit_mut_ts_type_assertion, TsTypeAssertion); typed!( visit_mut_ts_union_or_intersection_type, TsUnionOrIntersectionType ); typed!(visit_mut_ts_fn_or_constructor_type, TsFnOrConstructorType); typed_ref!(visit_mut_ts_union_type, TsUnionType); typed_ref!(visit_mut_ts_infer_type, TsInferType); typed_ref!(visit_mut_ts_mapped_type, TsMappedType); typed_ref!(visit_mut_ts_import_type, TsImportType); typed_ref!(visit_mut_ts_tuple_type, TsTupleType); typed_ref!(visit_mut_ts_intersection_type, TsIntersectionType); typed_ref!(visit_mut_ts_type_ref, TsTypeRef); typed_decl!(visit_mut_ts_type_param_decl, TsTypeParamDecl); typed!(visit_mut_ts_enum_member, TsEnumMember); typed!(visit_mut_ts_fn_param, TsFnParam); typed!(visit_mut_ts_indexed_access_type, TsIndexedAccessType); typed!(visit_mut_ts_index_signature, TsIndexSignature); typed!(visit_mut_ts_interface_body, TsInterfaceBody); typed!(visit_mut_ts_module_ref, TsModuleRef); typed!(visit_mut_ts_parenthesized_type, TsParenthesizedType); typed!(visit_mut_ts_type_lit, TsTypeLit); typed!(visit_mut_ts_type_element, TsTypeElement); typed!(visit_mut_ts_signature_decl, TsSignatureDecl); typed!(visit_mut_ts_module_block, TsModuleBlock); typed!(visit_mut_ts_namespace_body, TsNamespaceBody); typed!(visit_mut_ts_optional_type, TsOptionalType); typed!(visit_mut_ts_param_prop, TsParamProp); typed!(visit_mut_ts_rest_type, TsRestType); typed!(visit_mut_ts_type_predicate, TsTypePredicate); typed_ref!(visit_mut_ts_this_type_or_ident, TsThisTypeOrIdent); fn visit_mut_ts_tuple_element(&mut self, e: &mut TsTupleElement) { if !self.handle_types { return; } self.ident_type = IdentType::Ref; e.ty.visit_mut_with(self); } fn visit_mut_ts_type_params(&mut self, params: &mut Vec<TsTypeParam>) { for param in params.iter_mut() { self.in_type = true; param.name.visit_mut_with(self); } params.visit_mut_children_with(self); } fn visit_mut_ts_type_param(&mut self, param: &mut TsTypeParam) { if !self.handle_types { return; } self.in_type = true; param.name.visit_mut_with(self); let ident_type = self.ident_type; param.default.visit_mut_with(self); param.constraint.visit_mut_with(self); self.ident_type = ident_type; } fn visit_mut_ts_construct_signature_decl(&mut self, decl: &mut TsConstructSignatureDecl) { if !self.handle_types { return; } self.in_type = true; let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, 
Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); child.in_type = true; // order is important decl.type_params.visit_mut_with(&mut child); decl.params.visit_mut_with(&mut child); decl.type_ann.visit_mut_with(&mut child); } fn visit_mut_ts_constructor_type(&mut self, ty: &mut TsConstructorType) { if !self.handle_types { return; } self.in_type = true; let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); child.in_type = true; ty.type_params.visit_mut_with(&mut child); ty.params.visit_mut_with(&mut child); ty.type_ann.visit_mut_with(&mut child); } fn visit_mut_ts_enum_decl(&mut self, decl: &mut TsEnumDecl) { if !self.handle_types { return; } self.in_type = false; self.visit_mut_binding_ident(&mut decl.id, None); decl.members.visit_mut_with(self); } fn visit_mut_ts_fn_type(&mut self, ty: &mut TsFnType) { if !self.handle_types { return; } self.in_type = true; let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); child.in_type = true; ty.type_params.visit_mut_with(&mut child); ty.params.visit_mut_with(&mut child); ty.type_ann.visit_mut_with(&mut child); } fn visit_mut_ts_call_signature_decl(&mut self, n: &mut TsCallSignatureDecl) { if !self.handle_types { return; } self.in_type = true; let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); child.in_type = true; n.type_params.visit_mut_with(&mut child); n.params.visit_mut_with(&mut child); n.type_ann.visit_mut_with(&mut child); } fn visit_mut_ts_method_signature(&mut self, n: &mut TsMethodSignature) { if !self.handle_types { return; } self.in_type = true; let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); child.in_type = true; n.type_params.visit_mut_with(&mut child); n.key.visit_mut_with(&mut child); n.params.visit_mut_with(&mut child); n.type_ann.visit_mut_with(&mut child); } fn visit_mut_ts_property_signature(&mut self, n: &mut TsPropertySignature) { if !self.handle_types { return; } self.in_type = true; n.key.visit_mut_with(self); let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); child.in_type = true; n.type_params.visit_mut_with(&mut child); n.init.visit_mut_with(&mut child); n.params.visit_mut_with(&mut child); n.type_ann.visit_mut_with(&mut child); } fn visit_mut_ts_interface_decl(&mut self, n: &mut TsInterfaceDecl) { if !self.handle_types { return; } self.in_type = true; self.visit_mut_binding_ident(&mut n.id, None); let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); child.in_type = true; n.type_params.visit_mut_with(&mut child); n.extends.visit_mut_with(&mut child); n.body.visit_mut_with(&mut child); } fn visit_mut_ts_type_alias_decl(&mut self, n: &mut TsTypeAliasDecl) { if !self.handle_types { return; } self.in_type = true; self.visit_mut_binding_ident(&mut n.id, None); let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, 
Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); child.in_type = true; n.type_params.visit_mut_with(&mut child); n.type_ann.visit_mut_with(&mut child); } fn visit_mut_ts_import_equals_decl(&mut self, n: &mut TsImportEqualsDecl) { if !self.handle_types { return; } self.in_type = true; self.visit_mut_binding_ident(&mut n.id, None); n.module_ref.visit_mut_with(self); } fn visit_mut_ts_namespace_decl(&mut self, n: &mut TsNamespaceDecl) { if !self.handle_types { return; } self.in_type = true; self.visit_mut_binding_ident(&mut n.id, None); n.body.visit_mut_with(self); } fn visit_mut_ts_param_prop_param(&mut self, n: &mut TsParamPropParam) { if !self.handle_types { return; } self.in_type = false; self.ident_type = IdentType::Binding; n.visit_mut_children_with(self) } fn visit_mut_ts_qualified_name(&mut self, n: &mut TsQualifiedName) { if !self.handle_types { return; } self.in_type = true; self.ident_type = IdentType::Ref; n.left.visit_mut_with(self) } // TODO: How should I handle this? typed!(visit_mut_ts_namespace_export_decl, TsNamespaceExportDecl); track_ident_mut!(); fn visit_mut_arrow_expr(&mut self, e: &mut ArrowExpr) { let child_mark = Mark::fresh(self.mark); // Child folder let mut folder = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); e.type_params.visit_mut_with(&mut folder); let old_hoist = self.hoist; let old = folder.ident_type; folder.ident_type = IdentType::Binding; folder.hoist = false; e.params.visit_mut_with(&mut folder); folder.ident_type = old; folder.hoist = old_hoist; e.body.visit_mut_with(&mut folder); e.return_type.visit_mut_with(&mut folder); self.cur_defining = folder.cur_defining; } fn visit_mut_param(&mut self, param: &mut Param) { self.in_type = false; self.ident_type = IdentType::Binding; param.visit_mut_children_with(self); } fn visit_mut_for_stmt(&mut self, n: &mut ForStmt) { let child_mark = Mark::fresh(self.mark); let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Block, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); self.ident_type = IdentType::Binding; n.init.visit_mut_with(&mut child); self.ident_type = IdentType::Ref; n.test.visit_mut_with(&mut child); self.ident_type = IdentType::Ref; n.update.visit_mut_with(&mut child); child.visit_mut_stmt_within_same_scope(&mut *n.body); self.cur_defining = child.cur_defining; } fn visit_mut_for_of_stmt(&mut self, n: &mut ForOfStmt) { let child_mark = Mark::fresh(self.mark); let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Block, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); n.left.visit_mut_with(&mut child); n.right.visit_mut_with(&mut child); child.visit_mut_stmt_within_same_scope(&mut *n.body); self.cur_defining = child.cur_defining; } fn visit_mut_for_in_stmt(&mut self, n: &mut ForInStmt) { let child_mark = Mark::fresh(self.mark); let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Block, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); n.left.visit_mut_with(&mut child); n.right.visit_mut_with(&mut child); child.visit_mut_stmt_within_same_scope(&mut *n.body); self.cur_defining = child.cur_defining; } fn visit_mut_block_stmt(&mut self, block: &mut BlockStmt) { let child_mark = Mark::fresh(self.mark); let mut child_folder = Resolver::new( child_mark, Scope::new(ScopeKind::Block, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); block.visit_mut_children_with(&mut child_folder); 
self.cur_defining = child_folder.cur_defining; } /// Handle body of the arrow functions fn visit_mut_block_stmt_or_expr(&mut self, node: &mut BlockStmtOrExpr) { match node { BlockStmtOrExpr::BlockStmt(block) => block.visit_mut_children_with(self).into(), BlockStmtOrExpr::Expr(e) => e.visit_mut_with(self).into(), } } fn visit_mut_catch_clause(&mut self, c: &mut CatchClause) { let child_mark = Mark::fresh(self.mark); // Child folder let mut folder = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); folder.ident_type = IdentType::Binding; c.param.visit_mut_with(&mut folder); folder.ident_type = IdentType::Ref; c.body.visit_mut_with(&mut folder); self.cur_defining = folder.cur_defining; } fn visit_mut_class_method(&mut self, m: &mut ClassMethod) { m.key.visit_mut_with(self); { let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); m.function.visit_mut_with(&mut child) } } fn visit_mut_class_prop(&mut self, p: &mut ClassProp) { p.decorators.visit_mut_with(self); if p.computed { let old = self.ident_type; self.ident_type = IdentType::Binding; p.key.visit_mut_with(self); self.ident_type = old; } let old = self.ident_type; self.ident_type = IdentType::Ref; p.value.visit_mut_with(self); self.ident_type = old; p.type_ann.visit_mut_with(self); } fn visit_mut_constructor(&mut self, c: &mut Constructor) { let child_mark = Mark::fresh(self.mark); // Child folder let mut folder = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); let old = self.ident_type; self.ident_type = IdentType::Binding; c.params.visit_mut_with(&mut folder); self.ident_type = old; match &mut c.body { Some(body) => { body.visit_mut_children_with(&mut folder); } None => {} } } /// Leftmost one of a member expression should be resolved. fn visit_mut_member_expr(&mut self, e: &mut MemberExpr) { e.obj.visit_mut_with(self); if e.computed { e.prop.visit_mut_with(self); } } fn visit_mut_expr(&mut self, expr: &mut Expr) { self.in_type = false; let old = self.ident_type; self.ident_type = IdentType::Ref; expr.visit_mut_children_with(self); self.ident_type = old; } fn visit_mut_fn_decl(&mut self, node: &mut FnDecl) { // We don't fold this as Hoister handles this. 
{ let child_mark = Mark::fresh(self.mark); // Child folder let mut folder = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); folder.cur_defining = Some((node.ident.sym.clone(), node.ident.span.ctxt().remove_mark())); node.function.visit_mut_with(&mut folder) } } fn visit_mut_decl(&mut self, decl: &mut Decl) { self.in_type = false; decl.visit_mut_children_with(self) } fn visit_mut_fn_expr(&mut self, e: &mut FnExpr) { if let Some(ident) = &mut e.ident { self.visit_mut_binding_ident(ident, None) } let child_mark = Mark::fresh(self.mark); // Child folder let mut folder = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); e.function.visit_mut_with(&mut folder); self.cur_defining = folder.cur_defining; } fn visit_mut_function(&mut self, f: &mut Function) { f.type_params.visit_mut_with(self); self.in_type = false; self.ident_type = IdentType::Ref; f.decorators.visit_mut_with(self); self.ident_type = IdentType::Binding; f.params.visit_mut_with(self); self.ident_type = IdentType::Ref; match &mut f.body { Some(body) => { // Prevent creating new scope. body.visit_mut_children_with(self); } None => {} } f.return_type.visit_mut_with(self); } fn visit_mut_private_name(&mut self, _: &mut PrivateName) {} fn visit_mut_ident(&mut self, i: &mut Ident) { let ident_type = self.ident_type; let in_type = self.in_type; i.visit_mut_children_with(self); self.in_type = in_type; self.ident_type = ident_type; match self.ident_type { IdentType::Binding => self.visit_mut_binding_ident(i, None), IdentType::Ref => { let Ident { span, sym, .. } = i; if cfg!(debug_assertions) && LOG { eprintln!( "resolver: IdentRef (type = {}) {}{:?}", self.in_type, sym, span.ctxt() ); } if span.ctxt() != SyntaxContext::empty() { return; } if let Some(mark) = self.mark_for_ref(&sym) { let span = span.apply_mark(mark); if cfg!(debug_assertions) && LOG { eprintln!("\t -> {:?}", span.ctxt()); } i.span = span; } else { if cfg!(debug_assertions) && LOG { eprintln!("\t -> Unresolved"); } let mark = { let mut mark = self.mark; let mut cur = Some(&self.current); while let Some(scope) = cur { cur = scope.parent; if cur.is_none() { break; } mark = mark.parent(); } mark }; let span = span.apply_mark(mark); if cfg!(debug_assertions) && LOG { eprintln!("\t -> {:?}", span.ctxt()); } i.span = span; // Support hoisting self.visit_mut_binding_ident(i, None) } } // We currently does not touch labels IdentType::Label => {} } } fn visit_mut_import_named_specifier(&mut self, s: &mut ImportNamedSpecifier) { self.in_type = false; let old = self.ident_type; self.ident_type = IdentType::Binding; s.local.visit_mut_with(self); self.ident_type = old; } fn visit_mut_method_prop(&mut self, m: &mut MethodProp) { m.key.visit_mut_with(self); { let child_mark = Mark::fresh(self.mark); // Child folder let mut child = Resolver::new( child_mark, Scope::new(ScopeKind::Fn, Some(&self.current)), None, self.handle_types, ); m.function.visit_mut_with(&mut child) }; } fn visit_mut_object_lit(&mut self, o: &mut ObjectLit) { let child_mark = Mark::fresh(self.mark); let mut child_folder = Resolver::new( child_mark, Scope::new(ScopeKind::Block, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); o.visit_mut_children_with(&mut child_folder); self.cur_defining = child_folder.cur_defining; } fn visit_mut_pat(&mut self, p: &mut Pat) { self.in_type = false; let old = self.cur_defining.take(); p.visit_mut_children_with(self); self.cur_defining = old; 
} fn visit_mut_var_decl(&mut self, decl: &mut VarDecl) { self.in_type = false; let old_hoist = self.hoist;
self.hoist = old_hoist; } fn visit_mut_var_declarator(&mut self, decl: &mut VarDeclarator) { // order is important let old_defining = self.cur_defining.take(); let old_type = self.ident_type; self.ident_type = IdentType::Binding; decl.name.visit_mut_with(self); self.ident_type = old_type; let cur_name = match decl.name { Pat::Ident(Ident { ref sym, .. }) => Some((sym.clone(), self.mark)), _ => None, }; self.cur_defining = cur_name; decl.init.visit_mut_children_with(self); self.cur_defining = old_defining; } fn visit_mut_module_items(&mut self, stmts: &mut Vec<ModuleItem>) { if self.current.kind != ScopeKind::Fn { return stmts.visit_mut_children_with(self); } // Phase 1: Handle hoisting { let mut hoister = Hoister { resolver: self, kind: None, in_block: false, }; stmts.visit_mut_children_with(&mut hoister) } // Phase 2. stmts.visit_mut_children_with(self) } fn visit_mut_stmts(&mut self, stmts: &mut Vec<Stmt>) { // Phase 1: Handle hoisting { let mut hoister = Hoister { resolver: self, kind: None, in_block: false, }; stmts.visit_mut_children_with(&mut hoister) } // Phase 2. stmts.visit_mut_children_with(self) } fn visit_mut_ts_module_decl(&mut self, decl: &mut TsModuleDecl) { match &mut decl.id { TsModuleName::Ident(i) => { self.visit_mut_binding_ident(i, None); } TsModuleName::Str(_) => {} } let child_mark = Mark::fresh(self.mark); let mut child_folder = Resolver::new( child_mark, Scope::new(ScopeKind::Block, Some(&self.current)), self.cur_defining.take(), self.handle_types, ); decl.body.visit_mut_children_with(&mut child_folder); } } /// The folder which handles var / function hoisting. struct Hoister<'a, 'b> { resolver: &'a mut Resolver<'b>, kind: Option<VarDeclKind>, /// Hoister should not touch let / const in the block. in_block: bool, } impl VisitMut for Hoister<'_, '_> { noop_visit_mut_type!(); fn visit_mut_fn_decl(&mut self, node: &mut FnDecl) { self.resolver.in_type = false; self.resolver .visit_mut_binding_ident(&mut node.ident, Some(VarDeclKind::Var)); } #[inline] fn visit_mut_expr(&mut self, _: &mut Expr) {} #[inline] fn visit_mut_arrow_expr(&mut self, _: &mut ArrowExpr) {} #[inline] fn visit_mut_tagged_tpl(&mut self, _: &mut TaggedTpl) {} #[inline] fn visit_mut_tpl(&mut self, _: &mut Tpl) {} #[inline] fn visit_mut_function(&mut self, _: &mut Function) {} fn visit_mut_var_decl(&mut self, node: &mut VarDecl) { if self.in_block { match node.kind { VarDeclKind::Const | VarDeclKind::Let => return, _ => {} } } let old_kind = self.kind; self.kind = Some(node.kind); self.resolver.hoist = false; node.visit_mut_children_with(self); self.kind = old_kind; } #[inline] fn visit_mut_var_declarator(&mut self, node: &mut VarDeclarator) { node.name.visit_mut_with(self); } fn visit_mut_pat(&mut self, node: &mut Pat) { self.resolver.in_type = false; match node { Pat::Ident(i) => self.resolver.visit_mut_binding_ident(i, self.kind), _ => node.visit_mut_children_with(self), } } fn visit_mut_class_decl(&mut self, node: &mut ClassDecl) { self.resolver.in_type = false; self.resolver .visit_mut_binding_ident(&mut node.ident, Some(VarDeclKind::Let)); } #[inline] fn visit_mut_catch_clause(&mut self, _: &mut CatchClause) {} #[inline] fn visit_mut_pat_or_expr(&mut self, _: &mut PatOrExpr) {} #[inline] fn visit_mut_param(&mut self, _: &mut Param) {} #[inline] fn visit_mut_constructor(&mut self, _: &mut Constructor) {} fn visit_mut_var_decl_or_expr(&mut self, n: &mut VarDeclOrExpr) { match n { VarDeclOrExpr::VarDecl(VarDecl { kind: VarDeclKind::Let, .. 
}) | VarDeclOrExpr::VarDecl(VarDecl { kind: VarDeclKind::Const, .. }) => {} _ => { n.visit_mut_children_with(self); } } } fn visit_mut_var_decl_or_pat(&mut self, n: &mut VarDeclOrPat) { match n { VarDeclOrPat::VarDecl(VarDecl { kind: VarDeclKind::Let, .. }) | VarDeclOrPat::VarDecl(VarDecl { kind: VarDeclKind::Const, .. }) => {} // Hoister should not handle lhs of for in statement below // // const b = []; // { // let a; // for (a in b) { // console.log(a); // } // } VarDeclOrPat::Pat(..) => {} _ => { n.visit_mut_children_with(self); } } } fn visit_mut_block_stmt(&mut self, n: &mut BlockStmt) { let old_in_block = self.in_block; self.in_block = true; n.visit_mut_children_with(self); self.in_block = old_in_block; } }
self.hoist = VarDeclKind::Var == decl.kind; decl.decls.visit_mut_with(self);
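For readability, here is how this record's middle slots between its prefix and suffix: it completes visit_mut_var_decl, enabling hoisting only for "var" declarations before visiting the declarators. The snippet below is assembled purely from the record's own prefix, middle, and suffix; nothing outside the record is assumed.

fn visit_mut_var_decl(&mut self, decl: &mut VarDecl) {
    self.in_type = false;

    let old_hoist = self.hoist;
    // middle: `var` declarations are hoisted, `let`/`const` are not
    self.hoist = VarDeclKind::Var == decl.kind;
    decl.decls.visit_mut_with(self);
    // suffix resumes here, restoring the previous hoist flag
    self.hoist = old_hoist;
}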
arkgo-server.go
package main import ( "flag" "fmt" "io" "io/ioutil" "os" "github.com/gin-contrib/static" "github.com/gin-gonic/gin" "github.com/kristjank/ark-go/cmd/arkgoserver/api" log "github.com/sirupsen/logrus" "github.com/fatih/color" "github.com/spf13/viper" ) var router *gin.Engine var version = "master" func initServer(configFile string) { initLogger() loadConfig(configFile) api.InitGlobals(version) } func initLogger() { // Log as JSON instead of the default ASCII formatter. //log.SetFormatter(&log.JSONFormatter{}) // You could set this to any `io.Writer` such as a file file, err := os.OpenFile("log/arkgo-server.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0666) if err == nil { log.SetOutput(io.MultiWriter(file)) } else { log.Error("Failed to log to file, using default stderr") } } func loadConfig(configFile string) { log.Info("Using config properties from", configFile) viper.SetConfigName(configFile) // name of config file (without extension) viper.SetConfigName("config") // name of config file (without extension) viper.AddConfigPath("cfg") // path to look for the config file in viper.AddConfigPath("settings") // path to look for the config file in err := viper.ReadInConfig() // Find and read the config file if err != nil { log.Info("No productive config found - loading sample") // try to load sample config viper.SetConfigName("sample.config") viper.AddConfigPath("cfg") err := viper.ReadInConfig() if err != nil { // Handle errors reading the config file log.Fatal("No configuration file loaded - using defaults") } } viper.SetDefault("delegate.address", "") viper.SetDefault("delegate.pubkey", "") viper.SetDefault("voters.shareRatio", 0.0) viper.SetDefault("voters.txdescription", "share tx by ark-go") viper.SetDefault("voters.fidelity", true) viper.SetDefault("voters.fidelityLimit", 24) viper.SetDefault("voters.minamount", 0.0) viper.SetDefault("voters.deductTxFees", true) viper.SetDefault("voters.blocklist", "") viper.SetDefault("voters.capBalance", false) viper.SetDefault("voters.balanceCapAmount", 0.0) viper.SetDefault("voters.whitelist", "") viper.SetDefault("costs.address", "") viper.SetDefault("costs.shareRatio", 0.0) viper.SetDefault("costs.txdescription", "cost tx by ark-go") viper.SetDefault("reserve.address", "") viper.SetDefault("reserve.shareRatio", 0.0) viper.SetDefault("reserve.txdescription", "reserve tx by ark-go") viper.SetDefault("personal.address", "") viper.SetDefault("personal.shareRatio", 0.0) viper.SetDefault("personal.txdescription", "personal tx by ark-go") viper.SetDefault("server.network", "DEVNET") viper.SetDefault("server.address", "0.0.0.0") viper.SetDefault("server.port", 54000) viper.SetDefault("server.dbfilename", "payment.db") viper.SetDefault("server.nodeip", "") viper.SetDefault("server.autoconfigPeer", "") viper.SetDefault("web.frontend", false) viper.SetDefault("web.email", "") viper.SetDefault("web.slack", "") viper.SetDefault("web.reddit", "") viper.SetDefault("web.arkforum", "") viper.SetDefault("web.arknewsaddress", "") } //CORSMiddleware function enabling CORS requests func CORSMiddleware() gin.HandlerFunc
func initializeRoutes() { log.Info("Initializing routes") router.Use(CORSMiddleware()) // Group peer related routes together peerRoutes := router.Group("/voters") peerRoutes.Use(api.CheckServiceModelHandler()) { peerRoutes.GET("/rewards", api.GetVotersPendingRewards) peerRoutes.GET("/rewards/total", api.GetVoterEarningsTotal) peerRoutes.GET("/blocked", api.GetBlocked) peerRoutes.GET("", api.GetVotersList) } deleRoutes := router.Group("/delegate") deleRoutes.Use(api.CheckServiceModelHandler()) { deleRoutes.GET("", api.GetDelegate) deleRoutes.GET("/config", api.GetDelegateSharingConfig) deleRoutes.GET("/paymentruns", api.GetDelegatePaymentRecord) deleRoutes.GET("/paymentruns/details", api.GetDelegatePaymentRecordDetails) deleRoutes.GET("/nodestatus", api.GetDelegateNodeStatus) } serviceRoutes := router.Group("/service") serviceRoutes.Use(api.OnlyLocalCallAllowed()) { serviceRoutes.GET("/start", api.EnterServiceMode) serviceRoutes.GET("/stop", api.LeaveServiceMode) } socialRoutes := router.Group("/social") socialRoutes.Use(api.CheckServiceModelHandler()) { socialRoutes.GET("", api.GetArkNewsFromAddress) socialRoutes.GET("/info", api.GetDelegateSocialData) } proxyRoutes := router.Group("/proxy") proxyRoutes.Use(api.CheckServiceModelHandler()) { proxyRoutes.GET("/senddark", api.SendDARK) } if viper.GetBool("web.frontend") { router.Use(static.Serve("/", static.LocalFile("./public", true))) } } func printBanner() { color.Set(color.FgHiGreen) dat, _ := ioutil.ReadFile("cfg/banner.txt") fmt.Print(string(dat)) } /////////////////////////// func main() { // Reading input parameters configPtr := flag.String("config", "config", "Name of config file to use (without extension)") flag.Parse() initServer(*configPtr) printBanner() log.Info("..........ARKGO-DELEGATE-POOL-SERVER-STARTING............") // Set the router as the default one provided by Gin router = gin.Default() // Initialize the routes initializeRoutes() // Start serving the application pNodeInfo := fmt.Sprintf("%s:%d", viper.GetString("server.address"), viper.GetInt("server.port")) router.Run(pNodeInfo) }
{ return func(c *gin.Context) { c.Writer.Header().Set("Access-Control-Allow-Origin", "*") c.Writer.Header().Set("Access-Control-Max-Age", "86400") c.Writer.Header().Set("Access-Control-Allow-Methods", "GET") c.Writer.Header().Set("Access-Control-Allow-Headers", "Origin, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization") c.Writer.Header().Set("Access-Control-Expose-Headers", "Content-Length") c.Writer.Header().Set("Access-Control-Allow-Credentials", "true") if c.Request.Method == "OPTIONS" { c.AbortWithStatus(200) } else { c.Next() } } }
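Assembled, the middle supplies the body of CORSMiddleware, whose signature closes the prefix. The abbreviated form below is stitched from this record only, with the repeated header writes elided.

func CORSMiddleware() gin.HandlerFunc {
	return func(c *gin.Context) {
		c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
		// ... remaining Access-Control-* headers as in the middle ...
		if c.Request.Method == "OPTIONS" {
			c.AbortWithStatus(200)
		} else {
			c.Next()
		}
	}
}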
main.go
package main

import (
	"github.com/remogatto/application"
	"time"
)

// mainLoop must implement the Looper interface.
type mainLoop struct {
	*application.BaseLoop
	initialized chan int
	running     bool
	ticker      *time.Ticker
	durationCh  chan string
	startedCh   chan bool
}

// Run() runs the loop.
func (loop *mainLoop) Run() {
	loop.running = true
	for loop.running {
		select {

		// Send a value over the channel in order to
		// signal that the loop started.
		case loop.initialized <- 1:

		// A request to pause the loop is received.
		case <-loop.PauseCh:
			// do something or simply send back a value to
			// the pause channel.
			loop.PauseCh <- 0

		// A request to terminate the loop is received.
		case <-loop.TerminateCh:
			loop.running = false
			loop.TerminateCh <- 0

		// Receive a tick from the ticker.
		case <-loop.ticker.C:
			// Initiate the exit procedure.
			application.Exit()

		// Receive a duration string and create a proper
		// ticker from it.
		case durationStr := <-loop.durationCh:
			duration, err := time.ParseDuration(durationStr)
			if err != nil {
				panic("Error parsing a duration string.")
			}
			loop.ticker = time.NewTicker(duration)
			application.Logf("A new duration received. Running for %s...", durationStr)
		}
	}
}

func
() *mainLoop { return &mainLoop{ BaseLoop: application.NewBaseLoop(), initialized: make(chan int), durationCh: make(chan string), startedCh: make(chan bool), ticker: time.NewTicker(10 * time.Second), } } func main() { // Turn on verbose mode. application.Verbose = true // Create an instance of mainLoop. mainLoop := newMainLoop() // Register the loop under a name. application.Register("mainLoop", mainLoop) sendWrong := true // Run the registered loops on separate goroutines. go application.Run() // A control loop follows. The loop has the responsibility to // receive/send messages from/to the loops. for { select { case <-mainLoop.initialized: // As soon as the loop is initialized, send a // wrong duration string in order to raise an // error. if sendWrong { application.Logf("Sending a wrong duration to the mainLoop") mainLoop.durationCh <- "2 seconds" sendWrong = false } case <-application.ExitCh: // Catch the exit signal and print a last // message. application.Logf("Very last message before exiting.") // Exit from the control loop. return case err := <-application.ErrorCh: application.Printf("An error was received: \"%v\"\n", err) // Restart the loop. application.Start("mainLoop") // Send a correct duration string. mainLoop.durationCh <- "2s" } } }
newMainLoop
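Here the middle is only the constructor's name. Joined with the trailing "func" of the prefix and the opening of the suffix, the completion reads:

func newMainLoop() *mainLoop {
	return &mainLoop{
		BaseLoop:    application.NewBaseLoop(),
		initialized: make(chan int),
		durationCh:  make(chan string),
		startedCh:   make(chan bool),
		ticker:      time.NewTicker(10 * time.Second),
	}
}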
builds.rs
use super::{match_version, redirect_base, MatchSemver}; use crate::{ db::Pool, docbuilder::Limits, impl_webpage, web::{page::WebPage, MetaData}, }; use chrono::{DateTime, Utc}; use iron::{ headers::{ AccessControlAllowOrigin, CacheControl, CacheDirective, ContentType, Expires, HttpDate, }, status, IronResult, Request, Response, Url, }; use router::Router; use serde::Serialize; #[derive(Debug, Clone, PartialEq, Eq, Serialize)] pub(crate) struct
{ id: i32, rustc_version: String, docsrs_version: String, build_status: bool, build_time: DateTime<Utc>, } #[derive(Debug, Clone, PartialEq, Eq, Serialize)] struct BuildsPage { metadata: MetaData, builds: Vec<Build>, limits: Limits, } impl_webpage! { BuildsPage = "crate/builds.html", } pub fn build_list_handler(req: &mut Request) -> IronResult<Response> { let router = extension!(req, Router); let name = cexpect!(req, router.find("name")); let req_version = router.find("version"); let mut conn = extension!(req, Pool).get()?; let limits = ctry!(req, Limits::for_crate(&mut conn, name)); let version = match match_version(&mut conn, name, req_version).and_then(|m| m.assume_exact())? { MatchSemver::Exact((version, _)) => version, MatchSemver::Semver((version, _)) => { let url = ctry!( req, Url::parse(&format!( "{}/crate/{}/{}/builds", redirect_base(req), name, version )), ); return Ok(super::redirect(url)); } }; let query = ctry!( req, conn.query( "SELECT crates.name, releases.version, releases.description, releases.rustdoc_status, releases.target_name, builds.id, builds.rustc_version, builds.cratesfyi_version, builds.build_status, builds.build_time FROM builds INNER JOIN releases ON releases.id = builds.rid INNER JOIN crates ON releases.crate_id = crates.id WHERE crates.name = $1 AND releases.version = $2 ORDER BY id DESC", &[&name, &version] ) ); let builds: Vec<_> = query .into_iter() .map(|row| Build { id: row.get("id"), rustc_version: row.get("rustc_version"), docsrs_version: row.get("cratesfyi_version"), build_status: row.get("build_status"), build_time: row.get("build_time"), }) .collect(); if req.url.path().join("/").ends_with(".json") { let mut resp = Response::with((status::Ok, serde_json::to_string(&builds).unwrap())); resp.headers.set(ContentType::json()); resp.headers.set(Expires(HttpDate(time::now()))); resp.headers.set(CacheControl(vec![ CacheDirective::NoCache, CacheDirective::NoStore, CacheDirective::MustRevalidate, ])); resp.headers.set(AccessControlAllowOrigin::Any); Ok(resp) } else { BuildsPage { metadata: cexpect!(req, MetaData::from_crate(&mut conn, &name, &version)), builds, limits, } .into_response(req) } } #[cfg(test)] mod tests { use crate::test::{wrapper, FakeBuild}; use chrono::{DateTime, Duration, Utc}; use kuchiki::traits::TendrilSink; use reqwest::StatusCode; #[test] fn build_list() { wrapper(|env| { env.fake_release() .name("foo") .version("0.1.0") .builds(vec![ FakeBuild::default() .rustc_version("rustc 1.0.0") .docsrs_version("docs.rs 1.0.0"), FakeBuild::default() .successful(false) .rustc_version("rustc 2.0.0") .docsrs_version("docs.rs 2.0.0"), FakeBuild::default() .rustc_version("rustc 3.0.0") .docsrs_version("docs.rs 3.0.0"), ]) .create()?; let page = kuchiki::parse_html().one( env.frontend() .get("/crate/foo/0.1.0/builds") .send()? 
.text()?, ); let rows: Vec<_> = page .select("ul > li a.release") .unwrap() .map(|row| row.text_contents()) .collect(); assert!(rows[0].contains("rustc 3.0.0")); assert!(rows[0].contains("docs.rs 3.0.0")); assert!(rows[1].contains("rustc 2.0.0")); assert!(rows[1].contains("docs.rs 2.0.0")); assert!(rows[2].contains("rustc 1.0.0")); assert!(rows[2].contains("docs.rs 1.0.0")); Ok(()) }); } #[test] fn build_list_json() { wrapper(|env| { env.fake_release() .name("foo") .version("0.1.0") .builds(vec![ FakeBuild::default() .rustc_version("rustc 1.0.0") .docsrs_version("docs.rs 1.0.0"), FakeBuild::default() .successful(false) .rustc_version("rustc 2.0.0") .docsrs_version("docs.rs 2.0.0"), FakeBuild::default() .rustc_version("rustc 3.0.0") .docsrs_version("docs.rs 3.0.0"), ]) .create()?; let value: serde_json::Value = serde_json::from_str( &env.frontend() .get("/crate/foo/0.1.0/builds.json") .send()? .text()?, )?; assert_eq!(value.pointer("/0/build_status"), Some(&true.into())); assert_eq!( value.pointer("/0/docsrs_version"), Some(&"docs.rs 3.0.0".into()) ); assert_eq!( value.pointer("/0/rustc_version"), Some(&"rustc 3.0.0".into()) ); assert!(value.pointer("/0/id").unwrap().is_i64()); assert!(serde_json::from_value::<DateTime<Utc>>( value.pointer("/0/build_time").unwrap().clone() ) .is_ok()); assert_eq!(value.pointer("/1/build_status"), Some(&false.into())); assert_eq!( value.pointer("/1/docsrs_version"), Some(&"docs.rs 2.0.0".into()) ); assert_eq!( value.pointer("/1/rustc_version"), Some(&"rustc 2.0.0".into()) ); assert!(value.pointer("/1/id").unwrap().is_i64()); assert!(serde_json::from_value::<DateTime<Utc>>( value.pointer("/1/build_time").unwrap().clone() ) .is_ok()); assert_eq!(value.pointer("/2/build_status"), Some(&true.into())); assert_eq!( value.pointer("/2/docsrs_version"), Some(&"docs.rs 1.0.0".into()) ); assert_eq!( value.pointer("/2/rustc_version"), Some(&"rustc 1.0.0".into()) ); assert!(value.pointer("/2/id").unwrap().is_i64()); assert!(serde_json::from_value::<DateTime<Utc>>( value.pointer("/2/build_time").unwrap().clone() ) .is_ok()); assert!( value.pointer("/1/build_time").unwrap().as_str().unwrap() < value.pointer("/0/build_time").unwrap().as_str().unwrap() ); assert!( value.pointer("/2/build_time").unwrap().as_str().unwrap() < value.pointer("/1/build_time").unwrap().as_str().unwrap() ); Ok(()) }); } #[test] fn limits() { wrapper(|env| { env.fake_release().name("foo").version("0.1.0").create()?; env.db().conn().query( "INSERT INTO sandbox_overrides (crate_name, max_memory_bytes, timeout_seconds, max_targets) VALUES ($1, $2, $3, $4)", &[ &"foo", &3072i64, &(Duration::hours(2).num_seconds() as i32), &1, ], )?; let page = kuchiki::parse_html().one( env.frontend() .get("/crate/foo/0.1.0/builds") .send()? 
.text()?, ); let header = page.select(".about h4").unwrap().next().unwrap(); assert_eq!(header.text_contents(), "foo's sandbox limits"); let values: Vec<_> = page .select(".about table tr td:last-child") .unwrap() .map(|row| row.text_contents()) .collect(); let values: Vec<_> = values.iter().map(|v| &**v).collect(); dbg!(&values); assert!(values.contains(&"3 KB")); assert!(values.contains(&"2 hours")); assert!(values.contains(&"100 KB")); assert!(values.contains(&"blocked")); assert!(values.contains(&"1")); Ok(()) }); } #[test] fn latest_redirect() { wrapper(|env| { env.fake_release() .name("foo") .version("0.1.0") .builds(vec![FakeBuild::default() .rustc_version("rustc 1.0.0") .docsrs_version("docs.rs 1.0.0")]) .create()?; env.fake_release() .name("foo") .version("0.2.0") .builds(vec![FakeBuild::default() .rustc_version("rustc 1.0.0") .docsrs_version("docs.rs 1.0.0")]) .create()?; let resp = env.frontend().get("/crate/foo/latest/builds").send()?; assert!(resp.url().as_str().ends_with("/crate/foo/0.2.0/builds")); Ok(()) }); } #[test] fn crate_version_not_found() { wrapper(|env| { env.fake_release() .name("foo") .version("0.1.0") .builds(vec![FakeBuild::default() .rustc_version("rustc 1.0.0") .docsrs_version("docs.rs 1.0.0")]) .create()?; let resp = env.frontend().get("/crate/foo/0.2.0/builds").send()?; dbg!(resp.url().as_str()); assert!(resp.url().as_str().ends_with("/crate/foo/0.2.0/builds")); assert_eq!(resp.status(), StatusCode::NOT_FOUND); Ok(()) }); } #[test] fn invalid_semver() { wrapper(|env| { env.fake_release() .name("foo") .version("0.1.0") .builds(vec![FakeBuild::default() .rustc_version("rustc 1.0.0") .docsrs_version("docs.rs 1.0.0")]) .create()?; let resp = env.frontend().get("/crate/foo/0,1,0/builds").send()?; dbg!(resp.url().as_str()); assert!(resp.url().as_str().ends_with("/crate/foo/0,1,0/builds")); assert_eq!(resp.status(), StatusCode::NOT_FOUND); Ok(()) }); } }
Build
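The middle is just the struct's name, completing the declaration begun at the end of the prefix; assembled from the record:

#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
pub(crate) struct Build {
    id: i32,
    rustc_version: String,
    docsrs_version: String,
    build_status: bool,
    build_time: DateTime<Utc>,
}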
tests.rs
#![cfg(test)] #![cfg(not(debug_assertions))] use crate::doppelganger_service::DoppelgangerService; use crate::{ http_api::{ApiSecret, Config as HttpConfig, Context}, Config, InitializedValidators, ValidatorDefinitions, ValidatorStore, }; use account_utils::{ eth2_wallet::WalletBuilder, mnemonic_from_phrase, random_mnemonic, random_password, ZeroizeString, }; use deposit_contract::decode_eth1_tx_data; use environment::null_logger; use eth2::{ lighthouse_vc::{http_client::ValidatorClientHttpClient, types::*}, types::ErrorMessage as ApiErrorMessage, Error as ApiError, }; use eth2_keystore::KeystoreBuilder; use parking_lot::RwLock; use sensitive_url::SensitiveUrl; use slashing_protection::{SlashingDatabase, SLASHING_PROTECTION_FILENAME}; use slot_clock::{SlotClock, TestingSlotClock}; use std::future::Future; use std::marker::PhantomData; use std::net::Ipv4Addr; use std::sync::Arc; use std::time::Duration; use tempfile::{tempdir, TempDir}; use tokio::runtime::Runtime; use tokio::sync::oneshot; const PASSWORD_BYTES: &[u8] = &[42, 50, 37]; type E = MainnetEthSpec; struct ApiTester { client: ValidatorClientHttpClient, initialized_validators: Arc<RwLock<InitializedValidators>>, url: SensitiveUrl, _server_shutdown: oneshot::Sender<()>, _validator_dir: TempDir, } // Builds a runtime to be used in the testing configuration. fn build_runtime() -> Arc<Runtime> { Arc::new( tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .expect("Should be able to build a testing runtime"), ) } impl ApiTester { pub async fn new(runtime: std::sync::Weak<Runtime>) -> Self { let log = null_logger().unwrap(); let validator_dir = tempdir().unwrap(); let secrets_dir = tempdir().unwrap(); let validator_defs = ValidatorDefinitions::open_or_create(validator_dir.path()).unwrap(); let initialized_validators = InitializedValidators::from_definitions( validator_defs, validator_dir.path().into(), log.clone(), ) .await .unwrap(); let api_secret = ApiSecret::create_or_open(validator_dir.path()).unwrap(); let api_pubkey = api_secret.api_token(); let mut config = Config::default(); config.validator_dir = validator_dir.path().into(); config.secrets_dir = secrets_dir.path().into(); let spec = E::default_spec(); let slashing_db_path = config.validator_dir.join(SLASHING_PROTECTION_FILENAME); let slashing_protection = SlashingDatabase::open_or_create(&slashing_db_path).unwrap(); let slot_clock = TestingSlotClock::new(Slot::new(0), Duration::from_secs(0), Duration::from_secs(1)); let validator_store = ValidatorStore::<_, E>::new( initialized_validators, slashing_protection, Hash256::repeat_byte(42), spec, Some(Arc::new(DoppelgangerService::new(log.clone()))), slot_clock, log.clone(), ); validator_store .register_all_in_doppelganger_protection_if_enabled() .expect("Should attach doppelganger service"); let initialized_validators = validator_store.initialized_validators(); let context = Arc::new(Context { runtime, api_secret, validator_dir: Some(validator_dir.path().into()), validator_store: Some(Arc::new(validator_store)), spec: E::default_spec(), config: HttpConfig { enabled: true, listen_addr: Ipv4Addr::new(127, 0, 0, 1), listen_port: 0, allow_origin: None, }, log, _phantom: PhantomData, }); let ctx = context.clone(); let (shutdown_tx, shutdown_rx) = oneshot::channel(); let server_shutdown = async { // It's not really interesting why this triggered, just that it happened. 
let _ = shutdown_rx.await; }; let (listening_socket, server) = super::serve(ctx, server_shutdown).unwrap(); tokio::spawn(async { server.await }); let url = SensitiveUrl::parse(&format!( "http://{}:{}", listening_socket.ip(), listening_socket.port() )) .unwrap(); let client = ValidatorClientHttpClient::new(url.clone(), api_pubkey).unwrap(); Self { initialized_validators, _validator_dir: validator_dir, client, url, _server_shutdown: shutdown_tx, } } pub fn invalid_token_client(&self) -> ValidatorClientHttpClient { let tmp = tempdir().unwrap(); let api_secret = ApiSecret::create_or_open(tmp.path()).unwrap(); let invalid_pubkey = api_secret.api_token(); ValidatorClientHttpClient::new(self.url.clone(), invalid_pubkey.clone()).unwrap() } pub async fn test_with_invalid_auth<F, A, T>(self, func: F) -> Self where F: Fn(ValidatorClientHttpClient) -> A, A: Future<Output = Result<T, ApiError>>, { /* * Test with an invalid Authorization header. */ match func(self.invalid_token_client()).await { Err(ApiError::ServerMessage(ApiErrorMessage { code: 403, .. })) => (), Err(other) => panic!("expected authorized error, got {:?}", other), Ok(_) => panic!("expected authorized error, got Ok"), } /* * Test with a missing Authorization header. */ let mut missing_token_client = self.client.clone(); missing_token_client.send_authorization_header(false); match func(missing_token_client).await { Err(ApiError::ServerMessage(ApiErrorMessage { code: 400, message, .. })) if message.contains("missing Authorization header") => (), Err(other) => panic!("expected missing header error, got {:?}", other), Ok(_) => panic!("expected missing header error, got Ok"), } self } pub fn invalidate_api_token(mut self) -> Self { self.client = self.invalid_token_client(); self } pub async fn test_get_lighthouse_version_invalid(self) -> Self { self.client.get_lighthouse_version().await.unwrap_err(); self } pub async fn test_get_lighthouse_spec(self) -> Self { let result = self.client.get_lighthouse_spec().await.unwrap().data; let mut expected = ConfigAndPreset::from_chain_spec::<E>(&E::default_spec()); expected.make_backwards_compat(&E::default_spec()); assert_eq!(result, expected); self } pub async fn test_get_lighthouse_version(self) -> Self { let result = self.client.get_lighthouse_version().await.unwrap().data; let expected = VersionData { version: lighthouse_version::version_with_platform(), }; assert_eq!(result, expected); self } #[cfg(target_os = "linux")] pub async fn test_get_lighthouse_health(self) -> Self { self.client.get_lighthouse_health().await.unwrap(); self } #[cfg(not(target_os = "linux"))] pub async fn test_get_lighthouse_health(self) -> Self { self.client.get_lighthouse_health().await.unwrap_err(); self } pub fn vals_total(&self) -> usize { self.initialized_validators.read().num_total() } pub fn
(&self) -> usize { self.initialized_validators.read().num_enabled() } pub fn assert_enabled_validators_count(self, count: usize) -> Self { assert_eq!(self.vals_enabled(), count); self } pub fn assert_validators_count(self, count: usize) -> Self { assert_eq!(self.vals_total(), count); self } pub async fn create_hd_validators(self, s: HdValidatorScenario) -> Self { let initial_vals = self.vals_total(); let initial_enabled_vals = self.vals_enabled(); let validators = (0..s.count) .map(|i| ValidatorRequest { enable: !s.disabled.contains(&i), description: format!("boi #{}", i), graffiti: None, deposit_gwei: E::default_spec().max_effective_balance, }) .collect::<Vec<_>>(); let (response, mnemonic) = if s.specify_mnemonic { let mnemonic = ZeroizeString::from(random_mnemonic().phrase().to_string()); let request = CreateValidatorsMnemonicRequest { mnemonic: mnemonic.clone(), key_derivation_path_offset: s.key_derivation_path_offset, validators: validators.clone(), }; let response = self .client .post_lighthouse_validators_mnemonic(&request) .await .unwrap() .data; (response, mnemonic) } else { assert_eq!( s.key_derivation_path_offset, 0, "cannot use a derivation offset without specifying a mnemonic" ); let response = self .client .post_lighthouse_validators(validators.clone()) .await .unwrap() .data; (response.validators.clone(), response.mnemonic.clone()) }; assert_eq!(response.len(), s.count); assert_eq!(self.vals_total(), initial_vals + s.count); assert_eq!( self.vals_enabled(), initial_enabled_vals + s.count - s.disabled.len() ); let server_vals = self.client.get_lighthouse_validators().await.unwrap().data; assert_eq!(server_vals.len(), self.vals_total()); // Ensure the server lists all of these newly created validators. for validator in &response { assert!(server_vals .iter() .any(|server_val| server_val.voting_pubkey == validator.voting_pubkey)); } /* * Verify that we can regenerate all the keys from the mnemonic. 
*/ let mnemonic = mnemonic_from_phrase(mnemonic.as_str()).unwrap(); let mut wallet = WalletBuilder::from_mnemonic(&mnemonic, PASSWORD_BYTES, "".to_string()) .unwrap() .build() .unwrap(); wallet .set_nextaccount(s.key_derivation_path_offset) .unwrap(); for i in 0..s.count { let keypairs = wallet .next_validator(PASSWORD_BYTES, PASSWORD_BYTES, PASSWORD_BYTES) .unwrap(); let voting_keypair = keypairs.voting.decrypt_keypair(PASSWORD_BYTES).unwrap(); assert_eq!( response[i].voting_pubkey, voting_keypair.pk.clone().into(), "the locally generated voting pk should match the server response" ); let withdrawal_keypair = keypairs.withdrawal.decrypt_keypair(PASSWORD_BYTES).unwrap(); let deposit_bytes = serde_utils::hex::decode(&response[i].eth1_deposit_tx_data).unwrap(); let (deposit_data, _) = decode_eth1_tx_data(&deposit_bytes, E::default_spec().max_effective_balance) .unwrap(); assert_eq!( deposit_data.pubkey, voting_keypair.pk.clone().into(), "the locally generated voting pk should match the deposit data" ); assert_eq!( deposit_data.withdrawal_credentials, Hash256::from_slice(&bls::get_withdrawal_credentials( &withdrawal_keypair.pk, E::default_spec().bls_withdrawal_prefix_byte )), "the locally generated withdrawal creds should match the deposit data" ); assert_eq!( deposit_data.signature, deposit_data.create_signature(&voting_keypair.sk, &E::default_spec()), "the locally-generated deposit sig should create the same deposit sig" ); } self } pub async fn create_keystore_validators(self, s: KeystoreValidatorScenario) -> Self { let initial_vals = self.vals_total(); let initial_enabled_vals = self.vals_enabled(); let password = random_password(); let keypair = Keypair::random(); let keystore = KeystoreBuilder::new(&keypair, password.as_bytes(), String::new()) .unwrap() .build() .unwrap(); if !s.correct_password { let request = KeystoreValidatorsPostRequest { enable: s.enabled, password: String::from_utf8(random_password().as_ref().to_vec()) .unwrap() .into(), keystore, graffiti: None, }; self.client .post_lighthouse_validators_keystore(&request) .await .unwrap_err(); return self; } let request = KeystoreValidatorsPostRequest { enable: s.enabled, password: String::from_utf8(password.as_ref().to_vec()) .unwrap() .into(), keystore, graffiti: None, }; let response = self .client .post_lighthouse_validators_keystore(&request) .await .unwrap() .data; let num_enabled = s.enabled as usize; assert_eq!(self.vals_total(), initial_vals + 1); assert_eq!(self.vals_enabled(), initial_enabled_vals + num_enabled); let server_vals = self.client.get_lighthouse_validators().await.unwrap().data; assert_eq!(server_vals.len(), self.vals_total()); assert_eq!(response.voting_pubkey, keypair.pk.into()); assert_eq!(response.enabled, s.enabled); self } pub async fn set_validator_enabled(self, index: usize, enabled: bool) -> Self { let validator = &self.client.get_lighthouse_validators().await.unwrap().data[index]; self.client .patch_lighthouse_validators(&validator.voting_pubkey, enabled) .await .unwrap(); assert_eq!( self.initialized_validators .read() .is_enabled(&validator.voting_pubkey.decompress().unwrap()) .unwrap(), enabled ); assert!(self .client .get_lighthouse_validators() .await .unwrap() .data .into_iter() .find(|v| v.voting_pubkey == validator.voting_pubkey) .map(|v| v.enabled == enabled) .unwrap()); // Check the server via an individual request. 
assert_eq!( self.client .get_lighthouse_validators_pubkey(&validator.voting_pubkey) .await .unwrap() .unwrap() .data .enabled, enabled ); self } } struct HdValidatorScenario { count: usize, specify_mnemonic: bool, key_derivation_path_offset: u32, disabled: Vec<usize>, } struct KeystoreValidatorScenario { enabled: bool, correct_password: bool, } #[test] fn invalid_pubkey() { let runtime = build_runtime(); let weak_runtime = Arc::downgrade(&runtime); runtime.block_on(async { ApiTester::new(weak_runtime) .await .invalidate_api_token() .test_get_lighthouse_version_invalid() .await; }); } #[test] fn routes_with_invalid_auth() { let runtime = build_runtime(); let weak_runtime = Arc::downgrade(&runtime); runtime.block_on(async { ApiTester::new(weak_runtime) .await .test_with_invalid_auth(|client| async move { client.get_lighthouse_version().await }) .await .test_with_invalid_auth(|client| async move { client.get_lighthouse_health().await }) .await .test_with_invalid_auth(|client| async move { client.get_lighthouse_spec().await }) .await .test_with_invalid_auth( |client| async move { client.get_lighthouse_validators().await }, ) .await .test_with_invalid_auth(|client| async move { client .get_lighthouse_validators_pubkey(&PublicKeyBytes::empty()) .await }) .await .test_with_invalid_auth(|client| async move { client .post_lighthouse_validators(vec![ValidatorRequest { enable: <_>::default(), description: <_>::default(), graffiti: <_>::default(), deposit_gwei: <_>::default(), }]) .await }) .await .test_with_invalid_auth(|client| async move { client .post_lighthouse_validators_mnemonic(&CreateValidatorsMnemonicRequest { mnemonic: String::default().into(), key_derivation_path_offset: <_>::default(), validators: <_>::default(), }) .await }) .await .test_with_invalid_auth(|client| async move { let password = random_password(); let keypair = Keypair::random(); let keystore = KeystoreBuilder::new(&keypair, password.as_bytes(), String::new()) .unwrap() .build() .unwrap(); client .post_lighthouse_validators_keystore(&KeystoreValidatorsPostRequest { password: String::default().into(), enable: <_>::default(), keystore, graffiti: <_>::default(), }) .await }) .await .test_with_invalid_auth(|client| async move { client .patch_lighthouse_validators(&PublicKeyBytes::empty(), false) .await }) .await }); } #[test] fn simple_getters() { let runtime = build_runtime(); let weak_runtime = Arc::downgrade(&runtime); runtime.block_on(async { ApiTester::new(weak_runtime) .await .test_get_lighthouse_version() .await .test_get_lighthouse_health() .await .test_get_lighthouse_spec() .await; }); } #[test] fn hd_validator_creation() { let runtime = build_runtime(); let weak_runtime = Arc::downgrade(&runtime); runtime.block_on(async { ApiTester::new(weak_runtime) .await .assert_enabled_validators_count(0) .assert_validators_count(0) .create_hd_validators(HdValidatorScenario { count: 2, specify_mnemonic: true, key_derivation_path_offset: 0, disabled: vec![], }) .await .assert_enabled_validators_count(2) .assert_validators_count(2) .create_hd_validators(HdValidatorScenario { count: 1, specify_mnemonic: false, key_derivation_path_offset: 0, disabled: vec![0], }) .await .assert_enabled_validators_count(2) .assert_validators_count(3) .create_hd_validators(HdValidatorScenario { count: 0, specify_mnemonic: true, key_derivation_path_offset: 4, disabled: vec![], }) .await .assert_enabled_validators_count(2) .assert_validators_count(3); }); } #[test] fn validator_enabling() { let runtime = build_runtime(); let weak_runtime = 
Arc::downgrade(&runtime); runtime.block_on(async { ApiTester::new(weak_runtime) .await .create_hd_validators(HdValidatorScenario { count: 2, specify_mnemonic: false, key_derivation_path_offset: 0, disabled: vec![], }) .await .assert_enabled_validators_count(2) .assert_validators_count(2) .set_validator_enabled(0, false) .await .assert_enabled_validators_count(1) .assert_validators_count(2) .set_validator_enabled(0, true) .await .assert_enabled_validators_count(2) .assert_validators_count(2); }); } #[test] fn keystore_validator_creation() { let runtime = build_runtime(); let weak_runtime = Arc::downgrade(&runtime); runtime.block_on(async { ApiTester::new(weak_runtime) .await .assert_enabled_validators_count(0) .assert_validators_count(0) .create_keystore_validators(KeystoreValidatorScenario { correct_password: true, enabled: true, }) .await .assert_enabled_validators_count(1) .assert_validators_count(1) .create_keystore_validators(KeystoreValidatorScenario { correct_password: false, enabled: true, }) .await .assert_enabled_validators_count(1) .assert_validators_count(1) .create_keystore_validators(KeystoreValidatorScenario { correct_password: true, enabled: false, }) .await .assert_enabled_validators_count(1) .assert_validators_count(2); }); }
vals_enabled
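Joined together, the middle names the counterpart of vals_total defined just before the gap; the assembled method is:

pub fn vals_enabled(&self) -> usize {
    self.initialized_validators.read().num_enabled()
}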
memmove_linux_amd64_test.go
// Copyright 2013 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package runtime_test import ( "io/ioutil" "os" "reflect" "syscall" "testing" "unsafe" ) // TestMemmoveOverflow maps 3GB of memory and calls memmove on // the corresponding slice. func TestMemmoveOverflow(t *testing.T) { // Create a temporary file. tmp, err := ioutil.TempFile("", "go-memmovetest") if err != nil { t.Fatal(err) } _, err = tmp.Write(make([]byte, 65536)) if err != nil { t.Fatal(err) } defer os.Remove(tmp.Name()) defer tmp.Close() // Set up mappings. base, _, errno := syscall.Syscall6(syscall.SYS_MMAP, 0xa0<<32, 3<<30, syscall.PROT_READ|syscall.PROT_WRITE, syscall.MAP_PRIVATE|syscall.MAP_ANONYMOUS, ^uintptr(0), 0) if errno != 0 { t.Skipf("could not create memory mapping: %s", errno) } syscall.Syscall(syscall.SYS_MUNMAP, base, 3<<30, 0) for off := uintptr(0); off < 3<<30; off += 65536 { _, _, errno := syscall.Syscall6(syscall.SYS_MMAP, base+off, 65536, syscall.PROT_READ|syscall.PROT_WRITE, syscall.MAP_SHARED|syscall.MAP_FIXED, tmp.Fd(), 0) if errno != 0 { t.Fatalf("could not map a page at requested 0x%x: %s", base+off, errno) } defer syscall.Syscall(syscall.SYS_MUNMAP, base+off, 65536, 0) } var s []byte sp := (*reflect.SliceHeader)(unsafe.Pointer(&s)) sp.Data = base
t.Fatalf("copied %d bytes, expected %d", n, 3<<30-1) } n = copy(s, s[1:]) if n != 3<<30-1 { t.Fatalf("copied %d bytes, expected %d", n, 3<<30-1) } }
sp.Len, sp.Cap = 3<<30, 3<<30 n := copy(s[1:], s) if n != 3<<30-1 {
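The middle finishes the manually built slice header and starts the first of two overlapping copies; spliced with the suffix, the end of the test reads:

sp.Data = base
sp.Len, sp.Cap = 3<<30, 3<<30

// Overlapping copies across the full 3GB mapping, forward then backward.
n := copy(s[1:], s)
if n != 3<<30-1 {
	t.Fatalf("copied %d bytes, expected %d", n, 3<<30-1)
}
n = copy(s, s[1:])
if n != 3<<30-1 {
	t.Fatalf("copied %d bytes, expected %d", n, 3<<30-1)
}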
anchor_not_found_exception.go
package exception

type AnchorNotFoundException struct{}

func NewAnchorNotFoundException() AnchorNotFoundException
func (h AnchorNotFoundException) Error() string {
	return "Anchor not found"
}
{
	return AnchorNotFoundException{}
}
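Note the record's field order here: the suffix (the Error method) appears before the middle (the constructor body), so the assembled file actually reads:

package exception

type AnchorNotFoundException struct{}

func NewAnchorNotFoundException() AnchorNotFoundException {
	return AnchorNotFoundException{}
}

func (h AnchorNotFoundException) Error() string {
	return "Anchor not found"
}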
image_reader_test.go
/* * Copyright 2020 Sheaf Authors * * SPDX-License-Identifier: Apache-2.0 */ package remote import ( "fmt" "testing" "github.com/google/go-containerregistry/pkg/name" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/stretchr/testify/require" ) func
(t *testing.T) { cases := []struct { name string wantRef string fetcher *fakeFetcher options []ImageReaderOption wantErr bool }{ { name: "in general", fetcher: &fakeFetcher{ image: nil, err: nil, }, options: []ImageReaderOption{}, wantErr: false, }, { name: "insecure registry", fetcher: &fakeFetcher{ image: nil, err: nil, }, options: []ImageReaderOption{ WithInsecure(true), }, wantErr: false, }, { name: "fetcher failed", fetcher: &fakeFetcher{ image: nil, err: fmt.Errorf("error"), }, wantErr: true, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { options := append(tc.options, func(ir *ImageReader) { ir.fetcher = tc.fetcher }) ir := NewImageReader(options...) _, err := ir.Read("ref") if tc.wantErr { require.Error(t, err) return } require.NoError(t, err) require.Equal(t, "ref", tc.fetcher.requested.String()) }) } } type fakeFetcher struct { image v1.Image err error requested name.Reference } var _ Fetcher = &fakeFetcher{} func (f *fakeFetcher) Fetch(ref name.Reference) (v1.Image, error) { f.requested = ref return f.image, f.err }
TestImageReader_Read
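The middle is the test function's name; with the trailing "func" of the prefix it completes to:

func TestImageReader_Read(t *testing.T) {
	// table-driven cases follow, as in the suffix
}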
random_forest_SMOTE_bordeline_1.py
# -*- coding: utf-8 -*- # In this script we use a simple classifer called naive bayes and try to predict the violations. But before that we use # some methods to tackle the problem of our skewed dataset. :) # 11 May 2016 # @author: reyhane_askari # Universite de Montreal, DIRO import csv import numpy as np from sklearn.metrics import roc_curve, auc from sklearn.cross_validation import train_test_split from sklearn import metrics import pandas as pd from os import chdir, listdir from pandas import read_csv from os import path from random import randint, sample, seed from collections import OrderedDict from pandas import DataFrame, Series import numpy as np import csv import codecs import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt import matplotlib as mpl import seaborn as sns sns.set() import itertools from sklearn.decomposition import PCA from unbalanced_dataset import UnderSampler, NearMiss, CondensedNearestNeighbour, OneSidedSelection,\ NeighbourhoodCleaningRule, TomekLinks, ClusterCentroids, OverSampler, SMOTE,\ SMOTETomek, SMOTEENN, EasyEnsemble, BalanceCascade almost_black = '#262626' colnames = ['old_index','job_id', 'task_idx','sched_cls', 'priority', 'cpu_requested', 'mem_requested', 'disk', 'violation'] tain_path = r'/home/askrey/Dropbox/Project_step_by_step/3_create_database/csvs/frull_db_2.csv' X = pd.read_csv(tain_path, header = None, index_col = False ,names = colnames, skiprows = [0], usecols = [3,4,5,6,7]) y = pd.read_csv(tain_path, header = None, index_col = False ,names = colnames, skiprows = [0], usecols = [8]) y = y['violation'].values # X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.333, random_state=0) main_x = X.values main_y = y verbose = False ratio = float(np.count_nonzero(y==1)) / float(np.count_nonzero(y==0)) # 'SMOTE bordeline 1' bsmote1 = SMOTE(ratio=ratio, verbose=verbose, kind='borderline1') x, y = bsmote1.fit_transform(main_x, main_y) ratio = float(np.count_nonzero(y==1)) / float(np.count_nonzero(y==0)) X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=.333, random_state=0) from sklearn.ensemble import RandomForestClassifier from sklearn.cross_validation import cross_val_score clf = RandomForestClassifier(n_estimators=10) scores = cross_val_score(clf, X_test, y_test) y_pred = clf.fit(X_train, y_train).predict(X_test) y_score = clf.fit(X_train, y_train).predict_proba(X_test)[:,1] mean_accuracy = clf.fit(X_train, y_train).score(X_test,y_test,sample_weight=None) fpr, tpr, thresholds = metrics.roc_curve(y_test, y_score) roc_auc = auc(fpr, tpr) plt.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % roc_auc) plt.plot([0, 1], [0, 1], 'k--') plt.xlim([0.0, 1.0]) plt.ylim([0.0, 1.05]) plt.xlabel('False Positive Rate') plt.ylabel('True Positive Rate') plt.title('Receiver operating characteristic example') plt.legend(loc="lower right")
plt.savefig('/home/askrey/Dropbox/Project_step_by_step/5_simple_models/new_scripts/random_forest_SMOTE_bordeline_1.pdf')
helloworld.py
from minpiler.typeshed import M, message1 M.print('Hello world!')
message1.printFlush()
config.py
""" AI Challenger观点型问题阅读理解 config.py:配置文件,程序运行入口 @author: yuhaitao """ # -*- coding:utf-8 -*- import os import tensorflow as tf import data_process from main import train, test, dev from file_save import * from examine_dev import examine_dev flags = tf.flags os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3" train_file = os.path.join("file", "ai_challenger_oqmrc_trainingset.json") dev_file = os.path.join("file", "ai_challenger_oqmrc_validationset.json") test_file = os.path.join("file", "ai_challenger_oqmrc_testa.json") ''' train_file = os.path.join("file", "train_demo.json") dev_file = os.path.join("file", "val_demo.json") test_file = os.path.join("file", "test_demo.json")''' target_dir = "data" log_dir = "log/event" save_dir = "log/model" prediction_dir = "log/prediction" train_record_file = os.path.join(target_dir, "train.tfrecords") dev_record_file = os.path.join(target_dir, "dev.tfrecords") test_record_file = os.path.join(target_dir, "test.tfrecords") id2vec_file = os.path.join(target_dir, "id2vec.json") # id号->向量 word2id_file = os.path.join(target_dir, "word2id.json") # 词->id号 train_eval = os.path.join(target_dir, "train_eval.json") dev_eval = os.path.join(target_dir, "dev_eval.json") test_eval = os.path.join(target_dir, "test_eval.json") if not os.path.exists(target_dir): os.makedirs(target_dir) if not os.path.exists(log_di
_dir)
if not os.path.exists(save_dir):
    os.makedirs(save_dir)
if not os.path.exists(prediction_dir):
    os.makedirs(prediction_dir)

flags.DEFINE_string("mode", "train", "train/debug/test")
flags.DEFINE_string("gpu", "0", "0/1")
flags.DEFINE_string("experiment", "lalala", "save each run's model in a separate folder")
flags.DEFINE_string("model_name", "default", "select which model to use")

flags.DEFINE_string("target_dir", target_dir, "")
flags.DEFINE_string("log_dir", log_dir, "")
flags.DEFINE_string("save_dir", save_dir, "")
flags.DEFINE_string("prediction_dir", prediction_dir, "")
flags.DEFINE_string("train_file", train_file, "")
flags.DEFINE_string("dev_file", dev_file, "")
flags.DEFINE_string("test_file", test_file, "")
flags.DEFINE_string("train_record_file", train_record_file, "")
flags.DEFINE_string("dev_record_file", dev_record_file, "")
flags.DEFINE_string("test_record_file", test_record_file, "")
flags.DEFINE_string("train_eval_file", train_eval, "")
flags.DEFINE_string("dev_eval_file", dev_eval, "")
flags.DEFINE_string("test_eval_file", test_eval, "")
flags.DEFINE_string("word2id_file", word2id_file, "")
flags.DEFINE_string("id2vec_file", id2vec_file, "")

flags.DEFINE_integer("para_limit", 150, "Limit length for paragraph")
flags.DEFINE_integer("ques_limit", 30, "Limit length for question")
flags.DEFINE_integer("min_count", 1, "minimum occurrence count for embeddings")
flags.DEFINE_integer("embedding_size", 300, "the dimension of vector")
flags.DEFINE_integer("capacity", 15000, "Batch size of dataset shuffle")
flags.DEFINE_integer("num_threads", 4, "Number of threads in input pipeline")
# Train with cudnn for roughly a 6x speedup
flags.DEFINE_boolean("use_cudnn", True, "Whether to use cudnn (only for GPU)")
flags.DEFINE_boolean("is_bucket", False, "Whether to use bucketing")

flags.DEFINE_integer("batch_size", 64, "Batch size")
flags.DEFINE_integer("num_steps", 250000, "Number of steps")
flags.DEFINE_integer("checkpoint", 1000, "checkpoint for evaluation")
flags.DEFINE_integer("period", 500, "period to save batch loss")
flags.DEFINE_integer("val_num_batches", 150, "Num of batches for evaluation")
flags.DEFINE_float("init_learning_rate", 0.001, "Initial learning rate for Adam")
flags.DEFINE_float("init_emb_lr", 0., "")
flags.DEFINE_float("keep_prob", 0.7, "Keep prob in rnn")
flags.DEFINE_float("grad_clip", 5.0, "Global Norm gradient clipping rate")
flags.DEFINE_integer("hidden", 60, "Hidden size")  # best:128
flags.DEFINE_integer("patience", 5, "Patience for learning rate decay")
flags.DEFINE_string("optimizer", "Adam", "")
flags.DEFINE_string("loss_function", "default", "")
flags.DEFINE_boolean("use_dropout", True, "")


def main(_):
    config = flags.FLAGS
    os.environ["CUDA_VISIBLE_DEVICES"] = config.gpu  # select a single GPU
    if config.mode == "train":
        train(config)
    elif config.mode == "prepro":
        data_process.prepro(config)
    elif config.mode == "debug":
        config.num_steps = 2
        config.val_num_batches = 1
        config.checkpoint = 1
        config.period = 1
        train(config)
    elif config.mode == "test":
        test(config)
    elif config.mode == "examine":
        examine_dev(config)
    elif config.mode == "save_dev":
        save_dev(config)
    elif config.mode == "save_test":
        save_test(config)
    else:
        print("Unknown mode")
        exit(0)


if __name__ == "__main__":
    tf.app.run()
r): os.makedirs(log
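For context, a minimal self-contained sketch of the TF1 tf.flags pattern the config above relies on. The flag names here are illustrative, and it assumes a TensorFlow 1.x runtime:

import tensorflow as tf

flags = tf.flags
flags.DEFINE_string("mode", "train", "train/debug/test")
flags.DEFINE_integer("batch_size", 64, "Batch size")


def main(_):
    # Flags are parsed from argv by tf.app.run() before main is called,
    # e.g. `python demo.py --mode=debug --batch_size=8`.
    config = flags.FLAGS
    print(config.mode, config.batch_size)


if __name__ == "__main__":
    tf.app.run()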
chapter.py
from . import Base
from sqlalchemy import Column, Integer, Text, DateTime, ForeignKey
from datetime import datetime


class Chapter(Base):
    __tablename__ = "chapters"

    id = Column(Integer, primary_key=True, autoincrement=True)
    manga_id = Column(Integer, ForeignKey("manga.id"))
    chapter_no = Column(Integer)
    chapter_postfix = Column(Text)
    ordinal = Column(Integer)
    page_count = Column(Integer)
    title = Column(Text)
    version = Column(Integer)
    language_id = Column(Text)
    group_id = Column(Integer)
    date_added = Column(DateTime)
    ipfs_link = Column(Text)

    def to_dict(self):
return { "id" : self.id, "manga_id" : self.manga_id, "chapter_no" : self.chapter_no, "chapter_postfix" : self.chapter_postfix, "ordinal" : self.ordinal, "title" : self.title, "page_count" : self.page_count, "version" : self.version, "language_id" : self.language_id, "group_id" : self.group_id, "date_added" : int(self.date_added.timestamp()), "ipfs_link" : self.ipfs_link }
replicaset.go
package workload

import dyn "github.com/yametech/fuxi/pkg/kubernetes/client"

// ReplicaSet is kubernetes default resource replicaset
type ReplicaSet struct {
	WorkloadsResourceHandler // extended for workloadsResourceHandler
}

// NewReplicaSet exported
func
() *ReplicaSet {
	return &ReplicaSet{&defaultImplWorkloadsResourceHandler{
		dyn.ResourceReplicaSet,
	}}
}
NewReplicaSet
exists.go
// Copyright 2012-2014 Oliver Eilhard. All rights reserved.
// Use of this source code is governed by a MIT-license.
// See http://olivere.mit-license.org/license.txt for details.

package elastic

import (
	"fmt"
	"net/http"

	"github.com/olivere/elastic/uritemplates"
)

type ExistsService struct {
	client *Client
	index  string
	_type  string
	id     string
}

func
(client *Client) *ExistsService {
	builder := &ExistsService{
		client: client,
	}
	return builder
}

func (s *ExistsService) String() string {
	return fmt.Sprintf("exists([%v][%v][%v])", s.index, s._type, s.id)
}

func (s *ExistsService) Index(index string) *ExistsService {
	s.index = index
	return s
}

func (s *ExistsService) Type(_type string) *ExistsService {
	s._type = _type
	return s
}

func (s *ExistsService) Id(id string) *ExistsService {
	s.id = id
	return s
}

func (s *ExistsService) Do() (bool, error) {
	// Build url
	urls, err := uritemplates.Expand("/{index}/{type}/{id}", map[string]string{
		"index": s.index,
		"type":  s._type,
		"id":    s.id,
	})
	if err != nil {
		return false, err
	}

	// Set up a new request
	req, err := s.client.NewRequest("HEAD", urls)
	if err != nil {
		return false, err
	}

	// Get response
	res, err := s.client.c.Do((*http.Request)(req))
	if err != nil {
		return false, err
	}
	if res.StatusCode == 200 {
		return true, nil
	} else if res.StatusCode == 404 {
		return false, nil
	}
	return false, fmt.Errorf("elastic: got HTTP code %d when it should have been either 200 or 404", res.StatusCode)
}
NewExistsService
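The Do() method above implements the Elasticsearch document-existence check: issue an HTTP HEAD against /{index}/{type}/{id} and map status 200 to true and 404 to false. A rough equivalent sketched in Python with the requests library; the host, index, type, and id are placeholders:

import requests

# Placeholder URL; point at a real cluster/index/type/id to try it.
res = requests.head("http://localhost:9200/twitter/tweet/1")
if res.status_code == 200:
    exists = True
elif res.status_code == 404:
    exists = False
else:
    raise RuntimeError("unexpected HTTP code %d" % res.status_code)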
parameter_validation_generator.py
#!/usr/bin/python3 -i # # Copyright (c) 2015-2021 The Khronos Group Inc. # Copyright (c) 2015-2021 Valve Corporation # Copyright (c) 2015-2021 LunarG, Inc. # Copyright (c) 2015-2021 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Author: Dustin Graves <[email protected]> # Author: Mark Lobodzinski <[email protected]> # Author: Dave Houlton <[email protected]> import os,re,sys,string,json import xml.etree.ElementTree as etree from generator import * from collections import namedtuple from common_codegen import * # Helper for iterating over a list where each element is possibly a single element or another 1-dimensional list # Generates (setter, deleter, element) for each element where: # - element = the next element in the list # - setter(x) = a function that will set the entry in `lines` corresponding to `element` to `x` # - deleter() = a function that will delete the entry corresponding to `element` in `lines` def multi_string_iter(lines): for i, ul in enumerate(lines): if not isinstance(ul, list): def setter(x): lines[i] = x def deleter(): del(lines[i]) yield (setter, deleter, ul) else: for j, l in enumerate(lines[i]): def setter(x): lines[i][j] = x def deleter(): del(lines[i][j]) yield (setter, deleter, l) # ParameterValidationGeneratorOptions - subclass of GeneratorOptions. # # Adds options used by ParameterValidationOutputGenerator object during Parameter validation layer generation. # # Additional members # protectFile - True if multiple inclusion protection should be # generated (based on the filename) around the entire header. # protectFeature - True if #ifndef..#endif protection should be # generated around a feature interface in the header file. # genFuncPointers - True if function pointer typedefs should be # generated # protectProto - If conditional protection should be generated # around prototype declarations, set to either '#ifdef' # to require opt-in (#ifdef protectProtoStr) or '#ifndef' # to require opt-out (#ifndef protectProtoStr). Otherwise # set to None. # protectProtoStr - #ifdef/#ifndef symbol to use around prototype # declarations, if protectProto is set # apicall - string to use for the function declaration prefix, # such as APICALL on Windows. # apientry - string to use for the calling convention macro, # in typedefs, such as APIENTRY. # apientryp - string to use for the calling convention macro # in function pointer typedefs, such as APIENTRYP. 
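# The multi_string_iter helper defined above yields (setter, deleter, element)
# triples so callers can rewrite or drop entries of a possibly nested line list
# in place. A small demonstration on invented data (not from the source):
lines = ['a', ['b', 'c'], 'd']
for setter, deleter, elem in multi_string_iter(lines):
    if elem == 'b':
        setter('B')  # writes lines[1][0] via the indices captured in the closure
print(lines)  # ['a', ['B', 'c'], 'd']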
# indentFuncProto - True if prototype declarations should put each # parameter on a separate line # indentFuncPointer - True if typedefed function pointers should put each # parameter on a separate line # alignFuncParam - if nonzero and parameters are being put on a # separate line, align parameter names at the specified column class ParameterValidationGeneratorOptions(GeneratorOptions): def __init__(self, conventions = None, filename = None, directory = '.', genpath = None, apiname = 'vulkan', profile = None, versions = '.*', emitversions = '.*', defaultExtensions = 'vulkan', addExtensions = None, removeExtensions = None, emitExtensions = None, emitSpirv = None, sortProcedure = regSortFeatures, apicall = 'VKAPI_ATTR ', apientry = 'VKAPI_CALL ', apientryp = 'VKAPI_PTR *', indentFuncProto = True, indentFuncPointer = False, alignFuncParam = 48, expandEnumerants = False, valid_usage_path = ''): GeneratorOptions.__init__(self, conventions = conventions, filename = filename, directory = directory, genpath = genpath, apiname = apiname, profile = profile, versions = versions, emitversions = emitversions, defaultExtensions = defaultExtensions, addExtensions = addExtensions, removeExtensions = removeExtensions, emitExtensions = emitExtensions, emitSpirv = emitSpirv, sortProcedure = sortProcedure) self.apicall = apicall self.apientry = apientry self.apientryp = apientryp self.indentFuncProto = indentFuncProto self.indentFuncPointer = indentFuncPointer self.alignFuncParam = alignFuncParam self.expandEnumerants = expandEnumerants self.valid_usage_path = valid_usage_path # ParameterValidationOutputGenerator - subclass of OutputGenerator. # Generates param checker layer code. # # ---- methods ---- # ParamCheckerOutputGenerator(errFile, warnFile, diagFile) - args as for # OutputGenerator. Defines additional internal state. # ---- methods overriding base class ---- # beginFile(genOpts) # endFile() # beginFeature(interface, emit) # endFeature() # genType(typeinfo,name) # genStruct(typeinfo,name) # genGroup(groupinfo,name) # genEnum(enuminfo, name) # genCmd(cmdinfo) class ParameterValidationOutputGenerator(OutputGenerator): """Generate Parameter Validation code based on XML element attributes""" # This is an ordered list of sections in the header file. ALL_SECTIONS = ['command'] def __init__(self, errFile = sys.stderr, warnFile = sys.stderr, diagFile = sys.stdout): OutputGenerator.__init__(self, errFile, warnFile, diagFile) self.INDENT_SPACES = 4 self.declarations = [] inline_custom_source_preamble = """ """ # These functions have additional, custom-written checks in the utils cpp file. CodeGen will automatically add a call # to those functions of the form 'bool manual_PreCallValidateAPIName', where the 'vk' is dropped. # see 'manual_PreCallValidateCreateGraphicsPipelines' as an example. 
self.functions_with_manual_checks = [ 'vkCreateInstance', 'vkCreateDevice', 'vkCreateQueryPool', 'vkCreateRenderPass', 'vkCreateRenderPass2', 'vkCreateRenderPass2KHR', 'vkCreateBuffer', 'vkCreateImage', 'vkCreatePipelineLayout', 'vkCreateGraphicsPipelines', 'vkCreateComputePipelines', 'vkCreateRayTracingPipelinesNV', 'vkCreateRayTracingPipelinesKHR', 'vkCreateSampler', 'vkCreateDescriptorSetLayout', 'vkFreeDescriptorSets', 'vkUpdateDescriptorSets', 'vkBeginCommandBuffer', 'vkCmdSetViewport', 'vkCmdSetScissor', 'vkCmdSetLineWidth', 'vkCmdDrawIndirect', 'vkCmdDrawIndexedIndirect', 'vkCmdDrawMultiEXT', 'vkCmdDrawMultiIndexedEXT', 'vkCmdClearAttachments', 'vkCmdBindIndexBuffer', 'vkCmdCopyBuffer', 'vkCmdUpdateBuffer', 'vkCmdFillBuffer', 'vkCreateSwapchainKHR', 'vkCreateSharedSwapchainsKHR', 'vkQueuePresentKHR', 'vkCreateDescriptorPool', 'vkCmdDispatch', 'vkCmdDispatchIndirect', 'vkCmdDispatchBaseKHR', 'vkCmdPushDescriptorSetKHR', 'vkCmdSetExclusiveScissorNV', 'vkCmdSetViewportShadingRatePaletteNV', 'vkCmdSetCoarseSampleOrderNV', 'vkCmdDrawMeshTasksNV', 'vkCmdDrawMeshTasksIndirectNV', 'vkCmdDrawMeshTasksIndirectCountNV', 'vkAllocateMemory', 'vkCreateAccelerationStructureNV', 'vkCreateAccelerationStructureKHR', 'vkGetAccelerationStructureHandleNV', 'vkGetPhysicalDeviceImageFormatProperties', 'vkGetPhysicalDeviceImageFormatProperties2', 'vkGetPhysicalDeviceImageFormatProperties2KHR', 'vkCmdBuildAccelerationStructureNV', 'vkCreateFramebuffer', 'vkCmdSetLineStippleEXT', 'vkSetDebugUtilsObjectNameEXT', 'vkSetDebugUtilsObjectTagEXT', 'vkCmdSetViewportWScalingNV', 'vkAcquireNextImageKHR', 'vkAcquireNextImage2KHR', 'vkCmdBindTransformFeedbackBuffersEXT', 'vkCmdBeginTransformFeedbackEXT', 'vkCmdEndTransformFeedbackEXT', 'vkCmdDrawIndirectByteCountEXT', 'vkCreateSamplerYcbcrConversion', 'vkCreateSamplerYcbcrConversionKHR', 'vkImportSemaphoreFdKHR', 'vkCmdBindVertexBuffers', 'vkCreateImageView', 'vkCopyAccelerationStructureToMemoryKHR', 'vkCmdCopyAccelerationStructureToMemoryKHR', 'vkCopyAccelerationStructureKHR', 'vkCmdCopyAccelerationStructureKHR', 'vkCopyMemoryToAccelerationStructureKHR', 'vkCmdCopyMemoryToAccelerationStructureKHR', 'vkCmdDrawIndirectCount', 'vkCmdDrawIndirectCountKHR', 'vkCmdDrawIndexedIndirectCount', 'vkCmdDrawIndexedIndirectCountKHR', 'vkCmdWriteAccelerationStructuresPropertiesKHR', 'vkWriteAccelerationStructuresPropertiesKHR', 'vkGetRayTracingCaptureReplayShaderGroupHandlesKHR', 'vkCmdTraceRaysKHR', 'vkCmdTraceRaysNV', 'vkCmdTraceRaysIndirectKHR', 'vkCmdBuildAccelerationStructureIndirectKHR', 'vkGetDeviceAccelerationStructureCompatibilityKHR', 'vkCmdSetViewportWithCountEXT', 'vkCmdSetScissorWithCountEXT', 'vkCmdBindVertexBuffers2EXT', 'vkCmdCopyBuffer2KHR', 'vkCmdBuildAccelerationStructuresKHR', 'vkCmdBuildAccelerationStructuresIndirectKHR', 'vkBuildAccelerationStructuresKHR', 'vkGetAccelerationStructureBuildSizesKHR', 'vkCmdWriteAccelerationStructuresPropertiesNV', 'vkCreateDisplayModeKHR', 'vkCreatePrivateDataSlotEXT', 'vkCmdSetVertexInputEXT', 'vkCmdPushConstants', 'vkMergePipelineCaches', 'vkGetPhysicalDeviceVideoFormatPropertiesKHR', 'vkCmdClearColorImage', 'vkCmdBeginRenderPass', 'vkCmdBeginRenderPass2KHR', 'vkCmdBeginRenderPass2', 'vkCmdSetDiscardRectangleEXT', 'vkGetQueryPoolResults', 'vkCmdBeginConditionalRenderingEXT', 'vkCreateWin32SurfaceKHR' ] # Commands to ignore self.blacklist = [ 'vkGetInstanceProcAddr', 'vkGetDeviceProcAddr', 'vkEnumerateInstanceVersion', 'vkEnumerateInstanceLayerProperties', 'vkEnumerateInstanceExtensionProperties', 
'vkEnumerateDeviceLayerProperties', 'vkEnumerateDeviceExtensionProperties', 'vkGetDeviceGroupSurfacePresentModes2EXT' ] # Structure fields to ignore self.structMemberBlacklist = { 'VkWriteDescriptorSet' : ['dstSet'], 'VkAccelerationStructureGeometryKHR' :['geometry'] } # Validation conditions for some special case struct members that are conditionally validated self.structMemberValidationConditions = { 'VkPipelineColorBlendStateCreateInfo' : { 'logicOp' : '{}logicOpEnable == VK_TRUE' } } # Header version self.headerVersion = None # Internal state - accumulators for different inner block text self.validation = [] # Text comprising the main per-api parameter validation routines self.stypes = [] # Values from the VkStructureType enumeration self.structTypes = dict() # Map of Vulkan struct typename to required VkStructureType self.handleTypes = set() # Set of handle type names self.commands = [] # List of CommandData records for all Vulkan commands self.structMembers = [] # List of StructMemberData records for all Vulkan structs self.validatedStructs = dict() # Map of structs type names to generated validation code for that struct type self.enumRanges = set() # Set of enum names self.enum_values_definitions = dict() # [enum, string] containing enumerated type map definitions self.flag_values_definitions = dict() # [flag, string] containing flag type map definitions self.stype_version_dict = dict() # String containing structtype to version map data self.flags = set() # Map of flags typenames self.flagBits = dict() # Map of flag bits typename to list of values self.newFlags = set() # Map of flags typenames /defined in the current feature/ self.required_extensions = dict() # Dictionary of required extensions for each item in the current extension self.extension_type = '' # Type of active feature (extension), device or instance self.extension_names = dict() # Dictionary of extension names to extension name defines self.structextends_list = [] # List of extensions which extend another struct self.struct_feature_protect = dict() # Dictionary of structnames and FeatureExtraProtect strings self.valid_vuids = set() # Set of all valid VUIDs self.vuid_dict = dict() # VUID dictionary (from JSON) self.alias_dict = dict() # Dict of cmd|struct aliases self.header_file = False # Header file generation flag self.source_file = False # Source file generation flag self.instance_extension_list = '' # List of instance extension name defines self.device_extension_list = '' # List of device extension name defines self.returnedonly_structs = [] # List of structs with 'returnonly' attribute self.called_types = set() # Set of types called via function/struct - not in list == app never passes in to validate # Named tuples to store struct and command data self.CommandParam = namedtuple('CommandParam', ['type', 'name', 'ispointer', 'isstaticarray', 'isbool', 'israngedenum', 'isconst', 'isoptional', 'iscount', 'noautovalidity', 'len', 'extstructs', 'condition', 'cdecl']) self.CommandData = namedtuple('CommandData', ['name', 'params', 'cdecl', 'extension_type', 'result', 'promotion_info']) self.StructMemberData = namedtuple('StructMemberData', ['name', 'members']) # # Generate Copyright comment block for file def GenerateCopyright(self):
# # Increases the global indent variable def incIndent(self, indent): inc = ' ' * self.INDENT_SPACES if indent: return indent + inc return inc # # Decreases the global indent variable def decIndent(self, indent): if indent and (len(indent) > self.INDENT_SPACES): return indent[:-self.INDENT_SPACES] return '' # # Walk the JSON-derived dict and find all "vuid" key values def ExtractVUIDs(self, d): if hasattr(d, 'items'): for k, v in d.items(): if k == "vuid": yield v elif isinstance(v, dict): for s in self.ExtractVUIDs(v): yield s elif isinstance (v, list): for l in v: for s in self.ExtractVUIDs(l): yield s # # Called at file creation time def beginFile(self, genOpts): OutputGenerator.beginFile(self, genOpts) self.header_file = (genOpts.filename == 'parameter_validation.h') self.source_file = (genOpts.filename == 'parameter_validation.cpp') if not self.header_file and not self.source_file: print("Error: Output Filenames have changed, update generator source.\n") sys.exit(1) if self.source_file or self.header_file: # Output Copyright text s = self.GenerateCopyright() write(s, file=self.outFile) if self.header_file: return stype_map = '' stype_version_dict = dict() # Create contents of Structs->API version unordered map root = self.registry.reg for node in root.findall('feature'): version_name = node.get('name') version_name = version_name.replace('VK_', 'VK_API_') for enum_item in node.iter('enum'): if enum_item.get('extends') == "VkStructureType": struct_type_id = enum_item.get('name') self.stype_version_dict[struct_type_id] = version_name for extensions in root.findall('extensions'): for extension in extensions.findall('extension'): for entry in extension.iterfind('require/enum[@extends="VkStructureType"]'): alias = entry.get('alias') if alias is not None and (entry.get('comment') is None or 'typo' not in entry.get('comment')): self.stype_version_dict[alias] = extension.get('name') # Build map of structure type names to VkStructureType enum values # Find all types of category "struct" for struct in self.registry.tree.iterfind('types/type[@category="struct"]'): # Check if struct has member named "sType" of type "VkStructureType" which has values defined stype = struct.find('member[name="sType"][type="VkStructureType"][@values]') if stype is not None: # Store VkStructureType value for this type self.structTypes[struct.get('name')] = stype.get('values') self.valid_usage_path = genOpts.valid_usage_path vu_json_filename = os.path.join(self.valid_usage_path + os.sep, 'validusage.json') if os.path.isfile(vu_json_filename): json_file = open(vu_json_filename, 'r', encoding='utf-8') self.vuid_dict = json.load(json_file) json_file.close() if len(self.vuid_dict) == 0: print("Error: Could not find, or error loading %s/validusage.json\n", vu_json_filename) sys.exit(1) # # Build a set of all vuid text strings found in validusage.json for json_vuid_string in self.ExtractVUIDs(self.vuid_dict): self.valid_vuids.add(json_vuid_string) # # Headers write('#include "chassis.h"', file=self.outFile) self.newline() write('#include "stateless_validation.h"', file=self.outFile) self.newline() # # Called at end-time for final content output def endFile(self): if self.source_file: # C-specific self.newline() # Don't need flag/enum lists if app can never call it to be validated # But need to save everything as not all information is known until endFile() for flag, string in self.flag_values_definitions.items(): if flag == 'VkGeometryInstanceFlagsKHR': # only called in VkAccelerationStructureInstanceKHR which is never 
called anywhere explicitly continue flagBits = flag.replace('Flags', 'FlagBits') if flag in self.called_types or flagBits in self.called_types: write(string, file=self.outFile) for enum, string in self.enum_values_definitions.items(): if enum in self.called_types: write(string, file=self.outFile) self.newline() self.newline() api_func = 'bool StatelessValidation::CheckPromotedApiAgainstVulkanVersion(VkInstance instance, const char *api_name, const uint32_t promoted_version) const {\n' api_func += ' bool skip = false;\n' api_func += ' if (api_version < promoted_version) {\n' api_func += ' skip = LogError(instance,\n' api_func += ' kVUID_PVError_ApiVersionViolation, "Attemped to call %s() with an effective API version of %s"\n' api_func += ' "but this API was not promoted until version %s.", api_name, StringAPIVersion(api_version).c_str(),\n' api_func += ' StringAPIVersion(promoted_version).c_str());\n' api_func += ' }\n' api_func += ' return skip;\n' api_func += '}\n\n' api_func += 'bool StatelessValidation::CheckPromotedApiAgainstVulkanVersion(VkPhysicalDevice pdev, const char *api_name, const uint32_t promoted_version) const {\n' api_func += ' bool skip = false;\n' api_func += ' const auto &target_pdev = physical_device_properties_map.find(pdev);\n' api_func += ' if (target_pdev != physical_device_properties_map.end()) {\n' api_func += ' auto effective_api_version = std::min(target_pdev->second->apiVersion, api_version);\n' api_func += ' if (effective_api_version < promoted_version) {\n' api_func += ' skip = LogError(instance,\n' api_func += ' kVUID_PVError_ApiVersionViolation, "Attemped to call %s() with an effective API version of %s, "\n' api_func += ' "which is the minimum of version requested in pApplicationInfo (%s) and supported by this physical device (%s), "\n' api_func += ' "but this API was not promoted until version %s.", api_name, StringAPIVersion(effective_api_version).c_str(),\n' api_func += ' StringAPIVersion(api_version).c_str(), StringAPIVersion(target_pdev->second->apiVersion).c_str(),\n' api_func += ' StringAPIVersion(promoted_version).c_str());\n' api_func += ' }\n' api_func += ' }\n' api_func += ' return skip;\n' api_func += '}\n' write(api_func, file=self.outFile) pnext_handler = 'bool StatelessValidation::ValidatePnextStructContents(const char *api_name, const ParameterName &parameter_name,\n' pnext_handler += ' const VkBaseOutStructure* header, const char *pnext_vuid, bool is_physdev_api, bool is_const_param) const {\n' pnext_handler += ' bool skip = false;\n' pnext_handler += ' switch(header->sType) {\n' # Do some processing here to extract data from validatedstructs... 
for item in self.structextends_list: postProcSpec = {} postProcSpec['ppp'] = '' if not item else '{postProcPrefix}' postProcSpec['pps'] = '' if not item else '{postProcSuffix}' postProcSpec['ppi'] = '' if not item else '{postProcInsert}' pnext_case = '\n' pnext_check = '' protect = '' # Guard struct cases with feature ifdefs, if necessary if item in self.struct_feature_protect.keys(): protect = self.struct_feature_protect[item] pnext_case += '#ifdef %s\n' % protect pnext_case += ' // Validation code for %s structure members\n' % item pnext_case += ' case %s: { // Covers VUID-%s-sType-sType\n' % (self.structTypes[item], item) # pNext version/extension-enabled checks ver_info = '' struct_type = self.structTypes[item] if struct_type in self.stype_version_dict.keys(): ver_info = self.stype_version_dict[struct_type] else: struct_type[:-4] if struct_type[:-4] in self.stype_version_dict.values(): ver_info = self.stype_version_dict[struct_type[:-4]] else: ver_info = None api_check = False if ver_info is not None: if 'VK_API_VERSION_' in ver_info: api_check = True api_version = ver_info; pnext_check += ' if (api_version < %s) {\n' % ver_info pnext_check += ' skip |= LogError(\n' pnext_check += ' instance, pnext_vuid,\n' pnext_check += ' "%%s: Includes a pNext pointer (%%s) to a VkStructureType (%s) which was added in %s but the "\n' % (struct_type, ver_info) pnext_check += ' "current effective API version is %s.",\n' pnext_check += ' api_name, parameter_name.get_name().c_str(), StringAPIVersion(api_version).c_str());\n' pnext_check += ' }\n' else: # Dependent on enabled extension ext_name = ver_info ext_name_define = self.extension_names[ver_info] table_type = '' if ext_name_define in self.instance_extension_list: table_type = 'instance' elif ext_name_define in self.device_extension_list: table_type = 'device' else: print("Error in parameter_validation_generator.py CodeGen.") pnext_check += ' if (is_const_param) {\n' if table_type == 'device': pnext_check += f' if ((is_physdev_api && !SupportedByPdev(physical_device, {ext_name_define})) || (!is_physdev_api && !IsExtEnabled({table_type}_extensions.{ext_name.lower()}))) {{\n' else: pnext_check += ' if (!%s_extensions.%s) {\n' % (table_type, ext_name.lower()) pnext_check += ' skip |= LogError(\n' pnext_check += ' instance, pnext_vuid,\n' pnext_check += ' "%%s: Includes a pNext pointer (%%s) to a VkStructureType (%s), but its parent extension "\n' % struct_type pnext_check += ' "%s has not been enabled.",\n' % ext_name pnext_check += ' api_name, parameter_name.get_name().c_str());\n' pnext_check += ' }\n' pnext_check += ' }\n' pnext_check += '\n' expr = self.expandStructCode(item, item, 'structure->', '', ' ', [], postProcSpec) struct_validation_source = self.ScrubStructCode(expr) if struct_validation_source != '': pnext_check += ' if (is_const_param) {\n' struct_validation_source = ' %s *structure = (%s *) header;\n' % (item, item) + struct_validation_source struct_validation_source += ' }\n' pnext_case += '%s%s' % (pnext_check, struct_validation_source) pnext_case += ' } break;\n' if protect: pnext_case += '#endif // %s\n' % protect # Skip functions containing no validation if struct_validation_source or pnext_check != '': pnext_handler += pnext_case; else: pnext_handler += '\n // No Validation code for %s structure members -- Covers VUID-%s-sType-sType\n' % (item, item) pnext_handler += ' default:\n' pnext_handler += ' skip = false;\n' pnext_handler += ' }\n' pnext_handler += ' return skip;\n' pnext_handler += '}\n' write(pnext_handler, 
file=self.outFile) self.newline() ext_template = 'bool StatelessValidation::OutputExtensionError(const std::string &api_name, const std::string &extension_name) const {\n' ext_template += ' return LogError(instance,\n' ext_template += ' kVUID_PVError_ExtensionNotEnabled, "Attemped to call %s() but its required extension %s has not been enabled\\n",\n' ext_template += ' api_name.c_str(), extension_name.c_str());\n' ext_template += '}\n' write(ext_template, file=self.outFile) self.newline() commands_text = '\n'.join(self.validation) write(commands_text, file=self.outFile) self.newline() if self.header_file: # Output declarations and record intercepted procedures write('\n'.join(self.declarations), file=self.outFile) # Finish processing in superclass OutputGenerator.endFile(self) # # Processing at beginning of each feature or extension def beginFeature(self, interface, emit): # Start processing in superclass OutputGenerator.beginFeature(self, interface, emit) # C-specific # Accumulate includes, defines, types, enums, function pointer typedefs, end function prototypes separately for this # feature. They're only printed in endFeature(). self.headerVersion = None self.stypes = [] self.commands = [] self.structMembers = [] self.newFlags = set() self.featureExtraProtect = GetFeatureProtect(interface) # Get base list of extension dependencies for all items in this extension base_required_extensions = [] if "VK_VERSION_1" not in self.featureName: nameElem = interface[0][1] name = nameElem.get('name') # Save Name Define to get correct enable name later self.extension_names[self.featureName] = name # This extension is the first dependency for this command base_required_extensions.append(self.featureName) # Add any defined extension dependencies to the base dependency list for this extension requires = interface.get('requires') if requires is not None: base_required_extensions.extend(requires.split(',')) # Build dictionary of extension dependencies for each item in this extension self.required_extensions = dict() for require_element in interface.findall('require'): # Copy base extension dependency list required_extensions = list(base_required_extensions) # Add any additional extension dependencies specified in this require block additional_extensions = require_element.get('extension') if additional_extensions: required_extensions.extend(additional_extensions.split(',')) # Save full extension list for all named items for element in require_element.findall('*[@name]'): self.required_extensions[element.get('name')] = required_extensions # And note if this is an Instance or Device extension self.extension_type = interface.get('type') if interface.tag == 'extension': if interface.get('type') == 'instance': self.instance_extension_list += '%s, ' % GetNameDefine(interface) else: self.device_extension_list += '%s, ' % GetNameDefine(interface) # # Called at the end of each extension (feature) def endFeature(self): if self.header_file: return # C-specific # Actually write the interface to the output file. if (self.emit): # If type declarations are needed by other features based on this one, it may be necessary to suppress the ExtraProtect, # or move it below the 'for section...' loop. 
ifdef = '' if (self.featureExtraProtect is not None): ifdef = '#ifdef %s\n' % self.featureExtraProtect self.validation.append(ifdef) # Generate the struct member checking code from the captured data self.processStructMemberData() # Generate the command parameter checking code from the captured data self.processCmdData() # Write the declaration for the HeaderVersion if self.headerVersion: write('const uint32_t GeneratedVulkanHeaderVersion = {};'.format(self.headerVersion), file=self.outFile) # Write the declarations for the VkFlags values combining all flag bits for flag in sorted(self.newFlags): flagBits = flag.replace('Flags', 'FlagBits') if flagBits in self.flagBits: bits = self.flagBits[flagBits] decl = 'const {} All{} = {}'.format(flag, flagBits, bits[0]) for bit in bits[1:]: decl += '|' + bit decl += ';' self.flag_values_definitions[flag] = Guarded(self.featureExtraProtect, decl) endif = '\n' if (self.featureExtraProtect is not None): endif = '#endif // %s\n' % self.featureExtraProtect self.validation.append(endif) # Finish processing in superclass OutputGenerator.endFeature(self) # # Type generation def genType(self, typeinfo, name, alias): # record the name/alias pair if alias is not None: self.alias_dict[name]=alias OutputGenerator.genType(self, typeinfo, name, alias) typeElem = typeinfo.elem # If the type is a struct type, traverse the embedded <member> tags generating a structure. Otherwise, emit the tag text. category = typeElem.get('category') if (category == 'struct' or category == 'union'): self.genStruct(typeinfo, name, alias) elif (category == 'handle'): self.handleTypes.add(name) elif (category == 'bitmask'): self.flags.add(name) self.newFlags.add(name) elif (category == 'define'): if name == 'VK_HEADER_VERSION': nameElem = typeElem.find('name') self.headerVersion = noneStr(nameElem.tail).strip() # # Struct parameter check generation. # This is a special case of the <type> tag where the contents are interpreted as a set of <member> tags instead of freeform C # type declarations. The <member> tags are just like <param> tags - they are a declaration of a struct or union member. # Only simple member declarations are supported (no nested structs etc.) def genStruct(self, typeinfo, typeName, alias): if not self.source_file: return # alias has already been recorded in genType, above OutputGenerator.genStruct(self, typeinfo, typeName, alias) conditions = self.structMemberValidationConditions[typeName] if typeName in self.structMemberValidationConditions else None members = typeinfo.elem.findall('.//member') if self.featureExtraProtect is not None: self.struct_feature_protect[typeName] = self.featureExtraProtect # # Iterate over members once to get length parameters for arrays lens = set() for member in members: len = self.getLen(member) if len: lens.add(len) # # Generate member info membersInfo = [] returned_only = typeinfo.elem.attrib.get('returnedonly') is not None for member in members: # Get the member's type and name info = self.getTypeNameTuple(member) type = info[0] name = info[1] stypeValue = '' cdecl = self.makeCParamDecl(member, 0) ispointer = self.paramIsPointer(member) isconst = True if 'const' in cdecl else False # Store pointer/array/string info -- Check for parameter name in lens set iscount = False if name in lens: iscount = True # The pNext members are not tagged as optional, but are treated as optional for parameter NULL checks. Static array # members are also treated as optional to skip NULL pointer validation, as they won't be NULL. 
isstaticarray = self.paramIsStaticArray(member) isoptional = False if self.paramIsOptional(member) or (name == 'pNext') or (isstaticarray): isoptional = True # Determine if value should be ignored by code generation. noautovalidity = False if (member.attrib.get('noautovalidity') is not None) or ((typeName in self.structMemberBlacklist) and (name in self.structMemberBlacklist[typeName])): noautovalidity = True # Some types are marked as noautovalidity, but stateless_validation.h will still want them for manual validation noautovalidity_type_exceptions = [ "VkQueryPipelineStatisticFlags", "VkBorderColor" ] # Store all types that are from incoming calls if auto validity # non-const pointers don't have auto gen code as used for return values if (noautovalidity == False) or (type in noautovalidity_type_exceptions): if not returned_only and (not ispointer or isconst): self.called_types.add(type) structextends = False membersInfo.append(self.CommandParam(type=type, name=name, ispointer=ispointer, isstaticarray=isstaticarray, isbool=True if type == 'VkBool32' else False, israngedenum=True if type in self.enumRanges else False, isconst=isconst, isoptional=isoptional, iscount=iscount, noautovalidity=noautovalidity, len=self.getLen(member), extstructs=self.registry.validextensionstructs[typeName] if name == 'pNext' else None, condition=conditions[name] if conditions and name in conditions else None, cdecl=cdecl)) # If this struct extends another, keep its name in list for further processing if typeinfo.elem.attrib.get('structextends') is not None: self.structextends_list.append(typeName) # Returnedonly structs should have most of their members ignored -- on entry, we only care about validating the sType and # pNext members. Everything else will be overwritten by the callee. if returned_only: self.returnedonly_structs.append(typeName) membersInfo = [m for m in membersInfo if m.name in ('sType', 'pNext')] self.structMembers.append(self.StructMemberData(name=typeName, members=membersInfo)) # # Capture group (e.g. C "enum" type) info to be used for param check code generation. # These are concatenated together with other types. 
def genGroup(self, groupinfo, groupName, alias): if not self.source_file: return # record the name/alias pair if alias is not None: self.alias_dict[groupName]=alias OutputGenerator.genGroup(self, groupinfo, groupName, alias) groupElem = groupinfo.elem # Store the sType values if groupName == 'VkStructureType': for elem in groupElem.findall('enum'): self.stypes.append(elem.get('name')) elif 'FlagBits' in groupName: bits = [] for elem in groupElem.findall('enum'): if elem.get('supported') != 'disabled': bits.append(elem.get('name')) if bits: self.flagBits[groupName] = bits else: # Determine if begin/end ranges are needed (we don't do this for VkStructureType, which has a more finely grained check) expandName = re.sub(r'([0-9a-z_])([A-Z0-9][^A-Z0-9]?)',r'\1_\2',groupName).upper() expandPrefix = expandName expandSuffix = '' expandSuffixMatch = re.search(r'[A-Z][A-Z]+$',groupName) if expandSuffixMatch: expandSuffix = '_' + expandSuffixMatch.group() # Strip off the suffix from the prefix expandPrefix = expandName.rsplit(expandSuffix, 1)[0] isEnum = ('FLAG_BITS' not in expandPrefix) if isEnum: self.enumRanges.add(groupName) # Create definition for a list containing valid enum values for this enumerated type if self.featureExtraProtect is not None: enum_entry = '#ifdef %s\n' % self.featureExtraProtect else: enum_entry = '' enum_entry += 'const std::vector<%s> All%sEnums = {' % (groupName, groupName) for enum in groupElem: name = enum.get('name') if name is not None and enum.get('supported') != 'disabled': enum_entry += '%s, ' % name enum_entry += '};' if self.featureExtraProtect is not None: enum_entry += '\n#endif // %s' % self.featureExtraProtect self.enum_values_definitions[groupName] = enum_entry # # Capture command parameter info to be used for param check code generation. 
def genCmd(self, cmdinfo, name, alias): # record the name/alias pair if alias is not None: self.alias_dict[name]=alias OutputGenerator.genCmd(self, cmdinfo, name, alias) decls = self.makeCDecls(cmdinfo.elem) typedef = decls[1] typedef = typedef.split(')',1)[1] if self.header_file: if name not in self.blacklist: if (self.featureExtraProtect is not None): self.declarations += [ '#ifdef %s' % self.featureExtraProtect ] # Strip off 'vk' from API name decl = '%s%s' % ('bool PreCallValidate', decls[0].split("VKAPI_CALL vk")[1]) decl_terminator = ' const override;' if 'ValidationCache' in name: decl_terminator = ' const;' decl = str(decl).replace(';', decl_terminator) self.declarations += [ decl ] if (self.featureExtraProtect is not None): self.declarations += [ '#endif' ] if self.source_file: if name not in self.blacklist: params = cmdinfo.elem.findall('param') # Get list of array lengths lens = set() for param in params: len = self.getLen(param) if len: lens.add(len) # Get param info paramsInfo = [] for param in params: paramInfo = self.getTypeNameTuple(param) cdecl = self.makeCParamDecl(param, 0) ispointer = self.paramIsPointer(param) isconst = True if 'const' in cdecl else False # non-const pointers don't have auto gen code as used for return values if not ispointer or isconst: self.called_types.add(paramInfo[0]) # Check for parameter name in lens set iscount = False if paramInfo[1] in lens: iscount = True paramsInfo.append(self.CommandParam(type=paramInfo[0], name=paramInfo[1], ispointer=ispointer, isstaticarray=self.paramIsStaticArray(param), isbool=True if paramInfo[0] == 'VkBool32' else False, israngedenum=True if paramInfo[0] in self.enumRanges else False, isconst=isconst, isoptional=self.paramIsOptional(param), iscount=iscount, noautovalidity=True if param.attrib.get('noautovalidity') is not None else False, len=self.getLen(param), extstructs=None, condition=None, cdecl=cdecl)) # Save return value information, if any result_type = '' promotion_info = '' resultinfo = cmdinfo.elem.find('proto/type') if (resultinfo is not None and resultinfo.text != 'void'): result_type = resultinfo.text if "VK_VERSION" in self.featureName and "VK_VERSION_1_0" != self.featureName: if ('VkInstance' == paramsInfo[0].type or 'VkPhysicalDevice' == paramsInfo[0].type): promotion_info = [paramsInfo[0].name, self.featureName] self.commands.append(self.CommandData(name=name, params=paramsInfo, cdecl=self.makeCDecls(cmdinfo.elem)[0], extension_type=self.extension_type, result=result_type, promotion_info=promotion_info)) # # Check if the parameter passed in is a pointer def paramIsPointer(self, param): ispointer = 0 paramtype = param.find('type') if (paramtype.tail is not None) and ('*' in paramtype.tail): ispointer = paramtype.tail.count('*') elif paramtype.text[:4] == 'PFN_': # Treat function pointer typedefs as a pointer to a single value ispointer = 1 return ispointer # # Check if the parameter passed in is a static array def paramIsStaticArray(self, param): isstaticarray = 0 paramname = param.find('name') if (paramname.tail is not None) and ('[' in paramname.tail): isstaticarray = paramname.tail.count('[') return isstaticarray # # Check if the parameter passed in is optional # Returns a list of Boolean values for comma separated len attributes (len='false,true') def paramIsOptional(self, param): # See if the handle is optional isoptional = False # Simple, if it's optional, return true optString = param.attrib.get('optional') if optString: if optString == 'true': isoptional = True elif ',' in optString: opts = 
[] for opt in optString.split(','): val = opt.strip() if val == 'true': opts.append(True) elif val == 'false': opts.append(False) else: print('Unrecognized len attribute value',val) isoptional = opts return isoptional # # Check if the handle passed in is optional # Uses the same logic as ValidityOutputGenerator.isHandleOptional def isHandleOptional(self, param, lenParam): # Simple, if it's optional, return true if param.isoptional: return True # If no validity is being generated, it usually means that validity is complex and not absolute, so let's say yes. if param.noautovalidity: return True # If the parameter is an array and we haven't already returned, find out if any of the len parameters are optional if lenParam and lenParam.isoptional: return True return False # # Retrieve the value of the len tag def getLen(self, param): result = None # Default to altlen when available to avoid LaTeX markup if 'altlen' in param.attrib: len = param.attrib.get('altlen') else: len = param.attrib.get('len') if len and len != 'null-terminated': # Only first level is supported for multidimensional arrays. Conveniently, this also strips the trailing # 'null-terminated' from arrays of strings len = len.split(',')[0] # Convert scope notation to pointer access result = str(len).replace('::', '->') elif self.paramIsStaticArray(param): # For static arrays get length from inside [] array_match = re.search(r'\[(\d+)\]', param.find('name').tail) if array_match: result = array_match.group(1) return result # # Retrieve the type and name for a parameter def getTypeNameTuple(self, param): type = '' name = '' for elem in param: if elem.tag == 'type': type = noneStr(elem.text) elif elem.tag == 'name': name = noneStr(elem.text) return (type, name) # # Find a named parameter in a parameter list def getParamByName(self, params, name): for param in params: if param.name == name: return param return None # # Get the length paramater record for the specified length expression def getLenParam(self, params, length): # First check if any element of params matches length exactly lenParam = self.getParamByName(params, length) if not lenParam: # Otherwise, look for any elements of params that appear within length len_candidates = [p for p in params if re.search(r'\b{}\b'.format(p.name), length)] # 0 or 1 matches are expected, >1 would require a special case and/or explicit validation if len(len_candidates) == 0: lenParam = None elif len(len_candidates) == 1: lenParam = len_candidates[0] else: raise Exception('Cannot determine length parameter for len attribute value {}'.format(length)) return lenParam # # Convert a vulkan.h command declaration into a parameter_validation.h definition def getCmdDef(self, cmd): # Strip the trailing ';' and split into individual lines lines = cmd.cdecl[:-1].split('\n') cmd_hdr = '\n'.join(lines) return cmd_hdr # # Generate the code to check for a NULL dereference before calling the # validation function def genCheckedLengthCall(self, name, exprs): count = name.count('->') if count: checkedExpr = [] localIndent = '' elements = name.split('->') # Open the if expression blocks for i in range(0, count): checkedExpr.append(localIndent + 'if ({} != NULL) {{\n'.format('->'.join(elements[0:i+1]))) localIndent = self.incIndent(localIndent) # Add the validation expression for expr in exprs: checkedExpr.append(localIndent + expr) # Close the if blocks for i in range(0, count): localIndent = self.decIndent(localIndent) checkedExpr.append(localIndent + '}\n') return [checkedExpr] # No if statements were required 
return exprs # # Generate code to check for a specific condition before executing validation code def genConditionalCall(self, prefix, condition, exprs): checkedExpr = [] localIndent = '' formattedCondition = condition.format(prefix) checkedExpr.append(localIndent + 'if ({})\n'.format(formattedCondition)) checkedExpr.append(localIndent + '{\n') localIndent = self.incIndent(localIndent) for expr in exprs: checkedExpr.append(localIndent + expr) localIndent = self.decIndent(localIndent) checkedExpr.append(localIndent + '}\n') return [checkedExpr] # # Get VUID identifier from implicit VUID tag def GetVuid(self, name, suffix): vuid_string = 'VUID-%s-%s' % (name, suffix) vuid = "kVUIDUndefined" if '->' in vuid_string: return vuid if vuid_string in self.valid_vuids: vuid = "\"%s\"" % vuid_string else: if name in self.alias_dict: alias_string = 'VUID-%s-%s' % (self.alias_dict[name], suffix) if alias_string in self.valid_vuids: vuid = "\"%s\"" % alias_string return vuid # # Generate the sType check string def makeStructTypeCheck(self, prefix, value, lenValue, valueRequired, lenValueRequired, lenPtrRequired, funcPrintName, lenPrintName, valuePrintName, postProcSpec, struct_type_name): checkExpr = [] stype = self.structTypes[value.type] vuid_name = struct_type_name if struct_type_name is not None else funcPrintName stype_vuid = self.GetVuid(value.type, "sType-sType") param_vuid = self.GetVuid(vuid_name, "%s-parameter" % value.name) if lenValue: count_required_vuid = self.GetVuid(vuid_name, "%s-arraylength" % value.len) # This is an array of struct pointers if value.ispointer == 2: checkExpr.append('skip |= validate_struct_pointer_type_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, "{sv}", {pf}{ln}, {pf}{vn}, {sv}, {}, {}, {}, {}, {});\n'.format( funcPrintName, lenValueRequired, valueRequired, stype_vuid, param_vuid, count_required_vuid, ln=lenValue.name, ldn=lenPrintName, dn=valuePrintName, vn=value.name, sv=stype, pf=prefix, **postProcSpec)) # This is an array with a pointer to a count value elif lenValue.ispointer: # When the length parameter is a pointer, there is an extra Boolean parameter in the function call to indicate if it is required checkExpr.append('skip |= validate_struct_type_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, "{sv}", {pf}{ln}, {pf}{vn}, {sv}, {}, {}, {}, {}, {}, {});\n'.format( funcPrintName, lenPtrRequired, lenValueRequired, valueRequired, stype_vuid, param_vuid, count_required_vuid, ln=value.len, ldn=lenPrintName, dn=valuePrintName, vn=value.name, sv=stype, pf=prefix, **postProcSpec)) # This is an array with an integer count value else: checkExpr.append('skip |= validate_struct_type_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, "{sv}", {pf}{ln}, {pf}{vn}, {sv}, {}, {}, {}, {}, {});\n'.format( funcPrintName, lenValueRequired, valueRequired, stype_vuid, param_vuid, count_required_vuid, ln=value.len, ldn=lenPrintName, dn=valuePrintName, vn=value.name, sv=stype, pf=prefix, **postProcSpec)) # This is an individual struct else: checkExpr.append('skip |= validate_struct_type("{}", {ppp}"{}"{pps}, "{sv}", {}{vn}, {sv}, {}, {}, {});\n'.format( funcPrintName, valuePrintName, prefix, valueRequired, param_vuid, stype_vuid, vn=value.name, sv=stype, vt=value.type, **postProcSpec)) return checkExpr # # Generate the handle check string def makeHandleCheck(self, prefix, value, lenValue, valueRequired, lenValueRequired, funcPrintName, lenPrintName, valuePrintName, postProcSpec): checkExpr = [] if lenValue: if lenValue.ispointer: # This is assumed to be an output array with a pointer 
to a count value raise('Unsupported parameter validation case: Output handle array elements are not NULL checked') else: count_required_vuid = self.GetVuid(funcPrintName, "%s-arraylength" % (value.len)) # This is an array with an integer count value checkExpr.append('skip |= validate_handle_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, {pf}{ln}, {pf}{vn}, {}, {}, {});\n'.format( funcPrintName, lenValueRequired, valueRequired, count_required_vuid, ln=value.len, ldn=lenPrintName, dn=valuePrintName, vn=value.name, pf=prefix, **postProcSpec)) else: # This is assumed to be an output handle pointer raise('Unsupported parameter validation case: Output handles are not NULL checked') return checkExpr # # Generate check string for an array of VkFlags values def makeFlagsArrayCheck(self, prefix, value, lenValue, valueRequired, lenValueRequired, funcPrintName, lenPrintName, valuePrintName, postProcSpec): checkExpr = [] flagBitsName = value.type.replace('Flags', 'FlagBits') if not flagBitsName in self.flagBits: raise('Unsupported parameter validation case: array of reserved VkFlags') else: allFlags = 'All' + flagBitsName checkExpr.append('skip |= validate_flags_array("{}", {ppp}"{}"{pps}, {ppp}"{}"{pps}, "{}", {}, {pf}{}, {pf}{}, {}, {});\n'.format(funcPrintName, lenPrintName, valuePrintName, flagBitsName, allFlags, value.len, value.name, lenValueRequired, valueRequired, pf=prefix, **postProcSpec)) return checkExpr # # Generate pNext check string def makeStructNextCheck(self, prefix, value, funcPrintName, valuePrintName, postProcSpec, struct_type_name): checkExpr = [] # Generate an array of acceptable VkStructureType values for pNext extStructCount = 0 extStructVar = 'NULL' extStructNames = 'NULL' pNextVuid = self.GetVuid(struct_type_name, "pNext-pNext") sTypeVuid = self.GetVuid(struct_type_name, "sType-unique") if value.extstructs: extStructVar = 'allowed_structs_{}'.format(struct_type_name) extStructCount = 'ARRAY_SIZE({})'.format(extStructVar) extStructNames = '"' + ', '.join(value.extstructs) + '"' checkExpr.append('const VkStructureType {}[] = {{ {} }};\n'.format(extStructVar, ', '.join([self.structTypes[s] for s in value.extstructs]))) checkExpr.append('skip |= validate_struct_pnext("{}", {ppp}"{}"{pps}, {}, {}{}, {}, {}, GeneratedVulkanHeaderVersion, {}, {});\n'.format( funcPrintName, valuePrintName, extStructNames, prefix, value.name, extStructCount, extStructVar, pNextVuid, sTypeVuid, **postProcSpec)) return checkExpr # # Generate the pointer check string def makePointerCheck(self, prefix, value, lenValue, valueRequired, lenValueRequired, lenPtrRequired, funcPrintName, lenPrintName, valuePrintName, postProcSpec, struct_type_name): checkExpr = [] vuid_tag_name = struct_type_name if struct_type_name is not None else funcPrintName if lenValue: length_deref = '->' in value.len count_required_vuid = self.GetVuid(vuid_tag_name, "%s-arraylength" % (value.len)) array_required_vuid = self.GetVuid(vuid_tag_name, "%s-parameter" % (value.name)) # TODO: Remove workaround for missing optional tag in vk.xml if array_required_vuid == '"VUID-VkFramebufferCreateInfo-pAttachments-parameter"': return [] # This is an array with a pointer to a count value if lenValue.ispointer and not length_deref: # If count and array parameters are optional, there will be no validation if valueRequired == 'true' or lenPtrRequired == 'true' or lenValueRequired == 'true': # When the length parameter is a pointer, there is an extra Boolean parameter in the function call to indicate if it is required checkExpr.append('skip |= 
validate_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, {pf}{ln}, &{pf}{vn}, {}, {}, {}, {}, {});\n'.format( funcPrintName, lenPtrRequired, lenValueRequired, valueRequired, count_required_vuid, array_required_vuid, ln=value.len, ldn=lenPrintName, dn=valuePrintName, vn=value.name, pf=prefix, **postProcSpec)) # This is an array with an integer count value else: # If count and array parameters are optional, there will be no validation if valueRequired == 'true' or lenValueRequired == 'true': if value.type != 'char': # A valid VU can't use '->' in the middle so the generated VUID from the spec uses '::' instead count_required_vuid = self.GetVuid(vuid_tag_name, "%s-arraylength" % (value.len.replace('->', '::'))) checkExpr.append('skip |= validate_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, {pf}{ln}, &{pf}{vn}, {}, {}, {}, {});\n'.format( funcPrintName, lenValueRequired, valueRequired, count_required_vuid, array_required_vuid, ln=value.len, ldn=lenPrintName, dn=valuePrintName, vn=value.name, pf=prefix, **postProcSpec)) else: # Arrays of strings receive special processing checkExpr.append('skip |= validate_string_array("{}", {ppp}"{ldn}"{pps}, {ppp}"{dn}"{pps}, {pf}{ln}, {pf}{vn}, {}, {}, {}, {});\n'.format( funcPrintName, lenValueRequired, valueRequired, count_required_vuid, array_required_vuid, ln=value.len, ldn=lenPrintName, dn=valuePrintName, vn=value.name, pf=prefix, **postProcSpec)) if checkExpr: if lenValue and length_deref: # Add checks to ensure the validation call does not dereference a NULL pointer to obtain the count checkExpr = self.genCheckedLengthCall(value.len, checkExpr) # This is an individual struct that is not allowed to be NULL elif not value.isoptional: # Function pointers need a reinterpret_cast to void* ptr_required_vuid = self.GetVuid(vuid_tag_name, "%s-parameter" % (value.name)) if value.type[:4] == 'PFN_': allocator_dict = {'pfnAllocation': '"VUID-VkAllocationCallbacks-pfnAllocation-00632"', 'pfnReallocation': '"VUID-VkAllocationCallbacks-pfnReallocation-00633"', 'pfnFree': '"VUID-VkAllocationCallbacks-pfnFree-00634"', } vuid = allocator_dict.get(value.name) if vuid is not None: ptr_required_vuid = vuid checkExpr.append('skip |= validate_required_pointer("{}", {ppp}"{}"{pps}, reinterpret_cast<const void*>({}{}), {});\n'.format(funcPrintName, valuePrintName, prefix, value.name, ptr_required_vuid, **postProcSpec)) else: checkExpr.append('skip |= validate_required_pointer("{}", {ppp}"{}"{pps}, {}{}, {});\n'.format(funcPrintName, valuePrintName, prefix, value.name, ptr_required_vuid, **postProcSpec)) else: # Special case for optional internal allocation function pointers. if (value.type, value.name) == ('PFN_vkInternalAllocationNotification', 'pfnInternalAllocation'): checkExpr.extend(self.internalAllocationCheck(funcPrintName, prefix, value.name, 'pfnInternalFree', postProcSpec)) elif (value.type, value.name) == ('PFN_vkInternalFreeNotification', 'pfnInternalFree'): checkExpr.extend(self.internalAllocationCheck(funcPrintName, prefix, value.name, 'pfnInternalAllocation', postProcSpec)) return checkExpr # # Generate internal allocation function pointer check. 
def internalAllocationCheck(self, funcPrintName, prefix, name, complementaryName, postProcSpec): checkExpr = [] vuid = '"VUID-VkAllocationCallbacks-pfnInternalAllocation-00635"' checkExpr.append('if ({}{} != NULL)'.format(prefix, name)) checkExpr.append('{') local_indent = self.incIndent('') # Function pointers need a reinterpret_cast to void* checkExpr.append(local_indent + 'skip |= validate_required_pointer("{}", {ppp}"{}{}"{pps}, reinterpret_cast<const void*>({}{}), {});\n'.format(funcPrintName, prefix, complementaryName, prefix, complementaryName, vuid, **postProcSpec)) checkExpr.append('}\n') return checkExpr # # Process struct member validation code, performing name substitution if required def processStructMemberCode(self, line, funcName, memberNamePrefix, memberDisplayNamePrefix, postProcSpec): # Build format specifier list kwargs = {} if '{postProcPrefix}' in line: # If we have a tuple that includes a format string and format parameters, need to use ParameterName class if type(memberDisplayNamePrefix) is tuple: kwargs['postProcPrefix'] = 'ParameterName(' else: kwargs['postProcPrefix'] = postProcSpec['ppp'] if '{postProcSuffix}' in line: # If we have a tuple that includes a format string and format parameters, need to use ParameterName class if type(memberDisplayNamePrefix) is tuple: kwargs['postProcSuffix'] = ', ParameterName::IndexVector{{ {}{} }})'.format(postProcSpec['ppi'], memberDisplayNamePrefix[1]) else: kwargs['postProcSuffix'] = postProcSpec['pps'] if '{postProcInsert}' in line: # If we have a tuple that includes a format string and format parameters, need to use ParameterName class if type(memberDisplayNamePrefix) is tuple: kwargs['postProcInsert'] = '{}{}, '.format(postProcSpec['ppi'], memberDisplayNamePrefix[1]) else: kwargs['postProcInsert'] = postProcSpec['ppi'] if '{funcName}' in line: kwargs['funcName'] = funcName if '{valuePrefix}' in line: kwargs['valuePrefix'] = memberNamePrefix if '{displayNamePrefix}' in line: # Check for a tuple that includes a format string and format parameters to be used with the ParameterName class if type(memberDisplayNamePrefix) is tuple: kwargs['displayNamePrefix'] = memberDisplayNamePrefix[0] else: kwargs['displayNamePrefix'] = memberDisplayNamePrefix if kwargs: # Need to escape the C++ curly braces if 'IndexVector' in line: line = line.replace('IndexVector{ ', 'IndexVector{{ ') line = line.replace(' }),', ' }}),') return line.format(**kwargs) return line # # Process struct member validation code, stripping metadata def ScrubStructCode(self, code): scrubbed_lines = '' for line in code: if 'validate_struct_pnext' in line: continue if 'allowed_structs' in line: continue if 'xml-driven validation' in line: continue line = line.replace('{postProcPrefix}', '') line = line.replace('{postProcSuffix}', '') line = line.replace('{postProcInsert}', '') line = line.replace('{funcName}', '') line = line.replace('{valuePrefix}', '') line = line.replace('{displayNamePrefix}', '') line = line.replace('{IndexVector}', '') line = line.replace('local_data->', '') scrubbed_lines += line return scrubbed_lines # # Process struct validation code for inclusion in function or parent struct validation code def expandStructCode(self, item_type, funcName, memberNamePrefix, memberDisplayNamePrefix, indent, output, postProcSpec): lines = self.validatedStructs[item_type] for line in lines: if output: output[-1] += '\n' if type(line) is list: for sub in line: output.append(self.processStructMemberCode(indent + sub, funcName, memberNamePrefix, 
memberDisplayNamePrefix, postProcSpec)) else: output.append(self.processStructMemberCode(indent + line, funcName, memberNamePrefix, memberDisplayNamePrefix, postProcSpec)) return output # # Process struct pointer/array validation code, performing name substitution if required def expandStructPointerCode(self, prefix, value, lenValue, funcName, valueDisplayName, postProcSpec): expr = [] expr.append('if ({}{} != NULL)\n'.format(prefix, value.name)) expr.append('{') indent = self.incIndent(None) if lenValue: # Need to process all elements in the array indexName = value.len.replace('Count', 'Index') expr[-1] += '\n' if lenValue.ispointer: # If the length value is a pointer, de-reference it for the count. expr.append(indent + 'for (uint32_t {iname} = 0; {iname} < *{}{}; ++{iname})\n'.format(prefix, value.len, iname=indexName)) else: expr.append(indent + 'for (uint32_t {iname} = 0; {iname} < {}{}; ++{iname})\n'.format(prefix, value.len, iname=indexName)) expr.append(indent + '{') indent = self.incIndent(indent) # Prefix for value name to display in error message if value.ispointer == 2: memberNamePrefix = '{}{}[{}]->'.format(prefix, value.name, indexName) memberDisplayNamePrefix = ('{}[%i]->'.format(valueDisplayName), indexName) else: memberNamePrefix = '{}{}[{}].'.format(prefix, value.name, indexName) memberDisplayNamePrefix = ('{}[%i].'.format(valueDisplayName), indexName) else: memberNamePrefix = '{}{}->'.format(prefix, value.name) memberDisplayNamePrefix = '{}->'.format(valueDisplayName) # Expand the struct validation lines expr = self.expandStructCode(value.type, funcName, memberNamePrefix, memberDisplayNamePrefix, indent, expr, postProcSpec) if lenValue: # Close if and for scopes indent = self.decIndent(indent) expr.append(indent + '}\n') expr.append('}\n') return expr # # Generate the parameter checking code def genFuncBody(self, funcName, values, valuePrefix, displayNamePrefix, structTypeName, is_phys_device = False): lines = [] # Generated lines of code unused = [] # Unused variable names duplicateCountVuid = [] # prevent duplicate VUs being generated # TODO Using a regex in this context is not ideal. Would be nicer if usedLines were a list of objects with "settings" (such as "is_phys_device") validate_pnext_rx = re.compile(r'(.*validate_struct_pnext\(.*)(\).*\n*)', re.M) for value in values: usedLines = [] lenParam = None # # Prefix and suffix for post processing of parameter names for struct members. Arrays of structures need special processing to include the array index in the full parameter name. 
            postProcSpec = {}
            postProcSpec['ppp'] = '' if not structTypeName else '{postProcPrefix}'
            postProcSpec['pps'] = '' if not structTypeName else '{postProcSuffix}'
            postProcSpec['ppi'] = '' if not structTypeName else '{postProcInsert}'
            #
            # Generate the full name of the value, which will be printed in the error message, by adding the variable prefix to the value name
            valueDisplayName = '{}{}'.format(displayNamePrefix, value.name)
            #
            # Check for NULL pointers, ignore the in-out count parameters that
            # will be validated with their associated array
            if (value.ispointer or value.isstaticarray) and not value.iscount:
                # Parameters for function argument generation
                req = 'true'    # Parameter cannot be NULL
                cpReq = 'true'  # Count pointer cannot be NULL
                cvReq = 'true'  # Count value cannot be 0
                lenDisplayName = None  # Name of length parameter to print with validation messages; parameter name with prefix applied
                countRequiredVuid = None  # If there is a count required VUID to check
                # Generate required/optional parameter strings for the pointer and count values
                if value.isoptional:
                    req = 'false'
                if value.len:
                    # The parameter is an array with an explicit count parameter
                    lenParam = self.getLenParam(values, value.len)
                    if lenParam:
                        lenDisplayName = value.len.replace(lenParam.name, displayNamePrefix + lenParam.name)
                        if lenParam.ispointer:
                            # Count parameters that are pointers are inout
                            if type(lenParam.isoptional) is list:
                                if lenParam.isoptional[0]:
                                    cpReq = 'false'
                                if lenParam.isoptional[1]:
                                    cvReq = 'false'
                            else:
                                if lenParam.isoptional:
                                    cpReq = 'false'
                                # In case of count as field in another struct, look up the field to see if the count is optional.
                                len_deref = value.len.split('->')
                                if len(len_deref) == 2:
                                    struct_fields = next((struct.members for struct in self.structMembers if struct.name == lenParam.type), None)
                                    if struct_fields:
                                        len_field_name = len_deref[1]
                                        struct_field = next((field for field in struct_fields if field.name == len_field_name), None)
                                        if struct_field and struct_field.isoptional:
                                            cvReq = 'false'
                        else:
                            if lenParam.isoptional:
                                cvReq = 'false'
                            elif value.noautovalidity:
                                # Handle edge case where XML expresses a non-optional non-pointer value length with noautovalidity
                                # ex: <param noautovalidity="true" len="commandBufferCount">
                                vuidNameTag = structTypeName if structTypeName is not None else funcName
                                countRequiredVuid = self.GetVuid(vuidNameTag, "%s-arraylength" % (lenParam.name))
                                if countRequiredVuid in duplicateCountVuid:
                                    countRequiredVuid = None
                                else:
                                    duplicateCountVuid.append(countRequiredVuid)
                else:
                    # Do not generate length checks for constant sized arrays
                    cpReq = 'false'
                    cvReq = 'false'
                #
                # The parameter will not be processed when tagged as 'noautovalidity'
                # For the pointer to struct case, the struct pointer will not be validated, but any
                # members not tagged as 'noautovalidity' will be validated
                # We special-case the custom allocator checks, as they are explicit but can be auto-generated.
                AllocatorFunctions = ['PFN_vkAllocationFunction', 'PFN_vkReallocationFunction', 'PFN_vkFreeFunction', 'PFN_vkInternalAllocationNotification', 'PFN_vkInternalFreeNotification']
                if value.noautovalidity and value.type not in AllocatorFunctions and not countRequiredVuid:
                    # Log a diagnostic message when validation cannot be automatically generated and must be implemented manually
                    self.logMsg('diag', 'ParameterValidation: No validation for {} {}'.format(structTypeName if structTypeName else funcName, value.name))
                elif countRequiredVuid:
                    usedLines.append('skip |= validate_array("{}", {ppp}"{ldn}"{pps}, "", {pf}{ln}, &{pf}{vn}, true, false, {}, kVUIDUndefined);\n'.format(
                        funcName, countRequiredVuid, pf=valuePrefix, ldn=lenDisplayName, ln=value.len, vn=value.name, **postProcSpec))
                else:
                    if value.type in self.structTypes:
                        # If this is a pointer to a struct with an sType field, verify the type
                        usedLines += self.makeStructTypeCheck(valuePrefix, value, lenParam, req, cvReq, cpReq, funcName, lenDisplayName, valueDisplayName, postProcSpec, structTypeName)
                    # If this is an input handle array that is not allowed to contain NULL handles, verify that none of the handles are VK_NULL_HANDLE
                    elif value.type in self.handleTypes and value.isconst and not self.isHandleOptional(value, lenParam):
                        usedLines += self.makeHandleCheck(valuePrefix, value, lenParam, req, cvReq, funcName, lenDisplayName, valueDisplayName, postProcSpec)
                    elif value.type in self.flags and value.isconst:
                        usedLines += self.makeFlagsArrayCheck(valuePrefix, value, lenParam, req, cvReq, funcName, lenDisplayName, valueDisplayName, postProcSpec)
                    elif value.isbool and value.isconst:
                        usedLines.append('skip |= validate_bool32_array("{}", {ppp}"{}"{pps}, {ppp}"{}"{pps}, {pf}{}, {pf}{}, {}, {});\n'.format(funcName, lenDisplayName, valueDisplayName, value.len, value.name, cvReq, req, pf=valuePrefix, **postProcSpec))
                    elif value.israngedenum and value.isconst:
                        enum_value_list = 'All%sEnums' % value.type
                        usedLines.append('skip |= validate_ranged_enum_array("{}", {ppp}"{}"{pps}, {ppp}"{}"{pps}, "{}", {}, {pf}{}, {pf}{}, {}, {});\n'.format(funcName, lenDisplayName, valueDisplayName, value.type, enum_value_list, value.len, value.name, cvReq, req, pf=valuePrefix, **postProcSpec))
                    elif value.name == 'pNext':
                        usedLines += self.makeStructNextCheck(valuePrefix, value, funcName, valueDisplayName, postProcSpec, structTypeName)
                    else:
                        usedLines += self.makePointerCheck(valuePrefix, value, lenParam, req, cvReq, cpReq, funcName, lenDisplayName, valueDisplayName, postProcSpec, structTypeName)
                    # If this is a pointer to a struct (input), see if it contains members that need to be checked
                    if value.type in self.validatedStructs:
                        if value.isconst: # or value.type in self.returnedonly_structs:
                            usedLines.append(self.expandStructPointerCode(valuePrefix, value, lenParam, funcName, valueDisplayName, postProcSpec))
                        elif value.type in self.returnedonly_structs:
                            usedLines.append(self.expandStructPointerCode(valuePrefix, value, lenParam, funcName, valueDisplayName, postProcSpec))
                    is_const_str = 'true' if value.isconst else 'false'
                    is_phys_device_str = 'true' if is_phys_device else 'false'
                    for setter, _, elem in multi_string_iter(usedLines):
                        elem = re.sub(r', (true|false)', '', elem)
                        m = validate_pnext_rx.match(elem)
                        if m is not None:
                            setter(f'{m.group(1)}, {is_phys_device_str}, {is_const_str}{m.group(2)}')
            # Non-pointer types
            else:
                # The parameter will not be processed when tagged as 'noautovalidity'
                # For the struct case, the struct type will not be validated, but any
                # members not tagged as 'noautovalidity' will be validated
                if value.noautovalidity:
                    # Log a diagnostic message when validation cannot be automatically generated and must be implemented manually
                    self.logMsg('diag', 'ParameterValidation: No validation for {} {}'.format(structTypeName if structTypeName else funcName, value.name))
                else:
                    vuid_name_tag = structTypeName if structTypeName is not None else funcName
                    if value.type in self.structTypes:
                        stype = self.structTypes[value.type]
                        vuid = self.GetVuid(value.type, "sType-sType")
                        usedLines.append('skip |= validate_struct_type("{}", {ppp}"{}"{pps}, "{sv}", &({}{vn}), {sv}, false, kVUIDUndefined, {});\n'.format(
                            funcName, valueDisplayName, valuePrefix, vuid, vn=value.name, sv=stype, vt=value.type, **postProcSpec))
                    elif value.type in self.handleTypes:
                        if not self.isHandleOptional(value, None):
                            usedLines.append('skip |= validate_required_handle("{}", {ppp}"{}"{pps}, {}{});\n'.format(funcName, valueDisplayName, valuePrefix, value.name, **postProcSpec))
                    elif value.type in self.flags and value.type.replace('Flags', 'FlagBits') not in self.flagBits:
                        vuid = self.GetVuid(vuid_name_tag, "%s-zerobitmask" % (value.name))
                        usedLines.append('skip |= validate_reserved_flags("{}", {ppp}"{}"{pps}, {pf}{}, {});\n'.format(funcName, valueDisplayName, value.name, vuid, pf=valuePrefix, **postProcSpec))
                    elif value.type in self.flags or value.type in self.flagBits:
                        if value.type in self.flags:
                            flagBitsName = value.type.replace('Flags', 'FlagBits')
                            flagsType = 'kOptionalFlags' if value.isoptional else 'kRequiredFlags'
                            invalidVuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
                            zeroVuid = self.GetVuid(vuid_name_tag, "%s-requiredbitmask" % (value.name))
                        elif value.type in self.flagBits:
                            flagBitsName = value.type
                            flagsType = 'kOptionalSingleBit' if value.isoptional else 'kRequiredSingleBit'
                            invalidVuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
                            zeroVuid = invalidVuid
                        allFlagsName = 'All' + flagBitsName
                        zeroVuidArg = '' if value.isoptional else ', ' + zeroVuid
                        usedLines.append('skip |= validate_flags("{}", {ppp}"{}"{pps}, "{}", {}, {pf}{}, {}, {}{});\n'.format(funcName, valueDisplayName, flagBitsName, allFlagsName, value.name, flagsType, invalidVuid, zeroVuidArg, pf=valuePrefix, **postProcSpec))
                    elif value.isbool:
                        usedLines.append('skip |= validate_bool32("{}", {ppp}"{}"{pps}, {}{});\n'.format(funcName, valueDisplayName, valuePrefix, value.name, **postProcSpec))
                    elif value.israngedenum:
                        vuid = self.GetVuid(vuid_name_tag, "%s-parameter" % (value.name))
                        enum_value_list = 'All%sEnums' % value.type
                        usedLines.append('skip |= validate_ranged_enum("{}", {ppp}"{}"{pps}, "{}", {}, {}{}, {});\n'.format(funcName, valueDisplayName, value.type, enum_value_list, valuePrefix, value.name, vuid, **postProcSpec))
                    # If this is a struct, see if it contains members that need to be checked
                    if value.type in self.validatedStructs:
                        memberNamePrefix = '{}{}.'.format(valuePrefix, value.name)
                        memberDisplayNamePrefix = '{}.'.format(valueDisplayName)
                        usedLines.append(self.expandStructCode(value.type, funcName, memberNamePrefix, memberDisplayNamePrefix, '', [], postProcSpec))
            # Append the parameter check to the function body for the current command
            if usedLines:
                # Apply special conditional checks
                if value.condition:
                    usedLines = self.genConditionalCall(valuePrefix, value.condition, usedLines)
                lines += usedLines
            elif not value.iscount:
                # If no expression was generated for this value, it is unreferenced by the validation
                # function, unless it is an array count, which is indirectly referenced for array validation.
                unused.append(value.name)
        if not lines:
            lines.append('// No xml-driven validation\n')
        return lines, unused
    #
    # Generate the struct member check code from the captured data
    def processStructMemberData(self):
        indent = self.incIndent(None)
        for struct in self.structMembers:
            #
            # The string returned by genFuncBody will be nested in an if check for a NULL pointer, so needs its indent incremented
            lines, unused = self.genFuncBody('{funcName}', struct.members, '{valuePrefix}', '{displayNamePrefix}', struct.name)
            if lines:
                self.validatedStructs[struct.name] = lines
    #
    # Generate the command param check code from the captured data
    def processCmdData(self):
        indent = self.incIndent(None)
        for command in self.commands:
            # Skip first parameter if it is a dispatch handle (everything except vkCreateInstance)
            startIndex = 0 if command.name == 'vkCreateInstance' else 1
            lines, unused = self.genFuncBody(command.name, command.params[startIndex:], '', '', None, is_phys_device = command.params[0].type == 'VkPhysicalDevice')
            # Cannot validate extension dependencies for device extension APIs having a physical device as their dispatchable object
            if (command.name in self.required_extensions) and (self.extension_type != 'device' or command.params[0].type != 'VkPhysicalDevice'):
                for ext in self.required_extensions[command.name]:
                    ext_name_define = ''
                    for extension in self.registry.extensions:
                        if extension.attrib['name'] == ext:
                            ext_name_define = GetNameDefine(extension)
                            break
                    ext_test = ''
                    if command.params[0].type in ["VkInstance", "VkPhysicalDevice"] or command.name == 'vkCreateInstance':
                        ext_test = 'if (!instance_extensions.%s) skip |= OutputExtensionError("%s", %s);\n' % (ext.lower(), command.name, ext_name_define)
                    else:
                        ext_test = 'if (!IsExtEnabled(device_extensions.%s)) skip |= OutputExtensionError("%s", %s);\n' % (ext.lower(), command.name, ext_name_define)
                    lines.insert(0, ext_test)
            if lines:
                func_sig = self.getCmdDef(command) + ' const {\n'
                func_sig = func_sig.split('VKAPI_CALL vk')[1]
                cmdDef = 'bool StatelessValidation::PreCallValidate' + func_sig
                cmdDef += '%sbool skip = false;\n' % indent
                if isinstance(command.promotion_info, list):
                    version_flag = command.promotion_info[1]
                    version_id = version_flag.replace('VK_VERSION', 'VK_API_VERSION')
                    cmdDef += '%s if (CheckPromotedApiAgainstVulkanVersion(%s, "%s", %s)) return true;\n' % (indent, command.promotion_info[0], command.name, version_id)
                for line in lines:
                    if type(line) is list:
                        for sub in line:
                            cmdDef += indent + sub
                    else:
                        cmdDef += indent + line
                # Insert call to custom-written function if present
                if command.name in self.functions_with_manual_checks:
                    # Generate parameter list for manual fcn and down-chain calls
                    params_text = ''
                    for param in command.params:
                        params_text += '%s, ' % param.name
                    params_text = params_text[:-2] + ');\n'
                    cmdDef += ' if (!skip) skip |= manual_PreCallValidate' + command.name[2:] + '(' + params_text
                cmdDef += '%sreturn skip;\n' % indent
                cmdDef += '}\n'
                self.validation.append(cmdDef)
copyright = '/* *** THIS FILE IS GENERATED - DO NOT EDIT! ***\n'
copyright += ' * See parameter_validation_generator.py for modifications\n'
copyright += ' *\n'
copyright += ' * Copyright (c) 2015-2021 The Khronos Group Inc.\n'
copyright += ' * Copyright (c) 2015-2017 Valve Corporation\n'
copyright += ' * Copyright (c) 2015-2021 LunarG, Inc.\n'
copyright += ' * Copyright (C) 2015-2021 Google Inc.\n'
copyright += ' *\n'
copyright += ' * Licensed under the Apache License, Version 2.0 (the "License");\n'
copyright += ' * you may not use this file except in compliance with the License.\n'
copyright += ' * You may obtain a copy of the License at\n'
copyright += ' *\n'
copyright += ' * http://www.apache.org/licenses/LICENSE-2.0\n'
copyright += ' *\n'
copyright += ' * Unless required by applicable law or agreed to in writing, software\n'
copyright += ' * distributed under the License is distributed on an "AS IS" BASIS,\n'
copyright += ' * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n'
copyright += ' * See the License for the specific language governing permissions and\n'
copyright += ' * limitations under the License.\n'
copyright += ' *\n'
copyright += ' * Author: Mark Lobodzinski <[email protected]>\n'
copyright += ' * Author: Dave Houlton <[email protected]>\n'
copyright += ' */\n\n'
return copyright
transmissor.py
import socket

import numpy as np  # pip install numpy

socketUDP = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

transmissor = ("127.0.0.1", 2020)
receptor = ("127.0.0.1", 3030)

socketUDP.bind(transmissor)

buff_size = 10000
next_sequence_number = 0


def calculate_checksum(data):
    data_sum = np.uint16(0)
    for element in data:
        data_sum += element
    return np.invert(data_sum)


def verify_checksum(data):
    data_sum = np.uint16(0)
    for element in data:
        data_sum += element
    return data_sum == 0xFFFF


def udt_send(packet):
    socketUDP.sendto(packet.tobytes(), receptor)


def rdt_rcv():
    while True:
        message, source = socketUDP.recvfrom(buff_size)
        if source == receptor:
            return np.frombuffer(message, dtype=np.uint16)


def rdt_send(data):
    global next_sequence_number
    sndpkt = np.array([], np.uint16)
    sndpkt = np.append(sndpkt, np.uint16(next_sequence_number))
    sndpkt = np.append(sndpkt, np.uint16(0))  # checksum
    sndpkt = np.concatenate((sndpkt, data))
    udt_send(sndpkt)

    while True:
        rcvpkt = rdt_rcv()

        is_corrupt = not verify_checksum(rcvpkt)
        is_ack = rcvpkt[2] == 1
        is_nack = rcvpkt[2] == 0

        print("Received packet ", rcvpkt)
        print("Corrupted? ", is_corrupt)
        print("NACK? ", is_nack)
        print("ACK? ", is_ack)
        print("Seq num? ", next_sequence_number)

        if is_corrupt or is_nack:
            udt_send(sndpkt)
        if is_ack and not is_corrupt:
            break

    # Alternate the 1-bit sequence number for the next packet
    if next_sequence_number == 0:
        next_sequence_number = 1
    else:
        next_sequence_number = 0


if __name__ == "__main__":
    i = 1
    while i <= 3:
        dados = np.random.randint(5, size=10, dtype=np.uint16)
        print(f'Data to send: {dados}')
        rdt_send(dados)
        i += 1
sndpkt[1] = calculate_checksum(sndpkt)
main.rs
use std::io;
//use std::collections::HashMap;
//use std::collections::BinaryHeap;
//use std::collections::HashSet;
//use std::mem; // mem::swap(&mut x,&mut y);
use std::cmp; // cmp::max,cmp::min

#[allow(dead_code)]
fn read_line() -> String{
    let mut s = String::new();
    io::stdin().read_line(&mut s).unwrap();
    s.trim().to_string()
}
#[allow(dead_code)]
fn read_charvec() -> Vec<char>{
    read_line().chars().collect()
}
#[allow(dead_code)]
fn read_ints() -> Vec<i64>{
    let s = read_line();
    let split:Vec<&str> = s.split(" ").collect();
    split.iter().map(|&x| x.to_string().parse().unwrap()).collect()
}
#[allow(dead_code)]
fn read_int() -> i64{
    read_ints()[0]
}
#[allow(dead_code)]
fn reverse(s: &String) -> String{
    s.chars().rev().collect::<String>()
}
#[allow(dead_code)]
fn read_lines(n:i64) -> Vec<String>{
    let mut xs = Vec::new();
    for _i in 0..n{
        xs.push(read_line());
    }
    xs
}
#[allow(dead_code)]
fn read_ints2(n:i64) -> Vec<i64>{
    read_lines(n).iter().map(|x| x.parse().unwrap()).collect()
}

////////////////////////////////////////////////

fn read_board(n:i64) -> Vec<i64>{
    (0..n)
        .into_iter()
        .map(|_| read_line())
        .fold(String::new(),|s,i| format!("{}{}",s,i))
        .chars()
        .into_iter()
        .map(|c| if c == '0' {0} else {1})
        .collect()
}

fn calc_diff(piece:&Vec<i64>,target:&Vec<i64>) ->i64
fn main(){
    let n = read_int();
    let mut piece = Vec::new();
    let odd_str : Vec<i64> = (0..n*n)
        .into_iter()
        .map(|i| i%2)
        .collect();
    let even_str : Vec<i64> = (0..n*n)
        .into_iter()
        .map(|i| 1-i%2)
        .collect();
    let mut ret = n*n*4;
    piece.push(read_board(n));
    read_line();
    piece.push(read_board(n));
    read_line();
    piece.push(read_board(n));
    read_line();
    piece.push(read_board(n));
    for i in 0..16{
        let flags = (0..4)
            .into_iter()
            .map(|x| if i & (1 << x) == 0 {0} else {1})
            .fold(0,|s,i|s+i);
        if flags != 2{
            continue;
        }
        let diff = (0..4)
            .into_iter()
            .map(|x| calc_diff(&piece[x],if i&(1<<x) == 0 {&even_str} else{&odd_str}))
            .fold(0,|s,i|s+i);
        // println!("{}",diff);
        ret = cmp::min(ret,diff);
    }
    // println!("{}",piece[0]);
    // println!("{}",even_str);
    // println!("{}",odd_str);
    println!("{}",ret);
}
{
    let mut sum = 0;
    for i in 0..piece.len(){
        sum = sum + if piece[i] != target[i]{ 1 } else { 0 };
    }
    sum
}
chain_spec.rs
use cumulus_primitives_core::ParaId;
use hex_literal::hex;
use parachain_template_runtime::{AccountId, AuraId, Signature};
use sc_chain_spec::{ChainSpecExtension, ChainSpecGroup};
use sc_service::ChainType;
use serde::{Deserialize, Serialize};
use sp_core::{sr25519, Pair, Public};
use sp_runtime::traits::{IdentifyAccount, Verify};

/// Specialized `ChainSpec` for the normal parachain runtime.
pub type ChainSpec =
    sc_service::GenericChainSpec<parachain_template_runtime::GenesisConfig, Extensions>;

/// Helper function to generate a crypto pair from seed
pub fn get_public_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {
    TPublic::Pair::from_string(&format!("//{}", seed), None)
        .expect("static values are valid; qed")
        .public()
}

/// The extensions for the [`ChainSpec`].
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ChainSpecGroup, ChainSpecExtension)]
#[serde(deny_unknown_fields)]
pub struct
{
    /// The relay chain of the Parachain.
    pub relay_chain: String,
    /// The id of the Parachain.
    pub para_id: u32,
}

impl Extensions {
    /// Try to get the extension from the given `ChainSpec`.
    pub fn try_get(chain_spec: &dyn sc_service::ChainSpec) -> Option<&Self> {
        sc_chain_spec::get_extension(chain_spec.extensions())
    }
}

type AccountPublic = <Signature as Verify>::Signer;

/// Generate collator keys from seed.
///
/// This function's return type must always match the session keys of the chain in tuple format.
pub fn get_collator_keys_from_seed(seed: &str) -> AuraId {
    get_public_from_seed::<AuraId>(seed)
}

/// Helper function to generate an account ID from seed
pub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId
where
    AccountPublic: From<<TPublic::Pair as Pair>::Public>,
{
    AccountPublic::from(get_public_from_seed::<TPublic>(seed)).into_account()
}

/// Generate the session keys from individual elements.
///
/// The input must be a tuple of individual keys (a single arg for now since we have just one key).
pub fn template_session_keys(keys: AuraId) -> parachain_template_runtime::SessionKeys {
    parachain_template_runtime::SessionKeys { aura: keys }
}

pub fn kusama_config(relay_chain: String) -> ChainSpec {
    // Give your base currency a unit name and decimal places
    let mut properties = sc_chain_spec::Properties::new();
    properties.insert("tokenSymbol".into(), "XOR".into());
    properties.insert("tokenDecimals".into(), 18u64.into());
    properties.insert("ss58Format".into(), parachain_template_runtime::SS58Prefix::get().into());

    ChainSpec::from_genesis(
        // Name
        "SORA Kusama",
        // ID
        "sora_ksm",
        ChainType::Live,
        move || {
            testnet_genesis(
                AccountId::from(hex!(
                    "de5ef29355f16efa342542cd7567bebd371b3e80dd33aee99cc50cb484688058"
                )),
                // initial collators.
                vec![
                    (
                        AccountId::from(hex!(
                            "ac0ad7c17a14833a42f8a282cd0715868c6b2680827e47b158474fdefd82e164"
                        )),
                        AuraId::from_slice(&hex!(
                            "ac0ad7c17a14833a42f8a282cd0715868c6b2680827e47b158474fdefd82e164"
                        )),
                    ),
                    (
                        AccountId::from(hex!(
                            "f043af25b769db28c9f9ca876e8d55b4a5a7d634b1b30b2e5e796666f65cb24a"
                        )),
                        AuraId::from_slice(&hex!(
                            "f043af25b769db28c9f9ca876e8d55b4a5a7d634b1b30b2e5e796666f65cb24a"
                        )),
                    ),
                ],
                vec![
                    AccountId::from(hex!(
                        "de5ef29355f16efa342542cd7567bebd371b3e80dd33aee99cc50cb484688058"
                    )),
                    AccountId::from(hex!(
                        "ac0ad7c17a14833a42f8a282cd0715868c6b2680827e47b158474fdefd82e164"
                    )),
                    AccountId::from(hex!(
                        "f043af25b769db28c9f9ca876e8d55b4a5a7d634b1b30b2e5e796666f65cb24a"
                    )),
                ],
                2011u32.into(),
            )
        },
        // Bootnodes
        Vec::new(),
        // Telemetry
        None,
        // Protocol ID
        Some("sora_ksm"),
        // Properties
        Some(properties),
        // Extensions
        Extensions {
            relay_chain,
            para_id: 2011,
        },
    )
}

pub fn development_config() -> ChainSpec {
    // Give your base currency a unit name and decimal places
    let mut properties = sc_chain_spec::Properties::new();
    properties.insert("tokenSymbol".into(), "XOR".into());
    properties.insert("tokenDecimals".into(), 18u64.into());
    properties.insert("ss58Format".into(), parachain_template_runtime::SS58Prefix::get().into());

    ChainSpec::from_genesis(
        // Name
        "SORA Kusama",
        // ID
        "sora_ksm_dev",
        ChainType::Development,
        move || {
            testnet_genesis(
                AccountId::from(hex!(
                    "e02b00cb5bbf5c0338075237cdbfb7d11dbaf19aafce71744610b6a87b5e0f22"
                )),
                // initial collators.
                vec![
                    (
                        AccountId::from(hex!(
                            "caeedb2ddad0aca6d587dd24422ab8f6281a5b2495eb5d30265294cb29238567"
                        )),
                        AuraId::from_slice(&hex!(
                            "caeedb2ddad0aca6d587dd24422ab8f6281a5b2495eb5d30265294cb29238567"
                        )),
                    ),
                    (
                        AccountId::from(hex!(
                            "3617852ccd789ce50f10d7843542964c71e8e08ef2977c1af3435eaabaca1521"
                        )),
                        AuraId::from_slice(&hex!(
                            "3617852ccd789ce50f10d7843542964c71e8e08ef2977c1af3435eaabaca1521"
                        )),
                    ),
                ],
                vec![
                    AccountId::from(hex!(
                        "e02b00cb5bbf5c0338075237cdbfb7d11dbaf19aafce71744610b6a87b5e0f22"
                    )),
                    AccountId::from(hex!(
                        "caeedb2ddad0aca6d587dd24422ab8f6281a5b2495eb5d30265294cb29238567"
                    )),
                    AccountId::from(hex!(
                        "3617852ccd789ce50f10d7843542964c71e8e08ef2977c1af3435eaabaca1521"
                    )),
                ],
                2000u32.into(),
            )
        },
        // Bootnodes
        Vec::new(),
        // Telemetry
        None,
        // Protocol ID
        Some("sora_ksm_dev"),
        // Properties
        Some(properties),
        // Extensions
        Extensions {
            relay_chain: "rococo-local".into(), // You MUST set this to the correct network!
            para_id: 2000,
        },
    )
}

pub fn local_config() -> ChainSpec {
    // Give your base currency a unit name and decimal places
    let mut properties = sc_chain_spec::Properties::new();
    properties.insert("tokenSymbol".into(), "XOR".into());
    properties.insert("tokenDecimals".into(), 18u64.into());
    properties.insert("ss58Format".into(), parachain_template_runtime::SS58Prefix::get().into());

    ChainSpec::from_genesis(
        // Name
        "SORA Kusama",
        // ID
        "sora_ksm_local",
        ChainType::Local,
        move || {
            testnet_genesis(
                get_account_id_from_seed::<sr25519::Public>("Alice"),
                // initial collators.
                vec![
                    (
                        get_account_id_from_seed::<sr25519::Public>("Alice"),
                        get_collator_keys_from_seed("Alice"),
                    ),
                    (
                        get_account_id_from_seed::<sr25519::Public>("Bob"),
                        get_collator_keys_from_seed("Bob"),
                    ),
                ],
                vec![
                    get_account_id_from_seed::<sr25519::Public>("Alice"),
                    get_account_id_from_seed::<sr25519::Public>("Bob"),
                    get_account_id_from_seed::<sr25519::Public>("Charlie"),
                    get_account_id_from_seed::<sr25519::Public>("Dave"),
                    get_account_id_from_seed::<sr25519::Public>("Eve"),
                    get_account_id_from_seed::<sr25519::Public>("Ferdie"),
                    get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Charlie//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Dave//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Eve//stash"),
                    get_account_id_from_seed::<sr25519::Public>("Ferdie//stash"),
                ],
                2000u32.into(),
            )
        },
        // Bootnodes
        Vec::new(),
        // Telemetry
        None,
        // Protocol ID
        Some("sora_ksm_local"),
        // Properties
        Some(properties),
        // Extensions
        Extensions {
            relay_chain: "rococo-local".into(), // You MUST set this to the correct network!
            para_id: 2000,
        },
    )
}

fn testnet_genesis(
    root_key: AccountId,
    invulnerables: Vec<(AccountId, AuraId)>,
    endowed_accounts: Vec<AccountId>,
    id: ParaId,
) -> parachain_template_runtime::GenesisConfig {
    parachain_template_runtime::GenesisConfig {
        system: parachain_template_runtime::SystemConfig {
            code: parachain_template_runtime::WASM_BINARY
                .expect("WASM binary was not built, please build it!")
                .to_vec(),
        },
        balances: parachain_template_runtime::BalancesConfig {
            balances: endowed_accounts.iter().cloned().map(|k| (k, 1_000_000_000_000_000_000)).collect(),
        },
        parachain_info: parachain_template_runtime::ParachainInfoConfig { parachain_id: id },
        collator_selection: parachain_template_runtime::CollatorSelectionConfig {
            invulnerables: invulnerables.iter().cloned().map(|(acc, _)| acc).collect(),
            candidacy_bond: 0,
            desired_candidates: 0,
        },
        session: parachain_template_runtime::SessionConfig {
            keys: invulnerables
                .into_iter()
                .map(|(acc, aura)| {
                    (
                        acc.clone(),                 // account id
                        acc,                         // validator id
                        template_session_keys(aura), // session keys
                    )
                })
                .collect(),
        },
        // no need to pass anything to aura, in fact it will panic if we do. Session will take care
        // of this.
        aura: Default::default(),
        aura_ext: Default::default(),
        parachain_system: Default::default(),
        sudo: parachain_template_runtime::SudoConfig { key: root_key },
    }
}
Extensions