Dataset columns:

  file_name  string, length 3 .. 137
  prefix     string, length 0 .. 918k
  suffix     string, length 0 .. 962k
  middle     string, length 0 .. 812k
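Each record pairs a file_name with three strings: a prefix, a held-out middle, and a suffix. Concatenating prefix + middle + suffix recovers the original source file, which is how fill-in-the-middle training samples are built. A minimal reassembly sketch, assuming a record is a plain dict keyed by the four columns above (the dict shape is an assumption, not part of the dataset spec):

```python
# Recover the original file from a fill-in-the-middle record.
# Assumes `record` is a dict with the four columns listed above.
def reassemble(record: dict) -> str:
    # The held-out middle goes back between prefix and suffix,
    # with no extra separators.
    return record["prefix"] + record["middle"] + record["suffix"]

record = {
    "file_name": "example.js",
    "prefix": "const x = ",
    "middle": "1",
    "suffix": " + 2\n",
}
assert reassemble(record) == "const x = 1 + 2\n"
```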
destroy.js
import socket from '@core/services/routes/emitter'
import Filter from '@apps/maha/models/filter'

const destroyRoute = async (req, res) => {

  const filter = await Filter.query(qb => {
    qb.where('team_id', req.team.get('id'))
    qb.where('owner_id', req.user.get('id'))
    qb.where('code', req.params.code)
    qb.where('id', req.params.id)
  }).fetch({ transacting: req.trx })

  if(!filter) return res.status(404).respond({
    code: 404,
    message: 'Unable to load filter'
  })

  const channels = [
    `/admin/${filter.get('code')}/filters`
  ]

  await req.trx('maha_filter_accesses').where('filter_id', filter.get('id')).del()

  await filter.destroy({ transacting: req.trx })

  await socket.refresh(req, channels)

  await res.status(200).respond(true)

}

export default destroyRoute
simplyCountdown.js
/*!
 * Project : simply-countdown
 * File    : simplyCountdown
 * Date    : 27/06/2015
 * License : MIT
 * Version : 1.3.2
 * Author  : Vincent Loy <[email protected]>
 * Contributors :
 *  - Justin Beasley <[email protected]>
 *  - Nathan Smith <[email protected]>
 */
/*global window, document*/
(function (exports) {
    'use strict';

    var // functions
        extend,
        createElements,
        createCountdownElt,
        simplyCountdown;

    /**
     * Function that merges user parameters with default ones.
     * @param out
     * @returns {*|{}}
     */
    extend = function (out) {
        var i, obj, key;
        out = out || {};

        for (i = 1; i < arguments.length; i += 1) {
            obj = arguments[i];

            if (obj) {
                for (key in obj) {
                    if (obj.hasOwnProperty(key)) {
                        if (typeof obj[key] === 'object') {
                            extend(out[key], obj[key]);
                        } else {
                            out[key] = obj[key];
                        }
                    }
                }
            }
        }

        return out;
    };

    /**
     * Function that creates a countdown section
     * @param countdown
     * @param parameters
     * @param typeClass
     * @returns {{full: (*|Element), amount: (*|Element), word: (*|Element)}}
     */
    createCountdownElt = function (countdown, parameters, typeClass) {
        var innerSectionTag,
            sectionTag,
            amountTag,
            wordTag;

        sectionTag = document.createElement('div');
        amountTag = document.createElement('span');
        wordTag = document.createElement('span');
        innerSectionTag = document.createElement('div');

        innerSectionTag.appendChild(amountTag);
        innerSectionTag.appendChild(wordTag);
        sectionTag.appendChild(innerSectionTag);

        sectionTag.classList.add(parameters.sectionClass);
        sectionTag.classList.add(typeClass);
        amountTag.classList.add(parameters.amountClass);
        wordTag.classList.add(parameters.wordClass);

        countdown.appendChild(sectionTag);

        return {
            full: sectionTag,
            amount: amountTag,
            word: wordTag
        };
    };

    /**
     * Function that creates the full countdown DOM elements by calling createCountdownElt
     * @param parameters
     * @param countdown
     * @returns {{days: (*|Element), hours: (*|Element), minutes: (*|Element), seconds: (*|Element)}}
     */
    createElements = function (parameters, countdown) {
        var spanTag;

        if (!parameters.inline) {
            return {
                days: createCountdownElt(countdown, parameters, 'simply-days-section'),
                hours: createCountdownElt(countdown, parameters, 'simply-hours-section'),
                minutes: createCountdownElt(countdown, parameters, 'simply-minutes-section'),
                seconds: createCountdownElt(countdown, parameters, 'simply-seconds-section')
            };
        }

        spanTag = document.createElement('span');
        spanTag.classList.add(parameters.inlineClass);
        return spanTag;
    };

    /**
     * simplyCountdown, create and display the countdown.
     * @param elt
     * @param args (parameters)
     */
    simplyCountdown = function (elt, args) {
        var parameters = extend({
                year: 2021,
                month: 10,
                day: 23,
                hours: 9,
                minutes: 0,
                seconds: 0,
                words: {
                    days: 'day',
                    hours: 'hour',
                    minutes: 'minute',
                    seconds: 'second',
                    pluralLetter: 's'
                },
                plural: true,
                inline: false,
                enableUtc: true,
                onEnd: function () { return; },
                refresh: 1000,
                inlineClass: 'simply-countdown-inline',
                sectionClass: 'simply-section',
                amountClass: 'simply-amount',
                wordClass: 'simply-word',
                zeroPad: false
            }, args),
            interval,
            targetDate,
            targetTmpDate,
            now,
            nowUtc,
            secondsLeft,
            days,
            hours,
            minutes,
            seconds,
            cd = document.querySelectorAll(elt);

        targetTmpDate = new Date(
            parameters.year,
            parameters.month - 1,
            parameters.day,
            parameters.hours,
            parameters.minutes,
            parameters.seconds
        );

        if (parameters.enableUtc) {
            targetDate = new Date(
                targetTmpDate.getUTCFullYear(),
                targetTmpDate.getUTCMonth(),
                targetTmpDate.getUTCDate(),
                targetTmpDate.getUTCHours(),
                targetTmpDate.getUTCMinutes(),
                targetTmpDate.getUTCSeconds()
            );
        } else {
            targetDate = targetTmpDate;
        }

        Array.prototype.forEach.call(cd, function (countdown) {
            var fullCountDown = createElements(parameters, countdown),
                refresh;

            refresh = function () {
                var dayWord, hourWord, minuteWord, secondWord;

                now = new Date();
                if (parameters.enableUtc) {
                    nowUtc = new Date(now.getFullYear(), now.getMonth(), now.getDate(),
                        now.getHours(), now.getMinutes(), now.getSeconds());
                    secondsLeft = (targetDate - nowUtc.getTime()) / 1000;
                } else {
                    secondsLeft = (targetDate - now.getTime()) / 1000;
                }

                if (secondsLeft > 0) {
                    days = parseInt(secondsLeft / 86400, 10);
                    secondsLeft = secondsLeft % 86400;

                    hours = parseInt(secondsLeft / 3600, 10);
                    secondsLeft = secondsLeft % 3600;

                    minutes = parseInt(secondsLeft / 60, 10);
                    seconds = parseInt(secondsLeft % 60, 10);
                } else {
                    days = 0;
                    hours = 0;
                    minutes = 0;
                    seconds = 0;
                    window.clearInterval(interval);
                    parameters.onEnd();
                }

                if (parameters.plural) {
                    dayWord = days > 1
                        ? parameters.words.days + parameters.words.pluralLetter
                        : parameters.words.days;

                    hourWord = hours > 1
                        ? parameters.words.hours + parameters.words.pluralLetter
                        : parameters.words.hours;

                    minuteWord = minutes > 1
                        ? parameters.words.minutes + parameters.words.pluralLetter
                        : parameters.words.minutes;

                    secondWord = seconds > 1
                        ? parameters.words.seconds + parameters.words.pluralLetter
                        : parameters.words.seconds;
                } else {
                    dayWord = parameters.words.days;
                    hourWord = parameters.words.hours;
                    minuteWord = parameters.words.minutes;
                    secondWord = parameters.words.seconds;
                }

                /* display an inline countdown into a span tag */
                if (parameters.inline) {
                    countdown.innerHTML = days + ' ' + dayWord + ', '
                        + hours + ' ' + hourWord + ', '
                        + minutes + ' ' + minuteWord + ', '
                        + seconds + ' ' + secondWord + '.';
                } else {
                    fullCountDown.days.amount.textContent = (parameters.zeroPad && days.toString().length < 2 ? '0' : '') + days;
                    fullCountDown.days.word.textContent = dayWord;

                    fullCountDown.hours.amount.textContent = (parameters.zeroPad && hours.toString().length < 2 ? '0' : '') + hours;
                    fullCountDown.hours.word.textContent = hourWord;

                    fullCountDown.minutes.amount.textContent = (parameters.zeroPad && minutes.toString().length < 2 ? '0' : '') + minutes;
                    fullCountDown.minutes.word.textContent = minuteWord;

                    fullCountDown.seconds.amount.textContent = (parameters.zeroPad && seconds.toString().length < 2 ? '0' : '') + seconds;
                    fullCountDown.seconds.word.textContent = secondWord;
                }
            };

            // Refresh immediately to prevent a Flash of Unstyled Content
            refresh();
            interval = window.setInterval(refresh, parameters.refresh);
        });
    };

    exports.simplyCountdown = simplyCountdown;
}(window));

/*global $, jQuery, simplyCountdown*/
if (window.jQuery) {
    (function ($, simplyCountdown) {
        'use strict';

        function simplyCountdownify(el, options) {
            simplyCountdown(el, options);
        }

        $.fn.simplyCountdown = function (options) {
            return simplyCountdownify(this.selector, options);
        };
    }(jQuery, simplyCountdown));
}
acib.py
# coding=utf-8
"""
Fetch the daily sentence (bilingual, with English) from iCIBA.
"""
import requests

from everyday_wechat.utils.common import (
    is_json
)


def get_acib_info():
    """
    Fetch the daily sentence (bilingual, with English) from iCIBA.
    :return: str, the daily bilingual sentence
    """
    print('Fetching the daily quote (bilingual)...')
    try:
        resp = requests.get('http://open.iciba.com/dsapi')
        if resp.status_code == 200 and is_json(resp):
            content_dict = resp.json()
            content = content_dict.get('content')
            note = content_dict.get('note')
            return '{}{}'.format(content, note)
        print('No quote data was received.')
    except requests.exceptions.RequestException as exception:
        print(exception)
    return None


get_one_words = get_acib_info
lib.rs
use std::{
    fmt::Write,
    os::raw::{c_char}
};

#[repr(C)]
struct tm {
    _private: u8
}

#[cfg(target_pointer_width = "64")]
type SizeType = u64;
#[cfg(target_pointer_width = "32")]
type SizeType = u32;

extern "C" {
    fn gmtime(secs: *const i64) -> *mut tm;
    fn strftime(s: *mut c_char, max: SizeType, format: *const c_char, tm: *const tm) -> usize;
}

/// Convert a byte array to a ``String``.
///
/// # Examples
///
/// ```
/// let byte_array: Vec<u8> = vec!(0x0F, 0x5F, 0xAA);
/// let repr: String = stringutils::byte_array_to_string(&byte_array);
///
/// println!("Bytes: {}", repr);
/// ```
pub fn byte_array_to_string(bytes: &[u8]) -> String {
    let mut repr = String::new();
    for x in bytes {
        write!(&mut repr, "{:02x}", x).unwrap();
    }
    repr
}

/// Get a String representation for a timestamp.
///
/// # Example
/// ```
/// let current_time = unsafe { libc::time(std::ptr::null_mut()) };
/// let repr = stringutils::timestamp_to_string(current_time).unwrap();
/// println!("Current time: {}", repr);
/// ```
pub fn timestamp_to_string(timestamp_secs: i64) -> Result<String, std::str::Utf8Error> {
    let gmtime = unsafe { gmtime(&timestamp_secs) };
    let mut buf = [0u8; 4096];
    let format_str = "%Y-%m-%d %H:%M";
    let format_cstr = std::ffi::CString::new(format_str).unwrap();
    let len = unsafe {
        strftime(buf.as_mut_ptr() as *mut c_char, 4096, format_cstr.as_ptr(), gmtime)
    };
    std::str::from_utf8(&buf[0..len]).map(|s| s.to_owned())
}
error.rs
use actix_web::Resource;
use actix_web::web::{resource, get};

use crate::handlers::website;

pub fn route() -> Resource
{
    // Error: /{lang}/error
    resource("/error").route(
        get().to(website::error_get::error_get)
    )
}
node.py
from functools import partial

import six
from graphql_relay import from_global_id, to_global_id

from ..types import ID, Field, Interface, ObjectType
from ..types.interface import InterfaceMeta


def is_node(objecttype):
    '''
    Check if the given objecttype has Node as an interface
    '''
    assert issubclass(objecttype, ObjectType), (
        'Only ObjectTypes can have a Node interface.'
    )
    for i in objecttype._meta.interfaces:
        if issubclass(i, Node):
            return True
    return False


def get_default_connection(cls):
    from .connection import Connection
    assert issubclass(cls, ObjectType), (
        'Can only get connection type on implemented Nodes.'
    )

    class Meta:
        node = cls

    return type('{}Connection'.format(cls.__name__), (Connection,), {'Meta': Meta})


class GlobalID(Field):

    def __init__(self, node, *args, **kwargs):
        super(GlobalID, self).__init__(ID, *args, **kwargs)
        self.node = node

    @staticmethod
    def id_resolver(parent_resolver, node, root, args, context, info):
        id = parent_resolver(root, args, context, info)
        return node.to_global_id(info.parent_type.name, id)  # root._meta.name

    def get_resolver(self, parent_resolver):
        return partial(self.id_resolver, parent_resolver, self.node)


class NodeMeta(InterfaceMeta):

    def __new__(cls, name, bases, attrs):
        cls = InterfaceMeta.__new__(cls, name, bases, attrs)
        cls._meta.fields['id'] = GlobalID(cls, required=True, description='The ID of the object.')
        return cls


class NodeField(Field):

    def __init__(self, node, type=False, deprecation_reason=None, name=None, **kwargs):
        assert issubclass(node, Node), 'NodeField can only operate in Nodes'
        type = type or node
        super(NodeField, self).__init__(
            type,
            description='The ID of the object',
            id=ID(required=True),
            resolver=node.node_resolver
        )


class Node(six.with_metaclass(NodeMeta, Interface)):
    '''An object with an ID'''

    @classmethod
    def Field(cls, *args, **kwargs):  # noqa: N802
        return NodeField(cls, *args, **kwargs)

    @classmethod
    def node_resolver(cls, root, args, context, info):
        return cls.get_node_from_global_id(args.get('id'), context, info)

    @classmethod
    def get_node_from_global_id(cls, global_id, context, info):
        try:
            _type, _id = cls.from_global_id(global_id)
            graphene_type = info.schema.get_type(_type).graphene_type
            # We make sure the ObjectType implements the "Node" interface
            assert cls in graphene_type._meta.interfaces
        except:
            return None
        get_node = getattr(graphene_type, 'get_node', None)
        if get_node:
            return get_node(_id, context, info)

    @classmethod
    def from_global_id(cls, global_id):
        return from_global_id(global_id)

    @classmethod
    def to_global_id(cls, type, id):
        return to_global_id(type, id)

    @classmethod
    def implements(cls, objecttype):
        get_connection = getattr(objecttype, 'get_connection', None)
        if not get_connection:
            get_connection = partial(get_default_connection, objecttype)

        objecttype.Connection = get_connection()
extra_config.rs
use serde_derive::{Serialize, Deserialize};
extern crate serde_yaml;
use std::fs::File;
use std::io::prelude::*;
extern crate failure;

use super::super::erdh::erdh_data;

#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct ExtraConfig {
    pub tables: Vec<Table>,
}

impl ExtraConfig {
    pub fn from_yaml_file(path: &str) -> Result<ExtraConfig, failure::Error> {
        let mut file = File::open(path)?;
        let mut contents = String::new();
        file.read_to_string(&mut contents)?;
        Self::from_yaml(&contents)
    }

    pub fn from_yaml(yaml: &str) -> Result<ExtraConfig, failure::Error> {
        let result: ExtraConfig = serde_yaml::from_str(&yaml)?;
        Ok(result)
    }
}

#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Table {
    pub table: String,
    pub is_master: Option<bool>,
    pub group: Option<String>,
    pub relations: Option<Vec<erdh_data::ExRelation>>,
}

#[test]
fn parse_yaml_data_success() {
    let yaml = r#"
tables:
  - table: member_items
    is_master: true
    group: DATA
    relations:
      - referenced_table_name: members
        columns:
          - from: "member_id"
            to: "id"
        this_conn: "one"
        that_conn: "zero-or-one"
      - referenced_table_name: items
        columns:
          - from: "item_id"
            to: "id"
        this_conn: "onlyone"
        that_conn: "many"
  - table: items
    is_master: true
    group: DATA
  - table: item_types
    is_master: true
    group: MASTER
  - table: member_items
    group: DATA
  - table: members
    group: DATA
"#;
    let c = ExtraConfig::from_yaml(&yaml);
    assert_eq!(c.is_ok(), true);
}
planner.py
from __future__ import division

from bisect import bisect
from collections import namedtuple
from math import sqrt, hypot


# a planner computes a motion profile for a list of (x, y) points
class Planner(object):
    def __init__(self, acceleration, max_velocity, corner_factor):
        self.acceleration = acceleration
        self.max_velocity = max_velocity
        self.corner_factor = corner_factor

    def plan(self, points):
        return constant_acceleration_plan(
            points, self.acceleration, self.max_velocity, self.corner_factor
        )

    def plan_all(self, paths):
        return [self.plan(path) for path in paths]


# a plan is a motion profile generated by the planner
class Plan(object):
    def __init__(self, blocks):
        self.blocks = blocks
        self.ts = []  # start time of each block
        self.ss = []  # start distance of each block
        t = 0
        s = 0
        for b in blocks:
            self.ts.append(t)
            self.ss.append(s)
            t += b.t
            s += b.s
        self.t = t  # total time
        self.s = s  # total distance

    def instant(self, t):
        t = max(0, min(self.t, t))  # clamp t
        i = bisect(self.ts, t) - 1  # find block for t
        return self.blocks[i].instant(t - self.ts[i], self.ts[i], self.ss[i])


# a block is a constant acceleration for a duration of time
class Block(object):
    def __init__(self, a, t, vi, p1, p2):
        self.a = a
        self.t = t
        self.vi = vi
        self.p1 = p1
        self.p2 = p2
        self.s = p1.distance(p2)

    def instant(self, t, dt=0, ds=0):
        t = max(0, min(self.t, t))  # clamp t
        a = self.a
        v = self.vi + self.a * t
        s = self.vi * t + self.a * t * t / 2
        s = max(0, min(self.s, s))  # clamp s
        p = self.p1.lerps(self.p2, s)
        return Instant(t + dt, p, s + ds, v, a)


# an instant gives position, velocity, etc. at a single point in time
Instant = namedtuple("Instant", ["t", "p", "s", "v", "a"])

# a = acceleration
# v = velocity
# s = distance
# t = time
# i = initial
# f = final
# vf = vi + a * t
# s = (vf + vi) / 2 * t
# s = vi * t + a * t * t / 2
# vf * vf = vi * vi + 2 * a * s

EPS = 1e-9

_Point = namedtuple("Point", ["x", "y"])


class Point(_Point):
    def length(self):
        return hypot(self.x, self.y)

    def normalize(self):
        d = self.length()
        if d == 0:
            return Point(0, 0)
        return Point(self.x / d, self.y / d)

    def distance(self, other):
        return hypot(self.x - other.x, self.y - other.y)

    def distance_squared(self, other):
        return (self.x - other.x) ** 2 + (self.y - other.y) ** 2

    def add(self, other):
        return Point(self.x + other.x, self.y + other.y)

    def sub(self, other):
        return Point(self.x - other.x, self.y - other.y)

    def mul(self, factor):
        return Point(self.x * factor, self.y * factor)

    def dot(self, other):
        return self.x * other.x + self.y * other.y

    def lerps(self, other, s):
        v = other.sub(self).normalize()
        return self.add(v.mul(s))

    def segment_distance(self, v, w):
        p = self
        l2 = v.distance_squared(w)
        if l2 == 0:
            return p.distance(v)
        t = ((p.x - v.x) * (w.x - v.x) + (p.y - v.y) * (w.y - v.y)) / l2
        t = max(0, min(1, t))
        x = v.x + t * (w.x - v.x)
        y = v.y + t * (w.y - v.y)
        q = Point(x, y)
        return p.distance(q)


Triangle = namedtuple("Triangle", ["s1", "s2", "t1", "t2", "vmax", "p1", "p2", "p3"])


def triangle(s, vi, vf, a, p1, p3):
    # compute a triangular profile: accelerating, decelerating
    s1 = (2 * a * s + vf * vf - vi * vi) / (4 * a)
    s2 = s - s1
    vmax = (vi * vi + 2 * a * s1) ** 0.5
    t1 = (vmax - vi) / a
    t2 = (vf - vmax) / -a
    p2 = p1.lerps(p3, s1)
    return Triangle(s1, s2, t1, t2, vmax, p1, p2, p3)


Trapezoid = namedtuple(
    "Trapezoid", ["s1", "s2", "s3", "t1", "t2", "t3", "p1", "p2", "p3", "p4"]
)


def trapezoid(s, vi, vmax, vf, a, p1, p4):
    # compute a trapezoidal profile: accelerating, cruising, decelerating
    t1 = (vmax - vi) / a
    s1 = (vmax + vi) / 2 * t1
    t3 = (vf - vmax) / -a
    s3 = (vf + vmax) / 2 * t3
    s2 = s - s1 - s3
    t2 = s2 / vmax
    p2 = p1.lerps(p4, s1)
    p3 = p1.lerps(p4, s - s3)
    return Trapezoid(s1, s2, s3, t1, t2, t3, p1, p2, p3, p4)


def corner_velocity(s1, s2, vmax, a, delta):
    # compute a maximum velocity at the corner of two segments
    # https://onehossshay.wordpress.com/2011/09/24/improving_grbl_cornering_algorithm/
    cosine = -s1.vector.dot(s2.vector)
    if abs(cosine - 1) < EPS:
        return 0
    sine = sqrt((1 - cosine) / 2)
    if abs(sine - 1) < EPS:
        return vmax
    v = sqrt((a * delta * sine) / (1 - sine))
    return min(v, vmax)


class Segment(object):
    # a segment is a line segment between two points, which will be broken
    # up into blocks by the planner
    def __init__(self, p1, p2):
        self.p1 = p1
        self.p2 = p2
        self.length = p1.distance(p2)
        self.vector = p2.sub(p1).normalize()
        self.max_entry_velocity = 0
        self.entry_velocity = 0
        self.blocks = []


class Throttler(object):
    def __init__(self, points, vmax, dt, threshold):
        self.points = points
        self.vmax = vmax
        self.dt = dt
        self.threshold = threshold
        self.distances = []
        prev = points[0]
        d = 0
        for point in points:
            d += prev.distance(point)
            self.distances.append(d)
            prev = point

    def lookup(self, d):
        return bisect(self.distances, d) - 1

    def is_feasible(self, i0, v):
        d = v * self.dt
        x0 = self.distances[i0]
        x1 = x0 + d
        i1 = self.lookup(x1)
        if i0 == i1:
            return True
        p0 = self.points[i0]
        p10 = self.points[i1]
        try:
            p11 = self.points[i1 + 1]
        except IndexError:
            p11 = p10
        s = x1 - self.distances[i1]
        p1 = p10.lerps(p11, s)
        i = i0 + 1
        while i <= i1:
            p = self.points[i]
            if p.segment_distance(p0, p1) > self.threshold:
                return False
            i += 1
        return True

    def compute_max_velocity(self, index):
        if self.is_feasible(index, self.vmax):
            return self.vmax
        lo = 0
        hi = self.vmax
        for _ in range(16):
            v = (lo + hi) / 2
            if self.is_feasible(index, v):
                lo = v
            else:
                hi = v
        v = lo
        return v

    def compute_max_velocities(self):
        return [self.compute_max_velocity(i) for i in range(len(self.points))]


def constant_acceleration_plan(points, a, vmax, cf):
    # make sure points are Point objects
    points = [Point(x, y) for x, y in points]

    # the throttler reduces speeds based on the discrete timeslicing nature of
    # the device
    # TODO: expose parameters
    throttler = Throttler(points, vmax, 0.02, 0.001)
    max_velocities = throttler.compute_max_velocities()

    # create segments for each consecutive pair of points
    segments = [Segment(p1, p2) for p1, p2 in zip(points, points[1:])]

    # compute a max_entry_velocity for each segment
    # based on the angle formed by the two segments at the vertex
    for v, s1, s2 in zip(max_velocities, segments, segments[1:]):
        s1.max_entry_velocity = min(s1.max_entry_velocity, v)
        s2.max_entry_velocity = corner_velocity(s1, s2, vmax, a, cf)

    # add a dummy segment at the end to force a final velocity of zero
    segments.append(Segment(points[-1], points[-1]))

    # loop over segments
    i = 0
    while i < len(segments) - 1:
        # pull out some variables
        segment = segments[i]
        next_segment = segments[i + 1]
        s = segment.length
        vi = segment.entry_velocity
        vexit = next_segment.max_entry_velocity
        p1 = segment.p1
        p2 = segment.p2

        # determine which profile to use for this segment
        m = triangle(s, vi, vexit, a, p1, p2)
        if m.s1 < -EPS:
            # too fast! update max_entry_velocity and backtrack
            segment.max_entry_velocity = sqrt(vexit * vexit + 2 * a * s)
            i -= 1
        elif m.s2 < 0:
            # accelerate
            vf = sqrt(vi * vi + 2 * a * s)
            t = (vf - vi) / a
            segment.blocks = [
                Block(a, t, vi, p1, p2),
            ]
            next_segment.entry_velocity = vf
            i += 1
        elif m.vmax > vmax:
            # accelerate, cruise, decelerate
            z = trapezoid(s, vi, vmax, vexit, a, p1, p2)
            segment.blocks = [
                Block(a, z.t1, vi, z.p1, z.p2),
                Block(0, z.t2, vmax, z.p2, z.p3),
                Block(-a, z.t3, vmax, z.p3, z.p4),
            ]
            next_segment.entry_velocity = vexit
            i += 1
        else:
            # accelerate, decelerate
            segment.blocks = [
                Block(a, m.t1, vi, m.p1, m.p2),
                Block(-a, m.t2, m.vmax, m.p2, m.p3),
            ]
            next_segment.entry_velocity = vexit
            i += 1

    # concatenate all of the blocks
    blocks = []
    for segment in segments:
        blocks.extend(segment.blocks)

    # filter out zero-duration blocks and return
    blocks = [b for b in blocks if b.t > EPS]
    return Plan(blocks)
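A minimal usage sketch for the planner above; the numeric arguments are illustrative values, not constants taken from the source:

```python
# Plan a motion profile over a simple L-shaped path, then sample it.
# acceleration=16, max_velocity=4, corner_factor=0.001 are made-up values.
planner = Planner(acceleration=16, max_velocity=4, corner_factor=0.001)
plan = planner.plan([(0, 0), (10, 0), (10, 10)])
print(plan.t, plan.s)           # total time and total distance
mid = plan.instant(plan.t / 2)  # state halfway through, in time
print(mid.p, mid.v)             # position and velocity at that instant
```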
missing_inline.rs
use crate::utils::span_lint;
use rustc::declare_lint_pass;
use rustc::hir;
use rustc::lint::{self, LateContext, LateLintPass, LintArray, LintContext, LintPass};
use rustc_session::declare_tool_lint;
use syntax::ast;
use syntax::source_map::Span;

declare_clippy_lint! {
    /// **What it does:** it lints if an exported function, method, trait method with default impl,
    /// or trait method impl is not `#[inline]`.
    ///
    /// **Why is this bad?** In general, it is not. Functions can be inlined across
    /// crates when that's profitable as long as any form of LTO is used. When LTO is disabled,
    /// functions that are not `#[inline]` cannot be inlined across crates. Certain types of crates
    /// might intend for most of the methods in their public API to be able to be inlined across
    /// crates even when LTO is disabled. For these types of crates, enabling this lint might make
    /// sense. It allows the crate to require all exported methods to be `#[inline]` by default, and
    /// then opt out for specific methods where this might not make sense.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    /// ```rust
    /// pub fn foo() {} // missing #[inline]
    /// fn ok() {} // ok
    /// #[inline] pub fn bar() {} // ok
    /// #[inline(always)] pub fn baz() {} // ok
    ///
    /// pub trait Bar {
    ///   fn bar(); // ok
    ///   fn def_bar() {} // missing #[inline]
    /// }
    ///
    /// struct Baz;
    /// impl Baz {
    ///   fn private() {} // ok
    /// }
    ///
    /// impl Bar for Baz {
    ///   fn bar() {} // ok - Baz is not exported
    /// }
    ///
    /// pub struct PubBaz;
    /// impl PubBaz {
    ///   fn private() {} // ok
    ///   pub fn not_private() {} // missing #[inline]
    /// }
    ///
    /// impl Bar for PubBaz {
    ///   fn bar() {} // missing #[inline]
    ///   fn def_bar() {} // missing #[inline]
    /// }
    /// ```
    pub MISSING_INLINE_IN_PUBLIC_ITEMS,
    restriction,
    "detects missing #[inline] attribute for public callables (functions, trait methods, methods...)"
}

fn check_missing_inline_attrs(cx: &LateContext<'_, '_>, attrs: &[ast::Attribute], sp: Span, desc: &'static str) {
    let has_inline = attrs.iter().any(|a| a.check_name(sym!(inline)));
    if !has_inline {
        span_lint(
            cx,
            MISSING_INLINE_IN_PUBLIC_ITEMS,
            sp,
            &format!("missing `#[inline]` for {}", desc),
        );
    }
}

fn is_executable(cx: &LateContext<'_, '_>) -> bool {
    use rustc::session::config::CrateType;

    cx.tcx.sess.crate_types.get().iter().any(|t: &CrateType| match t {
        CrateType::Executable => true,
        _ => false,
    })
}

declare_lint_pass!(MissingInline => [MISSING_INLINE_IN_PUBLIC_ITEMS]);

impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingInline {
    fn check_item(&mut self, cx: &LateContext<'a, 'tcx>, it: &'tcx hir::Item<'_>) {
        if lint::in_external_macro(cx.sess(), it.span) || is_executable(cx) {
            return;
        }

        if !cx.access_levels.is_exported(it.hir_id) {
            return;
        }
        match it.kind {
            hir::ItemKind::Fn(..) => {
                let desc = "a function";
                check_missing_inline_attrs(cx, &it.attrs, it.span, desc);
            },
            hir::ItemKind::Trait(ref _is_auto, ref _unsafe, ref _generics, ref _bounds, trait_items) => {
                // note: we need to check if the trait is exported so we can't use
                // `LateLintPass::check_trait_item` here.
                for tit in trait_items {
                    let tit_ = cx.tcx.hir().trait_item(tit.id);
                    match tit_.kind {
                        hir::TraitItemKind::Const(..) | hir::TraitItemKind::Type(..) => {},
                        hir::TraitItemKind::Method(..) => {
                            if tit.defaultness.has_value() {
                                // trait method with default body needs inline in case
                                // an impl is not provided
                                let desc = "a default trait method";
                                let item = cx.tcx.hir().expect_trait_item(tit.id.hir_id);
                                check_missing_inline_attrs(cx, &item.attrs, item.span, desc);
                            }
                        },
                    }
                }
            },
            hir::ItemKind::Const(..)
            | hir::ItemKind::Enum(..)
            | hir::ItemKind::Mod(..)
            | hir::ItemKind::Static(..)
            | hir::ItemKind::Struct(..)
            | hir::ItemKind::TraitAlias(..)
            | hir::ItemKind::GlobalAsm(..)
            | hir::ItemKind::TyAlias(..)
            | hir::ItemKind::Union(..)
            | hir::ItemKind::OpaqueTy(..)
            | hir::ItemKind::ExternCrate(..)
            | hir::ItemKind::ForeignMod(..)
            | hir::ItemKind::Impl(..)
            | hir::ItemKind::Use(..) => {},
        };
    }

    fn check_impl_item(&mut self, cx: &LateContext<'a, 'tcx>, impl_item: &'tcx hir::ImplItem<'_>) {
        use rustc::ty::{ImplContainer, TraitContainer};
        if lint::in_external_macro(cx.sess(), impl_item.span) || is_executable(cx) {
            return;
        }

        // If the item being implemented is not exported, then we don't need #[inline]
        if !cx.access_levels.is_exported(impl_item.hir_id) {
            return;
        }

        let desc = match impl_item.kind {
            hir::ImplItemKind::Method(..) => "a method",
            hir::ImplItemKind::Const(..) | hir::ImplItemKind::TyAlias(_) | hir::ImplItemKind::OpaqueTy(_) => return,
        };

        let def_id = cx.tcx.hir().local_def_id(impl_item.hir_id);
        let trait_def_id = match cx.tcx.associated_item(def_id).container {
            TraitContainer(cid) => Some(cid),
            ImplContainer(cid) => cx.tcx.impl_trait_ref(cid).map(|t| t.def_id),
        };

        if let Some(trait_def_id) = trait_def_id {
            if cx.tcx.hir().as_local_node_id(trait_def_id).is_some() && !cx.access_levels.is_exported(impl_item.hir_id) {
                // If a trait is being implemented for an item, and the
                // trait is not exported, we don't need #[inline]
                return;
            }
        }

        check_missing_inline_attrs(cx, &impl_item.attrs, impl_item.span, desc);
    }
}
null_person_birthdate.py
""" Null Person Table Birth Date Fields In the person table, the fields month_of_birth, day_of_birth, and birth_datetime should be nulled. The year_of_birth field should remain unchanged. Original Issue: DC-1356 """ # Python imports import logging # Project imports import constants.bq_utils as bq_consts from cdr_cleaner.cleaning_rules.base_cleaning_rule import BaseCleaningRule from constants.cdr_cleaner import clean_cdr as cdr_consts from common import JINJA_ENV, PERSON from utils import pipeline_logging LOGGER = logging.getLogger(__name__) NULL_DATE_QUERY = JINJA_ENV.from_string(""" UPDATE `{{project_id}}.{{dataset_id}}.{{person_table}}` SET birth_datetime = NULL, month_of_birth = NULL, day_of_birth = NULL WHERE TRUE """) class NullPersonBirthdate(BaseCleaningRule): def __init__(self, project_id, dataset_id, sandbox_dataset_id): """ Initialize the class with proper information. Set the issue numbers, description and affected datasets. As other tickets may affect this SQL, append them to the list of Jira Issues. DO NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS! """ desc = 'Set Patient Birthdate Fields to NULL' super().__init__(issue_numbers=['DC1356'], description=desc, affected_datasets=[cdr_consts.CONTROLLED_TIER_DEID], affected_tables=PERSON, project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id) def setup_rule(self, client, *args, **keyword_args): """ Load required resources prior to executing cleaning rule queries. Method to run data upload options before executing the first cleaning rule of a class. For example, if your class requires loading a static table, that load operation should be defined here. It SHOULD NOT BE defined as part of get_query_specs(). :param client: :return: """ pass def get_query_specs(self, *args, **keyword_args):
def setup_validation(self, client, *args, **keyword_args): """ Run required steps for validation setup Method to run to setup validation on cleaning rules that will be updating or deleting the values. For example: if your class updates all the datetime fields you should be implementing the logic to get the initial list of values which adhere to a condition we are looking for. if your class deletes a subset of rows in the tables you should be implementing the logic to get the row counts of the tables prior to applying cleaning rule """ raise NotImplementedError("Please fix me.") def validate_rule(self, client, *args, **keyword_args): """ Validates the cleaning rule which deletes or updates the data from the tables Method to run validation on cleaning rules that will be updating the values. For example: if your class updates all the datetime fields you should be implementing the validation that checks if the date time values that needs to be updated no longer exists in the table. if your class deletes a subset of rows in the tables you should be implementing the validation that checks if the count of final final row counts + deleted rows should equals to initial row counts of the affected tables. Raises RunTimeError if the validation fails. """ raise NotImplementedError("Please fix me.") def get_sandbox_tablenames(self): return [self.sandbox_table_for(PERSON)] if __name__ == '__main__': import cdr_cleaner.args_parser as parser import cdr_cleaner.clean_cdr_engine as clean_engine ARGS = parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if ARGS.list_queries: clean_engine.add_console_logging() query_list = clean_engine.get_query_list(ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(NullPersonBirthdate,)]) for query in query_list: LOGGER.info(query) else: clean_engine.add_console_logging(ARGS.console_log) clean_engine.clean_dataset(ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(NullPersonBirthdate,)])
""" Interface to return a list of query dictionaries. :returns: a list of query dictionaries. Each dictionary specifies the query to execute and how to execute. The dictionaries are stored in list order and returned in list order to maintain an ordering. """ update_query = dict() update_query[cdr_consts.QUERY] = NULL_DATE_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id, person_table=PERSON) return [update_query]
arena.rs
use elsa::FrozenVec;

fn main() {
    let arena = Arena::new();
    let lonely = arena.add_thing("lonely", vec![]);
    let best_friend = arena.add_thing("best friend", vec![lonely]);
    let threes_a_crowd = arena.add_thing("threes a crowd", vec![lonely, best_friend]);
    let rando = arena.add_thing("rando", vec![]);
    let _facebook = arena.add_thing("facebook", vec![rando, threes_a_crowd, lonely, best_friend]);

    assert!(cmp_ref(lonely, best_friend.friends[0]));
    assert!(cmp_ref(best_friend, threes_a_crowd.friends[1]));
    arena.dump();
}

struct Arena<'arena> {
    things: FrozenVec<Box<Thing<'arena>>>,
}

struct Thing<'arena> {
    pub friends: Vec<ThingRef<'arena>>,
    pub name: &'static str,
}

type ThingRef<'arena> = &'arena Thing<'arena>;

impl<'arena> Arena<'arena> {
    fn new() -> Arena<'arena> {
        Arena {
            things: FrozenVec::new(),
        }
    }

    fn add_thing(
        &'arena self,
        name: &'static str,
        friends: Vec<ThingRef<'arena>>,
    ) -> ThingRef<'arena> {
        let idx = self.things.len();
        self.things.push(Box::new(Thing { name, friends }));
        &self.things[idx]
    }

    fn dump(&'arena self) {
        for thing in &self.things {
            println!("friends of {}:", thing.name);
            for friend in &thing.friends {
                println!("\t{}", friend.name);
            }
        }
    }
}

fn cmp_ref<T>(x: &T, y: &T) -> bool {
    x as *const T as usize == y as *const T as usize
}
$component.ts
import { $Component } from '../interface/async/$Component'
import { $EE } from '../interface/async/$EE'
import { Async } from '../interface/async/Async'
import { Component_ } from '../interface/Component'
import { EE } from '../interface/EE'
import { W } from '../interface/W'
import { proxyWrap } from '../proxyWrap'
import { Callback } from '../types/Callback'
import { UnitClass } from '../types/UnitClass'
import { $Child } from './Child'
import { $Children } from './Children'

export function $appendChild(
  component: Component_,
  Class: UnitClass<Component_>,
  callback: Callback<number>
): void {
  const i = component.appendChild(Class)
  callback(i)
}

export function $removeChild(
  component: Component_,
  { at }: { at: number },
  callback: Callback<{ specId: string }>
): void {
  try {
    const Class = component.removeChild(at)
    const specId = Class.__bundle.unit.id
    callback({ specId })
  } catch (err) {
    callback(undefined, err.message)
  }
}

export function $hasChild(
  component: Component_,
  { at }: { at: number },
  callback: Callback<boolean>
): void {
  const has = component.hasChild(at)
  callback(has)
}

export function $child(
  component: Component_,
  { at }: { at: number },
  callback: Callback<$Child>
): void {
  const child = component.refChild(at)
  // @ts-ignore
  const id = child.constructor.id
  callback(id)
}

export function $children(
  component: Component_,
  {},
  callback: Callback<$Children>
): void {
  const children = component.refChildren()
  const _children = children.map((c) => {
    // @ts-ignore
    return { id: c.constructor.__bundle.unit.id } as $Child
  })
  callback(_children)
}

export function $refChild(
  component: Component_,
  { at, _ }: { at: number; _: string[] }
): $Component {
  const child = component.refChild(at)
  const $child = Async(child, _)
  return proxyWrap($child, _)
}

export function $refEmitter(emitter: EE): $EE {
  const _emitter = emitter.refEmitter()
  const $_emitter = Async(_emitter, ['$EE'])
  return proxyWrap($_emitter, ['$EE'])
}

export function $refChildContainer(
  component: W,
  { at, _ }: { at: number; _: string[] }
): $Component {
  const container = component.refChildContainer(at)
  const $container = Async(container, _)
  return proxyWrap($container, _)
}

export function $refParentRootContainer(
  component: W,
  { at, _ }: { at: number; _: string[] }
): $Component {
  const container = component.refParentRootContainer(at)
  const local_child = Async(container, _)
  return proxyWrap(local_child, _)
}

export function $refParentChildContainer(
  component: W,
  { at, _ }: { at: number; _: string[] }
): $Component {
  const container = component.refParentChildContainer(at)
  const local_child = Async(container, _)
  return proxyWrap(local_child, _)
}
446.py
"""446. Arithmetic Slices II - Subsequence""" class Solution(object): def numberOfArithmeticSlices(self, A):
######
""" :type A: List[int] :rtype: int """ dp = [collections.defaultdict(int) for _ in range(len(A))] total = 0 for i in range(len(A)): for j in range(i): k = A[i] - A[j] dp[i][k] += 1 if k in dp[j]: dp[i][k] += dp[j][k] total += dp[j][k] return total
create.py
from __future__ import print_function

import os
import subprocess
import sys

import six

from kecpkg.files.rendering import render_to_file
from kecpkg.utils import (ensure_dir_exists, get_proper_python, NEED_SUBPROCESS_SHELL, venv, echo_success,
                          echo_failure, echo_info)


def create_package(package_dir, settings):
    """
    Create the package directory.

    package_name (or package_dir)
    +-- README.md
    +-- requirements.txt
    +-- package_info.json
    +-- main.py (settable with settings['entrypoint_script'])

    :param package_dir: the full path to the package dir
    :param settings: settings dict
    """
    ensure_dir_exists(package_dir)
    render_to_file('README.md', content=settings, target_dir=package_dir)
    render_to_file('requirements.txt', content=settings, target_dir=package_dir)
    render_to_file('package_info.json',
                   content=dict(requirements_txt='requirements.txt',
                                entrypoint_script=settings.get('entrypoint_script'),
                                entrypoint_func=settings.get('entrypoint_func')),
                   target_dir=package_dir)
    render_to_file('.gitignore', content=dict(), target_dir=package_dir)
    render_to_file('.env', content=dict(), target_dir=package_dir)

    # runconfigurations
    run_configurations_path = os.path.join(package_dir, '.idea', 'runConfigurations')
    ensure_dir_exists(run_configurations_path)
    render_to_file('Upload_the_kecpkg.xml', content=dict(), target_dir=run_configurations_path)
    render_to_file('Build_the_kecpkg.xml', content=dict(), target_dir=run_configurations_path)

    script_filename = '{}.py'.format(settings.get('entrypoint_script'))
    render_to_file(script_filename, content=settings, template='script.py.template',
                   target_dir=package_dir)


def create_venv(package_dir, settings, pypath=None, use_global=False, verbose=False):
    """
    Create the virtual environment in `venv` for the package.

    The virtual environment path name can be set in the settings.

    package_dir
    +-- venv (the virtual environment based on the chosen python version)
        +-- ...

    :param package_dir: the full path to the package directory
    :param settings: the settings dict (including the venv_dir name to create the right venv)
    :param pypath: absolute path to the python binary interpreter to create the virtual environment with
    :param use_global: Use global system site packages when creating virtual environment (default False)
    :param verbose: Use verbosity (default False)
    """
    venv_dir = os.path.join(package_dir, settings.get('venv_dir'))

    if not pypath:
        from distutils.spawn import find_executable
        pypath = find_executable(get_proper_python())

    command = [sys.executable, '-m', 'virtualenv', venv_dir, '-p', pypath]
    if use_global:  # no cov
        command.append('--system-site-packages')
    if not verbose:  # no cov
        command.append('-qqq')

    if six.PY3:
        result = subprocess.run(command, shell=NEED_SUBPROCESS_SHELL)
        return result.returncode
    elif six.PY2:
        result = subprocess.check_output(command, shell=NEED_SUBPROCESS_SHELL)
        return result and 0 or -1


def pip_install_venv(package_dir, settings, verbose=False):
    """
    Install requirements into the virtual environment.

    :param package_dir: the full path to the package directory
    :param settings: the settings dict (including the venv_dir name)
    :param verbose: (optional) be more verbose if set to True, defaults to False
    """
    venv_dir = os.path.join(package_dir, settings.get('venv_dir'))
    if not os.path.exists(venv_dir):
        echo_failure('virtual environment directory `{}` does not exist, nothing to install'.format(venv_dir))
        sys.exit(1)

    if not os.path.exists(os.path.join(package_dir, settings.get('requirements_filename'))):
        echo_failure('could not find requirements.txt to install, check if `{}` exists or update settings'.format(
            settings.get('requirements_filename')))
        sys.exit(1)

    install_command = [sys.executable, '-m', 'pip', 'install',
                       '-r', os.path.join(package_dir, settings.get('requirements_filename'))]
    if not verbose:  # no cov
        install_command.append('-qqq')

    with venv(venv_dir):
        echo_info('Installing requirements from `{}` into the virtual environment `{}`'.
                  format(settings.get('requirements_filename'), settings.get('venv_dir')))
        result = None
        if six.PY3:
            result = subprocess.run(install_command, shell=NEED_SUBPROCESS_SHELL)
            return result.returncode
        elif six.PY2:
            result = subprocess.check_output(install_command, shell=NEED_SUBPROCESS_SHELL)
            return result and 0 or -1

    if result:
        echo_success(str(result))
        return result.returncode
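A hypothetical end-to-end invocation, assuming a settings dict that carries the keys these functions read (entrypoint_script, entrypoint_func, venv_dir, requirements_filename); the path and values below are placeholders:

```python
settings = {
    'entrypoint_script': 'main',
    'entrypoint_func': 'main',
    'venv_dir': 'venv',
    'requirements_filename': 'requirements.txt',
}
create_package('/tmp/my-kecpkg', settings)    # scaffold the package files
create_venv('/tmp/my-kecpkg', settings)       # build its virtual environment
pip_install_venv('/tmp/my-kecpkg', settings)  # install requirements into it
```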
eventsubscriptions.go
package eventgrid

// Copyright (c) Microsoft and contributors.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.

import (
    "context"
    "github.com/Azure/go-autorest/autorest"
    "github.com/Azure/go-autorest/autorest/azure"
    "github.com/Azure/go-autorest/tracing"
    "net/http"
)

// EventSubscriptionsClient is the azure EventGrid Management Client
type EventSubscriptionsClient struct {
    BaseClient
}

// NewEventSubscriptionsClient creates an instance of the EventSubscriptionsClient client.
func NewEventSubscriptionsClient(subscriptionID string) EventSubscriptionsClient {
    return NewEventSubscriptionsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}

// NewEventSubscriptionsClientWithBaseURI creates an instance of the EventSubscriptionsClient client using a custom
// endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure
// stack).
func NewEventSubscriptionsClientWithBaseURI(baseURI string, subscriptionID string) EventSubscriptionsClient {
    return EventSubscriptionsClient{NewWithBaseURI(baseURI, subscriptionID)}
}

// CreateOrUpdate asynchronously creates a new event subscription or updates an existing event subscription based on
// the specified scope.
// Parameters:
// scope - the identifier of the resource to which the event subscription needs to be created or updated. The
// scope can be a subscription, or a resource group, or a top level resource belonging to a resource provider
// namespace, or an EventGrid topic. For example, use '/subscriptions/{subscriptionId}/' for a subscription,
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for a resource group, and
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}'
// for a resource, and
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/topics/{topicName}'
// for an EventGrid topic.
// eventSubscriptionName - name of the event subscription. Event subscription names must be between 3 and 64
// characters in length and should use alphanumeric letters only.
// eventSubscriptionInfo - event subscription properties containing the destination and filter information
func (client EventSubscriptionsClient) CreateOrUpdate(ctx context.Context, scope string, eventSubscriptionName string, eventSubscriptionInfo EventSubscription) (result EventSubscriptionsCreateOrUpdateFuture, err error) {
    if tracing.IsEnabled() {
        ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.CreateOrUpdate")
        defer func() {
            sc := -1
            if result.Response() != nil {
                sc = result.Response().StatusCode
            }
            tracing.EndSpan(ctx, sc, err)
        }()
    }
    req, err := client.CreateOrUpdatePreparer(ctx, scope, eventSubscriptionName, eventSubscriptionInfo)
    if err != nil {
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "CreateOrUpdate", nil, "Failure preparing request")
        return
    }

    result, err = client.CreateOrUpdateSender(req)
    if err != nil {
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "CreateOrUpdate", nil, "Failure sending request")
        return
    }

    return
}

// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client EventSubscriptionsClient) CreateOrUpdatePreparer(ctx context.Context, scope string, eventSubscriptionName string, eventSubscriptionInfo EventSubscription) (*http.Request, error) {
    pathParameters := map[string]interface{}{
        "eventSubscriptionName": autorest.Encode("path", eventSubscriptionName),
        "scope":                 scope,
    }

    const APIVersion = "2018-05-01-preview"
    queryParameters := map[string]interface{}{
        "api-version": APIVersion,
    }

    preparer := autorest.CreatePreparer(
        autorest.AsContentType("application/json; charset=utf-8"),
        autorest.AsPut(),
        autorest.WithBaseURL(client.BaseURI),
        autorest.WithPathParameters("/{scope}/providers/Microsoft.EventGrid/eventSubscriptions/{eventSubscriptionName}", pathParameters),
        autorest.WithJSON(eventSubscriptionInfo),
        autorest.WithQueryParameters(queryParameters))
    return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client EventSubscriptionsClient) CreateOrUpdateSender(req *http.Request) (future EventSubscriptionsCreateOrUpdateFuture, err error) {
    var resp *http.Response
    resp, err = client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
    if err != nil {
        return
    }
    var azf azure.Future
    azf, err = azure.NewFutureFromResponse(resp)
    future.FutureAPI = &azf
    future.Result = func(client EventSubscriptionsClient) (es EventSubscription, err error) {
        var done bool
        done, err = future.DoneWithContext(context.Background(), client)
        if err != nil {
            err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsCreateOrUpdateFuture", "Result", future.Response(), "Polling failure")
            return
        }
        if !done {
            err = azure.NewAsyncOpIncompleteError("eventgrid.EventSubscriptionsCreateOrUpdateFuture")
            return
        }
        sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
        if es.Response.Response, err = future.GetResult(sender); err == nil && es.Response.Response.StatusCode != http.StatusNoContent {
            es, err = client.CreateOrUpdateResponder(es.Response.Response)
            if err != nil {
                err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsCreateOrUpdateFuture", "Result", es.Response.Response, "Failure responding to request")
            }
        }
        return
    }
    return
}

// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client EventSubscriptionsClient) CreateOrUpdateResponder(resp *http.Response) (result EventSubscription, err error) {
    err = autorest.Respond(
        resp,
        azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
        autorest.ByUnmarshallingJSON(&result),
        autorest.ByClosing())
    result.Response = autorest.Response{Response: resp}
    return
}

// Delete delete an existing event subscription
// Parameters:
// scope - the scope of the event subscription. The scope can be a subscription, or a resource group, or a top
// level resource belonging to a resource provider namespace, or an EventGrid topic. For example, use
// '/subscriptions/{subscriptionId}/' for a subscription,
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for a resource group, and
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}'
// for a resource, and
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/topics/{topicName}'
// for an EventGrid topic.
// eventSubscriptionName - name of the event subscription
func (client EventSubscriptionsClient) Delete(ctx context.Context, scope string, eventSubscriptionName string) (result EventSubscriptionsDeleteFuture, err error) {
    if tracing.IsEnabled() {
        ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.Delete")
        defer func() {
            sc := -1
            if result.Response() != nil {
                sc = result.Response().StatusCode
            }
            tracing.EndSpan(ctx, sc, err)
        }()
    }
    req, err := client.DeletePreparer(ctx, scope, eventSubscriptionName)
    if err != nil {
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "Delete", nil, "Failure preparing request")
        return
    }

    result, err = client.DeleteSender(req)
    if err != nil {
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "Delete", nil, "Failure sending request")
        return
    }

    return
}

// DeletePreparer prepares the Delete request.
func (client EventSubscriptionsClient) DeletePreparer(ctx context.Context, scope string, eventSubscriptionName string) (*http.Request, error) {
    pathParameters := map[string]interface{}{
        "eventSubscriptionName": autorest.Encode("path", eventSubscriptionName),
        "scope":                 scope,
    }

    const APIVersion = "2018-05-01-preview"
    queryParameters := map[string]interface{}{
        "api-version": APIVersion,
    }

    preparer := autorest.CreatePreparer(
        autorest.AsDelete(),
        autorest.WithBaseURL(client.BaseURI),
        autorest.WithPathParameters("/{scope}/providers/Microsoft.EventGrid/eventSubscriptions/{eventSubscriptionName}", pathParameters),
        autorest.WithQueryParameters(queryParameters))
    return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client EventSubscriptionsClient) DeleteSender(req *http.Request) (future EventSubscriptionsDeleteFuture, err error) {
    var resp *http.Response
    resp, err = client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
    if err != nil {
        return
    }
    var azf azure.Future
    azf, err = azure.NewFutureFromResponse(resp)
    future.FutureAPI = &azf
    future.Result = func(client EventSubscriptionsClient) (ar autorest.Response, err error) {
        var done bool
        done, err = future.DoneWithContext(context.Background(), client)
        if err != nil {
            err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsDeleteFuture", "Result", future.Response(), "Polling failure")
            return
        }
        if !done {
            err = azure.NewAsyncOpIncompleteError("eventgrid.EventSubscriptionsDeleteFuture")
            return
        }
        ar.Response = future.Response()
        return
    }
    return
}

// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client EventSubscriptionsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
    err = autorest.Respond(
        resp,
        azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
        autorest.ByClosing())
    result.Response = resp
    return
}

// Get get properties of an event subscription
// Parameters:
// scope - the scope of the event subscription. The scope can be a subscription, or a resource group, or a top
// level resource belonging to a resource provider namespace, or an EventGrid topic. For example, use
// '/subscriptions/{subscriptionId}/' for a subscription,
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for a resource group, and
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}'
// for a resource, and
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/topics/{topicName}'
// for an EventGrid topic.
// eventSubscriptionName - name of the event subscription
func (client EventSubscriptionsClient) Get(ctx context.Context, scope string, eventSubscriptionName string) (result EventSubscription, err error) {
    if tracing.IsEnabled() {
        ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.Get")
        defer func() {
            sc := -1
            if result.Response.Response != nil {
                sc = result.Response.Response.StatusCode
            }
            tracing.EndSpan(ctx, sc, err)
        }()
    }
    req, err := client.GetPreparer(ctx, scope, eventSubscriptionName)
    if err != nil {
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "Get", nil, "Failure preparing request")
        return
    }

    resp, err := client.GetSender(req)
    if err != nil {
        result.Response = autorest.Response{Response: resp}
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "Get", resp, "Failure sending request")
        return
    }

    result, err = client.GetResponder(resp)
    if err != nil {
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "Get", resp, "Failure responding to request")
        return
    }

    return
}

// GetPreparer prepares the Get request.
func (client EventSubscriptionsClient) GetPreparer(ctx context.Context, scope string, eventSubscriptionName string) (*http.Request, error) {
    pathParameters := map[string]interface{}{
        "eventSubscriptionName": autorest.Encode("path", eventSubscriptionName),
        "scope":                 scope,
    }

    const APIVersion = "2018-05-01-preview"
    queryParameters := map[string]interface{}{
        "api-version": APIVersion,
    }

    preparer := autorest.CreatePreparer(
        autorest.AsGet(),
        autorest.WithBaseURL(client.BaseURI),
        autorest.WithPathParameters("/{scope}/providers/Microsoft.EventGrid/eventSubscriptions/{eventSubscriptionName}", pathParameters),
        autorest.WithQueryParameters(queryParameters))
    return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client EventSubscriptionsClient) GetSender(req *http.Request) (*http.Response, error) {
    return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client EventSubscriptionsClient) GetResponder(resp *http.Response) (result EventSubscription, err error) {
    err = autorest.Respond(
        resp,
        azure.WithErrorUnlessStatusCode(http.StatusOK),
        autorest.ByUnmarshallingJSON(&result),
        autorest.ByClosing())
    result.Response = autorest.Response{Response: resp}
    return
}

// GetFullURL get the full endpoint URL for an event subscription
// Parameters:
// scope - the scope of the event subscription. The scope can be a subscription, or a resource group, or a top
// level resource belonging to a resource provider namespace, or an EventGrid topic. For example, use
// '/subscriptions/{subscriptionId}/' for a subscription,
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for a resource group, and
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}'
// for a resource, and
// '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/topics/{topicName}'
// for an EventGrid topic.
// eventSubscriptionName - name of the event subscription
func (client EventSubscriptionsClient) GetFullURL(ctx context.Context, scope string, eventSubscriptionName string) (result EventSubscriptionFullURL, err error) {
    if tracing.IsEnabled() {
        ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.GetFullURL")
        defer func() {
            sc := -1
            if result.Response.Response != nil {
                sc = result.Response.Response.StatusCode
            }
            tracing.EndSpan(ctx, sc, err)
        }()
    }
    req, err := client.GetFullURLPreparer(ctx, scope, eventSubscriptionName)
    if err != nil {
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "GetFullURL", nil, "Failure preparing request")
        return
    }

    resp, err := client.GetFullURLSender(req)
    if err != nil {
        result.Response = autorest.Response{Response: resp}
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "GetFullURL", resp, "Failure sending request")
        return
    }

    result, err = client.GetFullURLResponder(resp)
    if err != nil {
        err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "GetFullURL", resp, "Failure responding to request")
        return
    }

    return
}

// GetFullURLPreparer prepares the GetFullURL request.
func (client EventSubscriptionsClient) GetFullURLPreparer(ctx context.Context, scope string, eventSubscriptionName string) (*http.Request, error) {
    pathParameters := map[string]interface{}{
        "eventSubscriptionName": autorest.Encode("path", eventSubscriptionName),
        "scope":                 scope,
    }

    const APIVersion = "2018-05-01-preview"
    queryParameters := map[string]interface{}{
        "api-version": APIVersion,
    }

    preparer := autorest.CreatePreparer(
        autorest.AsPost(),
        autorest.WithBaseURL(client.BaseURI),
        autorest.WithPathParameters("/{scope}/providers/Microsoft.EventGrid/eventSubscriptions/{eventSubscriptionName}/getFullUrl", pathParameters),
        autorest.WithQueryParameters(queryParameters))
    return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetFullURLSender sends the GetFullURL request. The method will close the
// http.Response Body if it receives an error.
func (client EventSubscriptionsClient) GetFullURLSender(req *http.Request) (*http.Response, error) {
    return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}

// GetFullURLResponder handles the response to the GetFullURL request. The method always
// closes the http.Response Body.
func (client EventSubscriptionsClient) GetFullURLResponder(resp *http.Response) (result EventSubscriptionFullURL, err error) {
    err = autorest.Respond(
        resp,
        azure.WithErrorUnlessStatusCode(http.StatusOK),
        autorest.ByUnmarshallingJSON(&result),
        autorest.ByClosing())
    result.Response = autorest.Response{Response: resp}
    return
}

// ListByResource list all event subscriptions that have been created for a specific topic
// Parameters:
// resourceGroupName - the name of the resource group within the user's subscription.
// providerNamespace - namespace of the provider of the topic // resourceTypeName - name of the resource type // resourceName - name of the resource func (client EventSubscriptionsClient) ListByResource(ctx context.Context, resourceGroupName string, providerNamespace string, resourceTypeName string, resourceName string) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListByResource") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListByResourcePreparer(ctx, resourceGroupName, providerNamespace, resourceTypeName, resourceName) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListByResource", nil, "Failure preparing request") return } resp, err := client.ListByResourceSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListByResource", resp, "Failure sending request") return } result, err = client.ListByResourceResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListByResource", resp, "Failure responding to request") return } return } // ListByResourcePreparer prepares the ListByResource request. func (client EventSubscriptionsClient) ListByResourcePreparer(ctx context.Context, resourceGroupName string, providerNamespace string, resourceTypeName string, resourceName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "providerNamespace": autorest.Encode("path", providerNamespace), "resourceGroupName": autorest.Encode("path", resourceGroupName), "resourceName": autorest.Encode("path", resourceName), "resourceTypeName": autorest.Encode("path", resourceTypeName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{providerNamespace}/{resourceTypeName}/{resourceName}/providers/Microsoft.EventGrid/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListByResourceSender sends the ListByResource request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListByResourceSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListByResourceResponder handles the response to the ListByResource request. The method always // closes the http.Response Body. func (client EventSubscriptionsClient) ListByResourceResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListGlobalByResourceGroup list all global event subscriptions under a specific Azure subscription and resource group // Parameters: // resourceGroupName - the name of the resource group within the user's subscription. 
func (client EventSubscriptionsClient) ListGlobalByResourceGroup(ctx context.Context, resourceGroupName string) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListGlobalByResourceGroup") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListGlobalByResourceGroupPreparer(ctx, resourceGroupName) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalByResourceGroup", nil, "Failure preparing request") return } resp, err := client.ListGlobalByResourceGroupSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalByResourceGroup", resp, "Failure sending request") return } result, err = client.ListGlobalByResourceGroupResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalByResourceGroup", resp, "Failure responding to request") return } return } // ListGlobalByResourceGroupPreparer prepares the ListGlobalByResourceGroup request. func (client EventSubscriptionsClient) ListGlobalByResourceGroupPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListGlobalByResourceGroupSender sends the ListGlobalByResourceGroup request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListGlobalByResourceGroupSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListGlobalByResourceGroupResponder handles the response to the ListGlobalByResourceGroup request. The method always // closes the http.Response Body. func (client EventSubscriptionsClient) ListGlobalByResourceGroupResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListGlobalByResourceGroupForTopicType list all global event subscriptions under a resource group for a specific // topic type. // Parameters: // resourceGroupName - the name of the resource group within the user's subscription. 
// topicTypeName - name of the topic type func (client EventSubscriptionsClient) ListGlobalByResourceGroupForTopicType(ctx context.Context, resourceGroupName string, topicTypeName string) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListGlobalByResourceGroupForTopicType") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListGlobalByResourceGroupForTopicTypePreparer(ctx, resourceGroupName, topicTypeName) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalByResourceGroupForTopicType", nil, "Failure preparing request") return } resp, err := client.ListGlobalByResourceGroupForTopicTypeSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalByResourceGroupForTopicType", resp, "Failure sending request") return } result, err = client.ListGlobalByResourceGroupForTopicTypeResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalByResourceGroupForTopicType", resp, "Failure responding to request") return } return } // ListGlobalByResourceGroupForTopicTypePreparer prepares the ListGlobalByResourceGroupForTopicType request. func (client EventSubscriptionsClient) ListGlobalByResourceGroupForTopicTypePreparer(ctx context.Context, resourceGroupName string, topicTypeName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "topicTypeName": autorest.Encode("path", topicTypeName), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/topicTypes/{topicTypeName}/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListGlobalByResourceGroupForTopicTypeSender sends the ListGlobalByResourceGroupForTopicType request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListGlobalByResourceGroupForTopicTypeSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListGlobalByResourceGroupForTopicTypeResponder handles the response to the ListGlobalByResourceGroupForTopicType request. The method always // closes the http.Response Body. 
func (client EventSubscriptionsClient) ListGlobalByResourceGroupForTopicTypeResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListGlobalBySubscription list all aggregated global event subscriptions under a specific Azure subscription func (client EventSubscriptionsClient) ListGlobalBySubscription(ctx context.Context) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListGlobalBySubscription") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListGlobalBySubscriptionPreparer(ctx) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalBySubscription", nil, "Failure preparing request") return } resp, err := client.ListGlobalBySubscriptionSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalBySubscription", resp, "Failure sending request") return } result, err = client.ListGlobalBySubscriptionResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalBySubscription", resp, "Failure responding to request") return } return } // ListGlobalBySubscriptionPreparer prepares the ListGlobalBySubscription request. func (client EventSubscriptionsClient) ListGlobalBySubscriptionPreparer(ctx context.Context) (*http.Request, error) { pathParameters := map[string]interface{}{ "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.EventGrid/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListGlobalBySubscriptionSender sends the ListGlobalBySubscription request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListGlobalBySubscriptionSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListGlobalBySubscriptionResponder handles the response to the ListGlobalBySubscription request. The method always // closes the http.Response Body. func (client EventSubscriptionsClient) ListGlobalBySubscriptionResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListGlobalBySubscriptionForTopicType list all global event subscriptions under an Azure subscription for a topic // type. 
// Parameters: // topicTypeName - name of the topic type func (client EventSubscriptionsClient) ListGlobalBySubscriptionForTopicType(ctx context.Context, topicTypeName string) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListGlobalBySubscriptionForTopicType") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListGlobalBySubscriptionForTopicTypePreparer(ctx, topicTypeName) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalBySubscriptionForTopicType", nil, "Failure preparing request") return } resp, err := client.ListGlobalBySubscriptionForTopicTypeSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalBySubscriptionForTopicType", resp, "Failure sending request") return } result, err = client.ListGlobalBySubscriptionForTopicTypeResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListGlobalBySubscriptionForTopicType", resp, "Failure responding to request") return } return } // ListGlobalBySubscriptionForTopicTypePreparer prepares the ListGlobalBySubscriptionForTopicType request. func (client EventSubscriptionsClient) ListGlobalBySubscriptionForTopicTypePreparer(ctx context.Context, topicTypeName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "subscriptionId": autorest.Encode("path", client.SubscriptionID), "topicTypeName": autorest.Encode("path", topicTypeName), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.EventGrid/topicTypes/{topicTypeName}/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListGlobalBySubscriptionForTopicTypeSender sends the ListGlobalBySubscriptionForTopicType request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListGlobalBySubscriptionForTopicTypeSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListGlobalBySubscriptionForTopicTypeResponder handles the response to the ListGlobalBySubscriptionForTopicType request. The method always // closes the http.Response Body. func (client EventSubscriptionsClient) ListGlobalBySubscriptionForTopicTypeResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListRegionalByResourceGroup list all event subscriptions from the given location under a specific Azure subscription // and resource group // Parameters: // resourceGroupName - the name of the resource group within the user's subscription. 
// location - name of the location func (client EventSubscriptionsClient) ListRegionalByResourceGroup(ctx context.Context, resourceGroupName string, location string) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListRegionalByResourceGroup") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListRegionalByResourceGroupPreparer(ctx, resourceGroupName, location) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalByResourceGroup", nil, "Failure preparing request") return } resp, err := client.ListRegionalByResourceGroupSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalByResourceGroup", resp, "Failure sending request") return } result, err = client.ListRegionalByResourceGroupResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalByResourceGroup", resp, "Failure responding to request") return } return } // ListRegionalByResourceGroupPreparer prepares the ListRegionalByResourceGroup request. func (client EventSubscriptionsClient) ListRegionalByResourceGroupPreparer(ctx context.Context, resourceGroupName string, location string) (*http.Request, error) { pathParameters := map[string]interface{}{ "location": autorest.Encode("path", location), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/locations/{location}/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListRegionalByResourceGroupSender sends the ListRegionalByResourceGroup request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListRegionalByResourceGroupSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListRegionalByResourceGroupResponder handles the response to the ListRegionalByResourceGroup request. The method always // closes the http.Response Body. func (client EventSubscriptionsClient) ListRegionalByResourceGroupResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListRegionalByResourceGroupForTopicType list all event subscriptions from the given location under a specific Azure // subscription and resource group and topic type // Parameters: // resourceGroupName - the name of the resource group within the user's subscription. 
// location - name of the location // topicTypeName - name of the topic type func (client EventSubscriptionsClient) ListRegionalByResourceGroupForTopicType(ctx context.Context, resourceGroupName string, location string, topicTypeName string) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListRegionalByResourceGroupForTopicType") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListRegionalByResourceGroupForTopicTypePreparer(ctx, resourceGroupName, location, topicTypeName) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalByResourceGroupForTopicType", nil, "Failure preparing request") return } resp, err := client.ListRegionalByResourceGroupForTopicTypeSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalByResourceGroupForTopicType", resp, "Failure sending request") return } result, err = client.ListRegionalByResourceGroupForTopicTypeResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalByResourceGroupForTopicType", resp, "Failure responding to request") return } return } // ListRegionalByResourceGroupForTopicTypePreparer prepares the ListRegionalByResourceGroupForTopicType request. func (client EventSubscriptionsClient) ListRegionalByResourceGroupForTopicTypePreparer(ctx context.Context, resourceGroupName string, location string, topicTypeName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "location": autorest.Encode("path", location), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "topicTypeName": autorest.Encode("path", topicTypeName), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/locations/{location}/topicTypes/{topicTypeName}/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListRegionalByResourceGroupForTopicTypeSender sends the ListRegionalByResourceGroupForTopicType request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListRegionalByResourceGroupForTopicTypeSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListRegionalByResourceGroupForTopicTypeResponder handles the response to the ListRegionalByResourceGroupForTopicType request. The method always // closes the http.Response Body. 
func (client EventSubscriptionsClient) ListRegionalByResourceGroupForTopicTypeResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListRegionalBySubscription list all event subscriptions from the given location under a specific Azure subscription // Parameters: // location - name of the location func (client EventSubscriptionsClient) ListRegionalBySubscription(ctx context.Context, location string) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListRegionalBySubscription") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListRegionalBySubscriptionPreparer(ctx, location) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalBySubscription", nil, "Failure preparing request") return } resp, err := client.ListRegionalBySubscriptionSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalBySubscription", resp, "Failure sending request") return } result, err = client.ListRegionalBySubscriptionResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalBySubscription", resp, "Failure responding to request") return } return } // ListRegionalBySubscriptionPreparer prepares the ListRegionalBySubscription request. func (client EventSubscriptionsClient) ListRegionalBySubscriptionPreparer(ctx context.Context, location string) (*http.Request, error) { pathParameters := map[string]interface{}{ "location": autorest.Encode("path", location), "subscriptionId": autorest.Encode("path", client.SubscriptionID), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.EventGrid/locations/{location}/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListRegionalBySubscriptionSender sends the ListRegionalBySubscription request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListRegionalBySubscriptionSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListRegionalBySubscriptionResponder handles the response to the ListRegionalBySubscription request. The method always // closes the http.Response Body. func (client EventSubscriptionsClient) ListRegionalBySubscriptionResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // ListRegionalBySubscriptionForTopicType list all event subscriptions from the given location under a specific Azure // subscription and topic type. 
// Parameters: // location - name of the location // topicTypeName - name of the topic type func (client EventSubscriptionsClient) ListRegionalBySubscriptionForTopicType(ctx context.Context, location string, topicTypeName string) (result EventSubscriptionsListResult, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.ListRegionalBySubscriptionForTopicType") defer func() { sc := -1 if result.Response.Response != nil { sc = result.Response.Response.StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.ListRegionalBySubscriptionForTopicTypePreparer(ctx, location, topicTypeName) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalBySubscriptionForTopicType", nil, "Failure preparing request") return } resp, err := client.ListRegionalBySubscriptionForTopicTypeSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalBySubscriptionForTopicType", resp, "Failure sending request") return } result, err = client.ListRegionalBySubscriptionForTopicTypeResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "ListRegionalBySubscriptionForTopicType", resp, "Failure responding to request") return } return } // ListRegionalBySubscriptionForTopicTypePreparer prepares the ListRegionalBySubscriptionForTopicType request. func (client EventSubscriptionsClient) ListRegionalBySubscriptionForTopicTypePreparer(ctx context.Context, location string, topicTypeName string) (*http.Request, error) { pathParameters := map[string]interface{}{ "location": autorest.Encode("path", location), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "topicTypeName": autorest.Encode("path", topicTypeName), } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.EventGrid/locations/{location}/topicTypes/{topicTypeName}/eventSubscriptions", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListRegionalBySubscriptionForTopicTypeSender sends the ListRegionalBySubscriptionForTopicType request. The method will close the // http.Response Body if it receives an error. func (client EventSubscriptionsClient) ListRegionalBySubscriptionForTopicTypeSender(req *http.Request) (*http.Response, error) { return client.Send(req, azure.DoRetryWithRegistration(client.Client)) } // ListRegionalBySubscriptionForTopicTypeResponder handles the response to the ListRegionalBySubscriptionForTopicType request. The method always // closes the http.Response Body. func (client EventSubscriptionsClient) ListRegionalBySubscriptionForTopicTypeResponder(resp *http.Response) (result EventSubscriptionsListResult, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Update asynchronously updates an existing event subscription. // Parameters: // scope - the scope of existing event subscription. 
The scope can be a subscription, or a resource group, or a // top level resource belonging to a resource provider namespace, or an EventGrid topic. For example, use // '/subscriptions/{subscriptionId}/' for a subscription, // '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for a resource group, and // '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}' // for a resource, and // '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventGrid/topics/{topicName}' // for an EventGrid topic. // eventSubscriptionName - name of the event subscription to be updated // eventSubscriptionUpdateParameters - updated event subscription information func (client EventSubscriptionsClient) Update(ctx context.Context, scope string, eventSubscriptionName string, eventSubscriptionUpdateParameters EventSubscriptionUpdateParameters) (result EventSubscriptionsUpdateFuture, err error) { if tracing.IsEnabled() { ctx = tracing.StartSpan(ctx, fqdn+"/EventSubscriptionsClient.Update") defer func() { sc := -1 if result.Response() != nil { sc = result.Response().StatusCode } tracing.EndSpan(ctx, sc, err) }() } req, err := client.UpdatePreparer(ctx, scope, eventSubscriptionName, eventSubscriptionUpdateParameters) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "Update", nil, "Failure preparing request") return } result, err = client.UpdateSender(req) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsClient", "Update", nil, "Failure sending request") return } return } // UpdatePreparer prepares the Update request. func (client EventSubscriptionsClient) UpdatePreparer(ctx context.Context, scope string, eventSubscriptionName string, eventSubscriptionUpdateParameters EventSubscriptionUpdateParameters) (*http.Request, error) { pathParameters := map[string]interface{}{ "eventSubscriptionName": autorest.Encode("path", eventSubscriptionName), "scope": scope, } const APIVersion = "2018-05-01-preview" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsContentType("application/json; charset=utf-8"), autorest.AsPatch(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/{scope}/providers/Microsoft.EventGrid/eventSubscriptions/{eventSubscriptionName}", pathParameters), autorest.WithJSON(eventSubscriptionUpdateParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // UpdateSender sends the Update request. The method will close the // http.Response Body if it receives an error. 
func (client EventSubscriptionsClient) UpdateSender(req *http.Request) (future EventSubscriptionsUpdateFuture, err error) { var resp *http.Response resp, err = client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) if err != nil { return } var azf azure.Future azf, err = azure.NewFutureFromResponse(resp) future.FutureAPI = &azf future.Result = func(client EventSubscriptionsClient) (es EventSubscription, err error) { var done bool done, err = future.DoneWithContext(context.Background(), client) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsUpdateFuture", "Result", future.Response(), "Polling failure") return } if !done { err = azure.NewAsyncOpIncompleteError("eventgrid.EventSubscriptionsUpdateFuture") return } sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) if es.Response.Response, err = future.GetResult(sender); err == nil && es.Response.Response.StatusCode != http.StatusNoContent { es, err = client.UpdateResponder(es.Response.Response) if err != nil { err = autorest.NewErrorWithError(err, "eventgrid.EventSubscriptionsUpdateFuture", "Result", es.Response.Response, "Failure responding to request") } } return } return } // UpdateResponder handles the response to the Update request. The method always // closes the http.Response Body. func (client EventSubscriptionsClient) UpdateResponder(resp *http.Response) (result EventSubscription, err error) { err = autorest.Respond( resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return }
NewEventSubscriptionsClientWithBaseURI
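For context, the generated client above is typically constructed and called as follows. This is a minimal sketch: the authorizer setup, the exact import path, and all IDs are illustrative and are not part of the generated file.

package main

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/preview/eventgrid/mgmt/2018-05-01-preview/eventgrid"
	"github.com/Azure/go-autorest/autorest/azure/auth"
)

func main() {
	// The generated constructors fill in BaseURI and SubscriptionID.
	client := eventgrid.NewEventSubscriptionsClient("<subscription-id>")

	// Credentials from AZURE_* environment variables; an assumption for this sketch.
	authorizer, err := auth.NewAuthorizerFromEnvironment()
	if err != nil {
		panic(err)
	}
	client.Authorizer = authorizer

	// Scope may be a subscription, resource group, resource, or EventGrid topic,
	// exactly as the Get doc comment above describes.
	scope := "/subscriptions/<subscription-id>/resourceGroups/<rg>"
	sub, err := client.Get(context.Background(), scope, "my-event-subscription")
	if err != nil {
		panic(err)
	}
	fmt.Println(*sub.Name)
}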
create-api-complex-fixture.js
const path = require('path')
const run = require('../utils/run')
const assert = require('../utils/assert')
const sameDirContent = require('../utils/sameDirContent')
const f = require('../fixtures/constants')

const excludeFilter = [
  'ElectrodeApiImpl.xcodeproj',
  'project.pbxproj',
  'package.json',
  '.DS_Store',
  'index.android.bundle',
  'index.android.bundle.meta',
  'yarn.lock',
  'README.md',
  'WalmartItemApi.spec.js',
  'SysteTestEventApi.spec.js',
  'SystemTestsApi.spec.js',
  '.yarn-integrity'
].map(s => `**/${s}`).join(',')
const fixtureApiPath = f.pathToComplexApiFixture
const generatedApiPath = path.join(process.cwd(), f.complexApiPkgName)

assert(
  sameDirContent(fixtureApiPath, generatedApiPath, {excludeFilter}),
  'Generated API differs from reference fixture!')
run(`create-api ${f.complexApiName} -p ${f.complexApiPkgName} --schemaPath ${f.pathToComplexApiSchema} --skipNpmCheck`)
coordinate_converter.py
import requests
from abc import ABC, abstractmethod
from typing import Tuple, List
import json


class CoordinateConverter(ABC):
    def __init__(self):
        super().__init__()

    @abstractmethod
    def convert_coordinate(self, coordinate: Tuple, base_system_code, target_system_code):
        pass

    @abstractmethod
    def convert_multiple_coordinates(self, coordinates: List[Tuple], base_system_code, target_system_code):
        pass


class EpsgCoordinateConverter(CoordinateConverter):
    def __init__(self):
        super().__init__()
        self.base_url = 'http://epsg.io/trans?'

    def convert_coordinate(self, coordinate: Tuple, base_system_code: str, target_system_code: str):
        """
        :param coordinate: tuple of 2 or 3 coordinates
        :param base_system_code: source system code in EPSG in string format (EPSG:3879 -> 3879)
        :param target_system_code: target system code
        :return: converted coordinates
        """
        if len(coordinate) < 2 or len(coordinate) > 3:
            raise ValueError('Coordinate must be a tuple containing (x, y) or (x, y, z) coordinates')
        if len(coordinate) == 2:
            query = f"x={coordinate[0]}&y={coordinate[1]}"
        else:
            query = f"x={coordinate[0]}&y={coordinate[1]}&z={coordinate[2]}"
        query += f"&s_srs={base_system_code}&t_srs={target_system_code}"
        r = requests.get(self.base_url + query)
        r.raise_for_status()
        result_as_json = json.loads(r.content.decode('latin1'))
        return result_as_json['x'], result_as_json['y']

    def convert_multiple_coordinates(self, coordinates: List[Tuple], base_system_code, target_system_code):
        """
        :param coordinates: list of tuples of 2 or 3 coordinates
        :param base_system_code: source system code in EPSG in string format (EPSG:3879 -> 3879)
        :param target_system_code: target system code
        :return: list of converted coordinates
        """
        if len(coordinates[0]) < 2 or len(coordinates[0]) > 3:
            raise ValueError('Coordinates must be a list of tuples containing (x, y) or (x, y, z) coordinates')
        query = 'data='
        for idx, coor in enumerate(coordinates):
            query += ','.join([str(c) for c in coor])
            # append ';' between consecutive coordinate tuples (but not after the last one)
            if idx != len(coordinates) - 1:
                query += ';'
        query += f"&s_srs={base_system_code}&t_srs={target_system_code}"
        r = requests.get(self.base_url + query)
        r.raise_for_status()
        result_as_json = json.loads(r.content.decode('latin1'))
        if len(coordinates[0]) == 2:
            results = [(t['x'], t['y']) for t in result_as_json]
        else:
            results = [(t['x'], t['y'], t['z']) for t in result_as_json]
        return results
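As a quick illustration of the query format above, converting a single point and a small batch might look like this. A minimal sketch: the coordinate values and EPSG codes are illustrative, and epsg.io must be reachable.

converter = EpsgCoordinateConverter()

# Single point: ETRS-GK25 (EPSG:3879) -> WGS84 (EPSG:4326).
x, y = converter.convert_coordinate((25496750.0, 6672940.0), '3879', '4326')

# Batch conversion goes through the 'data=' query, tuples separated by ';'.
points = converter.convert_multiple_coordinates(
    [(25496750.0, 6672940.0), (25497000.0, 6673000.0)], '3879', '4326')
print(x, y, points)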
make-test.py
# -*- coding: utf-8 -*-
import os
import sys
import random

sourceDir = '/data/deresute-face'
trFile = 'train.txt'
teFile = 'test.txt'
mapFile = 'config/classes.py'

if len(sys.argv) != 3:
    print("usage: %s trainNum testNum" % sys.argv[0])
    exit()

datanum = int(sys.argv[1])
testnum = int(sys.argv[2])


def listClass(dir):
    ret = []
    for file in os.listdir(dir):
        if file == "." or file == "..":
            # I wanted to label everything under -1 as 0.
            # if file == "." or file == ".." or file == "-1":
            continue
        ret.append(file)
    return ret


def find(dir, dirs):
    ret = dirs
    for file in os.listdir(dir):
        realfile = os.path.join("%s", "%s") % (dir, file)
        if os.path.isdir(realfile):
            ret = find(realfile, ret)
        else:
            ret.append(realfile)
    return ret


def ref(dict, key, default):
y:
        return dict[key]
    except:
        return default


def addDict(dict, key):
    try:
        dict[key] += 1
    except:
        dict[key] = 1


dirs = listClass(sourceDir)


def getId(className):
    return dirs.index(className)


images = find(sourceDir, [])
random.shuffle(images)

fp = open(mapFile, "w")
fp.write("classList = {}\n")
i = 0
for className in dirs:
    fp.write("classList[%d] = \"%s\"\n" % (i, className))
    i += 1
fp.close()

teFp = open(teFile, "w")
trFp = open(trFile, "w")

limits = {}
limits2 = {}
for image in images:
    className = os.path.basename(os.path.dirname(image))
    isTest = False
    if ref(limits2, className, 0) >= testnum:
        continue
    elif ref(limits, className, 0) >= datanum:
        addDict(limits2, className)
        isTest = True
    else:
        addDict(limits, className)
    # if className == "-1":
    #     continue
    id = getId(className)
    if isTest:
        teFp.write("%s,%d\n" % (image, id))
    else:
        trFp.write("%s,%d\n" % (image, id))

trFp.close()
teFp.close()
tr
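The script above emits "path,label" CSV lines plus a classList mapping written into config/classes.py. Reading them back could look like this; a sketch that uses only the formats written above and assumes config/ is importable as a package (e.g. contains an __init__.py):

# classes.py was written as 'classList[i] = "name"' lines, importable as a module.
from config.classes import classList

with open('train.txt') as fp:
    for line in fp:
        # rsplit keeps any commas that happen to appear in the image path intact
        path, label = line.rstrip('\n').rsplit(',', 1)
        print(path, classList[int(label)])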
index.ts
export * from "./response-content-utils";
export * from "./request-expectation";
export * from "./mock-api-router";
export * from "./validation-error";

import { MockApiRouter } from "./mock-api-router";
export const app = new MockApiRouter();
main.py
import module1
compile-site.ts
import webpack from 'webpack';
import WebpackDevServer from 'webpack-dev-server';
import { get } from 'lodash';
import { getPortPromise } from 'portfinder';
import { getSiteDevConfig } from '../config/webpack.site.dev';
import { getSitePrdConfig } from '../config/webpack.site.prd';

async function runDevServer(
  port: number,
  config: ReturnType<typeof getSiteDevConfig>
) {
  const host = get(config.devServer, 'host', 'localhost');
  const server = new WebpackDevServer(
    {
      ...config.devServer,
      port,
      host,
    },
    webpack(config)
  );

  await server.start();
}

async function
() {
  const config = getSiteDevConfig();
  const port = await getPortPromise({
    port: config.devServer.port,
  });

  await runDevServer(port, config);
}

function build() {
  return new Promise<void>((resolve, reject) => {
    const config = getSitePrdConfig();

    webpack(config, (err, stats) => {
      if (err || (stats && stats.hasErrors())) {
        reject();
      } else {
        resolve();
      }
    });
  });
}

export async function compileSite(production = false) {
  if (production) {
    await build();
  } else {
    await watch();
  }
}
watch
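Wired into a CLI, the exported compileSite above is typically awaited with an explicit failure path, since build() rejects without an error object. A sketch; the import path and NODE_ENV check are illustrative:

import { compileSite } from './compile-site';

async function main() {
  // Dev server when not building for production; one-shot webpack build otherwise.
  await compileSite(process.env.NODE_ENV === 'production');
}

main().catch(() => {
  // build() rejects with no Error value, so exit with a generic failure code.
  process.exit(1);
});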
register.go
/*
Copyright 2018 BlackRock, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by client-gen. DO NOT EDIT.

package scheme

import (
	argoprojv1alpha1 "github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1"
	v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	runtime "k8s.io/apimachinery/pkg/runtime"
	schema "k8s.io/apimachinery/pkg/runtime/schema"
	serializer "k8s.io/apimachinery/pkg/runtime/serializer"
	utilruntime "k8s.io/apimachinery/pkg/util/runtime"
)

var Scheme = runtime.NewScheme()
var Codecs = serializer.NewCodecFactory(Scheme)
var ParameterCodec = runtime.NewParameterCodec(Scheme)
var localSchemeBuilder = runtime.SchemeBuilder{
	argoprojv1alpha1.AddToScheme,
}

// AddToScheme adds all types of this clientset into the given scheme. This allows composition
// of clientsets, like in:
//
//   import (
//     "k8s.io/client-go/kubernetes"
//     clientsetscheme "k8s.io/client-go/kubernetes/scheme"
//     aggregatorclientsetscheme "k8s.io/kube-aggregator/pkg/client/clientset_generated/clientset/scheme"
//   )
//
//   kclientset, _ := kubernetes.NewForConfig(c)
//   _ = aggregatorclientsetscheme.AddToScheme(clientsetscheme.Scheme)
//
// After this, RawExtensions in Kubernetes types will serialize kube-aggregator types
// correctly.
var AddToScheme = localSchemeBuilder.AddToScheme

func
() {
	v1.AddToGroupVersion(Scheme, schema.GroupVersion{Version: "v1"})
	utilruntime.Must(AddToScheme(Scheme))
}
init
vspherecluster_types.go
/*
Copyright 2019 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package v1alpha3

import (
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	"sigs.k8s.io/cluster-api-provider-vsphere/api/v1alpha3/cloudprovider"
)

const (
	// ClusterFinalizer allows ReconcileVSphereCluster to clean up vSphere
	// resources associated with VSphereCluster before removing it from the
	// API server.
	ClusterFinalizer = "vspherecluster.infrastructure.cluster.x-k8s.io"
)

// VSphereClusterSpec defines the desired state of VSphereCluster
type VSphereClusterSpec struct {
	// Server is the address of the vSphere endpoint.
	Server string `json:"server,omitempty"`

	// Insecure is a flag that controls whether or not to validate the
	// vSphere server's certificate.
	// +optional
	Insecure *bool `json:"insecure,omitempty"`

	// CloudProviderConfiguration holds the cluster-wide configuration for the
	// vSphere cloud provider.
	CloudProviderConfiguration cloudprovider.Config `json:"cloudProviderConfiguration,omitempty"`

	// ControlPlaneEndpoint represents the endpoint used to communicate with the control plane.
	// +optional
	ControlPlaneEndpoint APIEndpoint `json:"controlPlaneEndpoint"`

	// LoadBalancerRef may be used to enable a control plane load balancer
	// for this cluster.
	// When a LoadBalancerRef is provided, the VSphereCluster.Status.Ready field
	// will not be true until the referenced resource is Status.Ready and has a
	// non-empty Status.Address value.
	// +optional
	LoadBalancerRef *corev1.ObjectReference `json:"loadBalancerRef,omitempty"`
}

// VSphereClusterStatus defines the observed state of VSphereCluster
type VSphereClusterStatus struct {
	Ready bool `json:"ready"`
}

// +kubebuilder:object:root=true
// +kubebuilder:resource:path=vsphereclusters,scope=Namespaced,categories=cluster-api
// +kubebuilder:subresource:status

// VSphereCluster is the Schema for the vsphereclusters API
type VSphereCluster struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`

	Spec   VSphereClusterSpec   `json:"spec,omitempty"`
	Status VSphereClusterStatus `json:"status,omitempty"`
}

// +kubebuilder:object:root=true

// VSphereClusterList contains a list of VSphereCluster
type VSphereClusterList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	Items           []VSphereCluster `json:"items"`
}

func init()
{
	SchemeBuilder.Register(&VSphereCluster{}, &VSphereClusterList{})
}
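For orientation, a VSphereCluster object built from the types above looks roughly like this. A sketch: APIEndpoint is not defined in this file, so its Host/Port shape is assumed from cluster-api conventions, and all values are placeholders.

func exampleCluster() *VSphereCluster {
	return &VSphereCluster{
		ObjectMeta: metav1.ObjectMeta{Name: "quickstart", Namespace: "default"},
		Spec: VSphereClusterSpec{
			// Address of the vSphere endpoint, per the Server field's doc comment.
			Server: "vcenter.example.com",
			// Assumed Host/Port fields on APIEndpoint.
			ControlPlaneEndpoint: APIEndpoint{Host: "10.0.0.10", Port: 6443},
		},
	}
}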
index.js
'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var AggregateError = _interopDefault(require('aggregate-error')); var webhooksMethods = require('@octokit/webhooks-methods'); function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; } function _objectSpread2(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } const createLogger = logger => _objectSpread2({ debug: () => {}, info: () => {}, warn: console.warn.bind(console), error: console.error.bind(console) }, logger); // THIS FILE IS GENERATED - DO NOT EDIT DIRECTLY // make edits in scripts/generate-types.ts const emitterEventNames = ["branch_protection_rule", "branch_protection_rule.created", "branch_protection_rule.deleted", "branch_protection_rule.edited", "check_run", "check_run.completed", "check_run.created", "check_run.requested_action", "check_run.rerequested", "check_suite", "check_suite.completed", "check_suite.requested", "check_suite.rerequested", "code_scanning_alert", "code_scanning_alert.appeared_in_branch", "code_scanning_alert.closed_by_user", "code_scanning_alert.created", "code_scanning_alert.fixed", "code_scanning_alert.reopened", "code_scanning_alert.reopened_by_user", "commit_comment", "commit_comment.created", "content_reference", "content_reference.created", "create", "delete", "deploy_key", "deploy_key.created", "deploy_key.deleted", "deployment", "deployment.created", "deployment_status", "deployment_status.created", "discussion", "discussion.answered", "discussion.category_changed", "discussion.created", "discussion.deleted", "discussion.edited", "discussion.labeled", "discussion.locked", "discussion.pinned", "discussion.transferred", "discussion.unanswered", "discussion.unlabeled", "discussion.unlocked", "discussion.unpinned", "discussion_comment", "discussion_comment.created", "discussion_comment.deleted", "discussion_comment.edited", "fork", "github_app_authorization", "github_app_authorization.revoked", "gollum", "installation", "installation.created", "installation.deleted", "installation.new_permissions_accepted", "installation.suspend", "installation.unsuspend", "installation_repositories", "installation_repositories.added", "installation_repositories.removed", "issue_comment", "issue_comment.created", "issue_comment.deleted", "issue_comment.edited", "issues", "issues.assigned", "issues.closed", "issues.deleted", "issues.demilestoned", "issues.edited", "issues.labeled", "issues.locked", "issues.milestoned", "issues.opened", 
"issues.pinned", "issues.reopened", "issues.transferred", "issues.unassigned", "issues.unlabeled", "issues.unlocked", "issues.unpinned", "label", "label.created", "label.deleted", "label.edited", "marketplace_purchase", "marketplace_purchase.cancelled", "marketplace_purchase.changed", "marketplace_purchase.pending_change", "marketplace_purchase.pending_change_cancelled", "marketplace_purchase.purchased", "member", "member.added", "member.edited", "member.removed", "membership", "membership.added", "membership.removed", "meta", "meta.deleted", "milestone", "milestone.closed", "milestone.created", "milestone.deleted", "milestone.edited", "milestone.opened", "org_block", "org_block.blocked", "org_block.unblocked", "organization", "organization.deleted", "organization.member_added", "organization.member_invited", "organization.member_removed", "organization.renamed", "package", "package.published", "package.updated", "page_build", "ping", "project", "project.closed", "project.created", "project.deleted", "project.edited", "project.reopened", "project_card", "project_card.converted", "project_card.created", "project_card.deleted", "project_card.edited", "project_card.moved", "project_column", "project_column.created", "project_column.deleted", "project_column.edited", "project_column.moved", "public", "pull_request", "pull_request.assigned", "pull_request.auto_merge_disabled", "pull_request.auto_merge_enabled", "pull_request.closed", "pull_request.converted_to_draft", "pull_request.edited", "pull_request.labeled", "pull_request.locked", "pull_request.opened", "pull_request.ready_for_review", "pull_request.reopened", "pull_request.review_request_removed", "pull_request.review_requested", "pull_request.synchronize", "pull_request.unassigned", "pull_request.unlabeled", "pull_request.unlocked", "pull_request_review", "pull_request_review.dismissed", "pull_request_review.edited", "pull_request_review.submitted", "pull_request_review_comment", "pull_request_review_comment.created", "pull_request_review_comment.deleted", "pull_request_review_comment.edited", "pull_request_review_thread", "pull_request_review_thread.resolved", "pull_request_review_thread.unresolved", "push", "release", "release.created", "release.deleted", "release.edited", "release.prereleased", "release.published", "release.released", "release.unpublished", "repository", "repository.archived", "repository.created", "repository.deleted", "repository.edited", "repository.privatized", "repository.publicized", "repository.renamed", "repository.transferred", "repository.unarchived", "repository_dispatch", "repository_import", "repository_vulnerability_alert", "repository_vulnerability_alert.create", "repository_vulnerability_alert.dismiss", "repository_vulnerability_alert.resolve", "secret_scanning_alert", "secret_scanning_alert.created", "secret_scanning_alert.reopened", "secret_scanning_alert.resolved", "security_advisory", "security_advisory.performed", "security_advisory.published", "security_advisory.updated", "security_advisory.withdrawn", "sponsorship", "sponsorship.cancelled", "sponsorship.created", "sponsorship.edited", "sponsorship.pending_cancellation", "sponsorship.pending_tier_change", "sponsorship.tier_changed", "star", "star.created", "star.deleted", "status", "team", "team.added_to_repository", "team.created", "team.deleted", "team.edited", "team.removed_from_repository", "team_add", "watch", "watch.started", "workflow_dispatch", "workflow_job", "workflow_job.completed", "workflow_job.in_progress", "workflow_job.queued", 
"workflow_job.started", "workflow_run", "workflow_run.completed", "workflow_run.requested"]; function handleEventHandlers(state, webhookName, handler) { if (!state.hooks[webhookName]) { state.hooks[webhookName] = []; } state.hooks[webhookName].push(handler); } function receiverOn(state, webhookNameOrNames, handler) { if (Array.isArray(webhookNameOrNames)) { webhookNameOrNames.forEach(webhookName => receiverOn(state, webhookName, handler)); return; } if (["*", "error"].includes(webhookNameOrNames)) { const webhookName = webhookNameOrNames === "*" ? "any" : webhookNameOrNames; const message = `Using the "${webhookNameOrNames}" event with the regular Webhooks.on() function is not supported. Please use the Webhooks.on${webhookName.charAt(0).toUpperCase() + webhookName.slice(1)}() method instead`; throw new Error(message); } if (!emitterEventNames.includes(webhookNameOrNames)) { state.log.warn(`"${webhookNameOrNames}" is not a known webhook name (https://developer.github.com/v3/activity/events/types/)`); } handleEventHandlers(state, webhookNameOrNames, handler); } function receiverOnAny(state, handler) { handleEventHandlers(state, "*", handler); } function receiverOnError(state, handler) { handleEventHandlers(state, "error", handler); } // Errors thrown or rejected Promises in "error" event handlers are not handled // as they are in the webhook event handlers. If errors occur, we log a // "Fatal: Error occurred" message to stdout function wrapErrorHandler(handler, error) { let returnValue; try { returnValue = handler(error); } catch (error) { console.log('FATAL: Error occurred in "error" event handler'); console.log(error); } if (returnValue && returnValue.catch) { returnValue.catch(error => { console.log('FATAL: Error occurred in "error" event handler'); console.log(error); }); } } // @ts-ignore to address #245 function getHooks(state, eventPayloadAction, eventName) { const hooks = [state.hooks[eventName], state.hooks["*"]]; if (eventPayloadAction) { hooks.unshift(state.hooks[`${eventName}.${eventPayloadAction}`]); } return [].concat(...hooks.filter(Boolean)); } // main handler function function receiverHandle(state, event) { const errorHandlers = state.hooks.error || []; if (event instanceof Error) { const error = Object.assign(new AggregateError([event]), { event, errors: [event] }); errorHandlers.forEach(handler => wrapErrorHandler(handler, error)); return Promise.reject(error); } if (!event || !event.name) { throw new AggregateError(["Event name not passed"]); } if (!event.payload) { throw new AggregateError(["Event payload not passed"]); } // flatten arrays of event listeners and remove undefined values const hooks = getHooks(state, "action" in event.payload ? 
event.payload.action : null, event.name); if (hooks.length === 0) { return Promise.resolve(); } const errors = []; const promises = hooks.map(handler => { let promise = Promise.resolve(event); if (state.transform) { promise = promise.then(state.transform); } return promise.then(event => { return handler(event); }).catch(error => errors.push(Object.assign(error, { event }))); }); return Promise.all(promises).then(() => { if (errors.length === 0) { return; } const error = new AggregateError(errors); Object.assign(error, { event, errors }); errorHandlers.forEach(handler => wrapErrorHandler(handler, error)); throw error; }); } function removeListener(state, webhookNameOrNames, handler) { if (Array.isArray(webhookNameOrNames)) { webhookNameOrNames.forEach(webhookName => removeListener(state, webhookName, handler)); return; } if (!state.hooks[webhookNameOrNames]) { return; } // remove last hook that has been added, that way // it behaves the same as removeListener for (let i = state.hooks[webhookNameOrNames].length - 1; i >= 0; i--) { if (state.hooks[webhookNameOrNames][i] === handler) { state.hooks[webhookNameOrNames].splice(i, 1); return; } } } function createEventHandler(options) { const state = { hooks: {}, log: createLogger(options && options.log) }; if (options && options.transform) { state.transform = options.transform; } return { on: receiverOn.bind(null, state), onAny: receiverOnAny.bind(null, state), onError: receiverOnError.bind(null, state), removeListener: removeListener.bind(null, state), receive: receiverHandle.bind(null, state) }; } /** * GitHub sends its JSON with an indentation of 2 spaces and a line break at the end */ function toNormalizedJsonString(payload) { const payloadString = JSON.stringify(payload); return payloadString.replace(/[^\\]\\u[\da-f]{4}/g, s => { return s.substr(0, 3) + s.substr(3).toUpperCase(); }); } async function sign(secret, payload) { return webhooksMethods.sign(secret, typeof payload === "string" ? payload : toNormalizedJsonString(payload)); } async function
(secret, payload, signature) { return webhooksMethods.verify(secret, typeof payload === "string" ? payload : toNormalizedJsonString(payload), signature); } async function verifyAndReceive(state, event) { // verify will validate that the secret is not undefined const matchesSignature = await webhooksMethods.verify(state.secret, typeof event.payload === "object" ? toNormalizedJsonString(event.payload) : event.payload, event.signature); if (!matchesSignature) { const error = new Error("[@octokit/webhooks] signature does not match event payload and secret"); return state.eventHandler.receive(Object.assign(error, { event, status: 400 })); } return state.eventHandler.receive({ id: event.id, name: event.name, payload: typeof event.payload === "string" ? JSON.parse(event.payload) : event.payload }); } const WEBHOOK_HEADERS = ["x-github-event", "x-hub-signature-256", "x-github-delivery"]; // https://docs.github.com/en/developers/webhooks-and-events/webhook-events-and-payloads#delivery-headers function getMissingHeaders(request) { return WEBHOOK_HEADERS.filter(header => !(header in request.headers)); } // @ts-ignore to address #245 function getPayload(request) { // If request.body already exists we can stop here // See https://github.com/octokit/webhooks.js/pull/23 if (request.body) return Promise.resolve(request.body); return new Promise((resolve, reject) => { let data = ""; request.setEncoding("utf8"); // istanbul ignore next request.on("error", error => reject(new AggregateError([error]))); request.on("data", chunk => data += chunk); request.on("end", () => { try { resolve(JSON.parse(data)); } catch (error) { error.message = "Invalid JSON"; error.status = 400; reject(new AggregateError([error])); } }); }); } async function middleware(webhooks, options, request, response, next) { let pathname; try { pathname = new URL(request.url, "http://localhost").pathname; } catch (error) { response.writeHead(422, { "content-type": "application/json" }); response.end(JSON.stringify({ error: `Request URL could not be parsed: ${request.url}` })); return; } const isUnknownRoute = request.method !== "POST" || pathname !== options.path; const isExpressMiddleware = typeof next === "function"; if (isUnknownRoute) { if (isExpressMiddleware) { return next(); } else { return options.onUnhandledRequest(request, response); } } const missingHeaders = getMissingHeaders(request).join(", "); if (missingHeaders) { response.writeHead(400, { "content-type": "application/json" }); response.end(JSON.stringify({ error: `Required headers missing: ${missingHeaders}` })); return; } const eventName = request.headers["x-github-event"]; const signatureSHA256 = request.headers["x-hub-signature-256"]; const id = request.headers["x-github-delivery"]; options.log.debug(`${eventName} event received (id: ${id})`); // GitHub will abort the request if it does not receive a response within 10s // See https://github.com/octokit/webhooks.js/issues/185 let didTimeout = false; const timeout = setTimeout(() => { didTimeout = true; response.statusCode = 202; response.end("still processing\n"); }, 9000).unref(); try { const payload = await getPayload(request); await webhooks.verifyAndReceive({ id: id, name: eventName, payload: payload, signature: signatureSHA256 }); clearTimeout(timeout); if (didTimeout) return; response.end("ok\n"); } catch (error) { clearTimeout(timeout); if (didTimeout) return; const statusCode = Array.from(error)[0].status; response.statusCode = typeof statusCode !== "undefined" ? 
statusCode : 500; response.end(String(error)); } } function onUnhandledRequestDefault(request, response) { response.writeHead(404, { "content-type": "application/json" }); response.end(JSON.stringify({ error: `Unknown route: ${request.method} ${request.url}` })); } function createNodeMiddleware(webhooks, { path = "/api/github/webhooks", onUnhandledRequest = onUnhandledRequestDefault, log = createLogger() } = {}) { return middleware.bind(null, webhooks, { path, onUnhandledRequest, log }); } class Webhooks { constructor(options) { if (!options || !options.secret) { throw new Error("[@octokit/webhooks] options.secret required"); } const state = { eventHandler: createEventHandler(options), secret: options.secret, hooks: {}, log: createLogger(options.log) }; this.sign = sign.bind(null, options.secret); this.verify = verify.bind(null, options.secret); this.on = state.eventHandler.on; this.onAny = state.eventHandler.onAny; this.onError = state.eventHandler.onError; this.removeListener = state.eventHandler.removeListener; this.receive = state.eventHandler.receive; this.verifyAndReceive = verifyAndReceive.bind(null, state); } } exports.Webhooks = Webhooks; exports.createEventHandler = createEventHandler; exports.createNodeMiddleware = createNodeMiddleware; exports.emitterEventNames = emitterEventNames; //# sourceMappingURL=index.js.map
verify
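// --- Usage sketch (illustrative, not part of the original dist file) ---
// A minimal example of how the exports above fit together: construct a
// Webhooks instance, register handlers, and replay a delivery through
// verifyAndReceive(). The secret, payload values, and require path are
// assumptions, not taken from the file above.
const { Webhooks } = require("@octokit/webhooks");

async function exampleDelivery() {
  const webhooks = new Webhooks({ secret: "mysecret" });

  webhooks.on("push", ({ id, name, payload }) => {
    console.log(`received ${name} event ${id} for ${payload.ref}`);
  });
  webhooks.onError((error) => {
    console.error("a handler failed", error);
  });

  // sign() and verifyAndReceive() normalize object payloads the same way
  // (see toNormalizedJsonString above), so signing our own payload makes
  // the signature check pass.
  const payload = { ref: "refs/heads/main" };
  const signature = await webhooks.sign(payload);
  await webhooks.verifyAndReceive({ id: "123", name: "push", payload, signature });
}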
tab.service.ts
export let tabbableSelector = "a[href], area[href], input:not([disabled]):not([tabindex=\'-1\']), " + "button:not([disabled]):not([tabindex=\'-1\']),select:not([disabled]):not([tabindex=\'-1\']), " + "textarea:not([disabled]):not([tabindex=\'-1\']), " + "iframe, object, embed, *[tabindex]:not([tabindex=\'-1\']), *[contenteditable=true]"; export function getFocusElementList(element) { let elements = element.querySelectorAll(tabbableSelector); return elements ? Array.prototype.filter.call(elements, el => isVisible(el)) : elements; } export function isFocusInFirstItem(event, list) { if (list.length > 0) { return (event.target || event.srcElement) === list[0]; } return false; } export function isFocusInLastItem(event, list) { if (list.length > 0) { return (event.target || event.srcElement) === list[list.length - 1]; } return false; } export function isElementFocused(event, element) { return (event.target || event.srcElement) === element; } export function focusFirstFocusableElement(list) { if (list.length > 0) { list[0].focus(); return true; } return false; } export function focusLastFocusableElement(list) { if (list.length > 0) { list[list.length - 1].focus(); return true; } return false; } export function isVisible(element) { return !!(element.offsetWidth || element.offsetHeight || element.getClientRects().length); } export function cycleTabs(event, element) { if (event.key === "Tab") { let list = getFocusElementList(element); let focusChanged = false; if (event.shiftKey) { if (isFocusInFirstItem(event, list) || isElementFocused(event, element)) { focusChanged = focusLastFocusableElement(list); } } else { if (isFocusInLastItem(event, list)) { focusChanged = focusFirstFocusableElement(list); } } if (focusChanged) {
} } }
event.preventDefault(); event.stopPropagation();
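// --- Usage sketch (illustrative, not part of the original service) ---
// Trapping focus inside a container with the helpers above; the "#dialog"
// selector is an assumption.
const dialog = document.querySelector("#dialog") as HTMLElement | null;
if (dialog) {
  dialog.addEventListener("keydown", (event: KeyboardEvent) => {
    // cycleTabs only acts on Tab/Shift+Tab, wrapping focus from the last
    // focusable descendant back to the first (and vice versa), and calls
    // preventDefault()/stopPropagation() itself whenever it moves focus.
    cycleTabs(event, dialog);
  });
}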
usbscsi.py
#!/usr/bin/env python3 import argparse from Library.usblib import * def
():
    info = 'MassStorageBackdoor (c) B.Kerler 2019.'
    parser = argparse.ArgumentParser(description=info)
    print("\n" + info + "\n\n")
    parser.add_argument('-vid', metavar="<vid>", help='[Option] Specify vid (default: 0x2e04)', default="0x2e04")
    parser.add_argument('-pid', metavar="<pid>", help='[Option] Specify pid (default: 0xc025)', default="0xc025")
    parser.add_argument('-interface', metavar="<interface>", help='[Option] Specify interface number', default="")
    parser.add_argument('-nokia', help='[Option] Enable Nokia adb backdoor', action='store_true')
    args = parser.parse_args()

    if args.vid != "":
        vid = int(args.vid, 16)
    if args.pid != "":
        pid = int(args.pid, 16)
    if args.interface != "":
        interface = int(args.interface, 16)
    else:
        interface = -1

    usbscsi = scsi(vid, pid, interface)
    if usbscsi.connect():
        if args.nokia:
            usbscsi.send_fih_adbenable()
            usbscsi.send_fih_root()
        else:
            print("A command is required. Use -h to see options.")
            exit(0)
        usbscsi.close()


if __name__ == '__main__':
    main()
main
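# --- Usage sketch (illustrative, not part of the original script) ---
# Command-line equivalents (vid/pid fall back to the defaults above):
#
#   python3 usbscsi.py -nokia
#   python3 usbscsi.py -vid 0x2e04 -pid 0xc025 -interface 0 -nokia
#
# Programmatic equivalent of "-nokia", using the scsi class imported from
# Library.usblib above (the device values are assumptions):
#
#   dev = scsi(0x2e04, 0xc025, -1)
#   if dev.connect():
#       dev.send_fih_adbenable()
#       dev.send_fih_root()
#       dev.close()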
identifier_completer_test.py
# Copyright (C) 2020 ycmd contributors # # This file is part of ycmd. # # ycmd is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ycmd is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with ycmd. If not, see <http://www.gnu.org/licenses/>. import os from hamcrest import assert_that, empty, equal_to, contains_exactly from ycmd.user_options_store import DefaultOptions from ycmd.completers.all import identifier_completer as ic from ycmd.completers.all.identifier_completer import IdentifierCompleter from ycmd.request_wrap import RequestWrap from ycmd.tests import PathToTestFile from ycmd.tests.test_utils import BuildRequest def BuildRequestWrap( contents, column_num, line_num = 1 ): return RequestWrap( BuildRequest( column_num = column_num, line_num = line_num, contents = contents ) ) def GetCursorIdentifier_StartOfLine_test(): assert_that( 'foo', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'foo', 1 ) ) ) ) assert_that( 'fooBar', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'fooBar', 1 ) ) ) ) def GetCursorIdentifier_EndOfLine_test(): assert_that( 'foo', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'foo', 3 ) ) ) ) def GetCursorIdentifier_PastEndOfLine_test(): assert_that( '', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'foo', 11 ) ) ) ) def GetCursorIdentifier_NegativeColumn_test(): assert_that( 'foo', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'foo', -10 ) ) ) ) def GetCursorIdentifier_StartOfLine_StopsAtNonIdentifierChar_test(): assert_that( 'foo', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'foo(goo)', 1 ) ) ) ) def GetCursorIdentifier_AtNonIdentifier_test(): assert_that( 'goo', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'foo(goo)', 4 ) ) ) ) def GetCursorIdentifier_WalksForwardForIdentifier_test(): assert_that( 'foo', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( ' foo', 1 ) ) ) ) def GetCursorIdentifier_FindsNothingForward_test(): assert_that( '', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'foo ()***()', 5 ) ) ) ) def GetCursorIdentifier_SingleCharIdentifier_test(): assert_that( 'f', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( ' f ', 1 ) ) ) ) def GetCursorIdentifier_StartsInMiddleOfIdentifier_test(): assert_that( 'foobar', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( 'foobar', 4 ) ) ) ) def GetCursorIdentifier_LineEmpty_test(): assert_that( '', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( '', 12 ) ) ) ) def GetCursorIdentifier_IgnoreIdentifierFromCommentsAndStrings_test(): assert_that( '', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( '"foobar"', 4 ) ) ) ) assert_that( '', equal_to( ic._GetCursorIdentifier( False, BuildRequestWrap( '/*\n' ' * foobar\n' ' */', 5, 2 ) ) ) ) def GetCursorIdentifier_CollectIdentifierFromCommentsAndStrings_test(): assert_that( 'foobar', equal_to( ic._GetCursorIdentifier( True, BuildRequestWrap( '"foobar"', 4 ) ) ) ) assert_that( 'foobar', equal_to( ic._GetCursorIdentifier( True, BuildRequestWrap( '/*\n' ' * foobar\n' ' */', 5, 
2 ) ) ) ) def PreviousIdentifier_Simple_test(): assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foo', 4 ) ) ) ) def PreviousIdentifier_WholeIdentShouldBeBeforeColumn_test(): assert_that( '', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foobar', column_num = 4 ) ) ) ) def PreviousIdentifier_DoNotWrap_test(): assert_that( '', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foobar\n bar', column_num = 4 ) ) ) ) def PreviousIdentifier_IgnoreForwardIdents_test(): assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foo bar zoo', 4 ) ) ) ) def PreviousIdentifier_IgnoreTooSmallIdent_test(): assert_that( '', equal_to( ic._PreviousIdentifier( 4, False, BuildRequestWrap( 'foo', 4 ) ) ) ) def PreviousIdentifier_IgnoreTooSmallIdent_DontContinueLooking_test(): assert_that( '', equal_to( ic._PreviousIdentifier( 4, False, BuildRequestWrap( 'abcde foo', 10 ) ) ) ) def
(): assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foo ', 6 ) ) ) ) def PreviousIdentifier_JunkAfterIdent_test(): assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foo ;;()** ', 13 ) ) ) ) def PreviousIdentifier_IdentInMiddleOfJunk_test(): assert_that( 'aa', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foo ;;(aa)** ', 13 ) ) ) ) def PreviousIdentifier_IdentOnPreviousLine_test(): assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foo\n ', column_num = 3, line_num = 2 ) ) ) ) assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foo\n', column_num = 1, line_num = 2 ) ) ) ) def PreviousIdentifier_IdentOnPreviousLine_JunkAfterIdent_test(): assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( 'foo **;()\n ', column_num = 3, line_num = 2 ) ) ) ) def PreviousIdentifier_NoGoodIdentFound_test(): assert_that( '', equal_to( ic._PreviousIdentifier( 5, False, BuildRequestWrap( 'foo\n ', column_num = 2, line_num = 2 ) ) ) ) def PreviousIdentifier_IgnoreIdentifierFromCommentsAndStrings_test(): assert_that( '', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( '"foo"\n', column_num = 1, line_num = 2 ) ) ) ) assert_that( '', equal_to( ic._PreviousIdentifier( 2, False, BuildRequestWrap( '/*\n' ' * foo\n' ' */', column_num = 2, line_num = 3 ) ) ) ) def PreviousIdentifier_CollectIdentifierFromCommentsAndStrings_test(): assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, True, BuildRequestWrap( '"foo"\n', column_num = 1, line_num = 2 ) ) ) ) assert_that( 'foo', equal_to( ic._PreviousIdentifier( 2, True, BuildRequestWrap( '/*\n' ' * foo\n' ' */', column_num = 2, line_num = 3 ) ) ) ) def FilterUnchangedTagFiles_NoFiles_test(): ident_completer = IdentifierCompleter( DefaultOptions() ) assert_that( list( ident_completer._FilterUnchangedTagFiles( [] ) ), empty() ) def FilterUnchangedTagFiles_SkipBadFiles_test(): ident_completer = IdentifierCompleter( DefaultOptions() ) assert_that( list( ident_completer._FilterUnchangedTagFiles( [ '/some/tags' ] ) ), empty() ) def FilterUnchangedTagFiles_KeepGoodFiles_test(): ident_completer = IdentifierCompleter( DefaultOptions() ) tag_file = PathToTestFile( 'basic.tags' ) assert_that( ident_completer._FilterUnchangedTagFiles( [ tag_file ] ), contains_exactly( tag_file ) ) def FilterUnchangedTagFiles_SkipUnchangesFiles_test(): ident_completer = IdentifierCompleter( DefaultOptions() ) # simulate an already open tags file that didn't change in the meantime. tag_file = PathToTestFile( 'basic.tags' ) ident_completer._tags_file_last_mtime[ tag_file ] = os.path.getmtime( tag_file ) assert_that( list( ident_completer._FilterUnchangedTagFiles( [ tag_file ] ) ), empty() )
PreviousIdentifier_WhitespaceAfterIdent_test
ember_configuration.js
/*globals ENV QUnit EmberDev */

(function() {
  window.Ember = {
    testing: true
  };
  window.ENV = window.ENV || {};

  // Test for "hooks in ENV.EMBER_LOAD_HOOKS['hookName'] get executed"
  ENV.EMBER_LOAD_HOOKS = ENV.EMBER_LOAD_HOOKS || {};
  ENV.EMBER_LOAD_HOOKS.__before_ember_test_hook__ = ENV.EMBER_LOAD_HOOKS.__before_ember_test_hook__ || [];
  ENV.__test_hook_count__ = 0;
  ENV.EMBER_LOAD_HOOKS.__before_ember_test_hook__.push(function(object) {
    ENV.__test_hook_count__ += object;
  });

  window.ENV.FEATURES = !!QUnit.urlParams.prod ? {"ember-routing-named-substates":null,"ember-routing-add-model-option":true,"ember-routing-linkto-target-attribute":true,"ember-routing-will-change-hooks":null,"ember-routing-multi-current-when":true,"ember-routing-auto-location-uses-replace-state-for-history":true,"event-dispatcher-can-disable-event-manager":null,"ember-metal-is-present":true,"property-brace-expansion-improvement":true,"ember-routing-handlebars-action-with-key-code":null,"ember-runtime-item-controller-inline-class":null,"ember-metal-injected-properties":null,"mandatory-setter":false} : {"ember-routing-named-substates":null,"ember-routing-add-model-option":true,"ember-routing-linkto-target-attribute":true,"ember-routing-will-change-hooks":null,"ember-routing-multi-current-when":true,"ember-routing-auto-location-uses-replace-state-for-history":true,"event-dispatcher-can-disable-event-manager":null,"ember-metal-is-present":true,"property-brace-expansion-improvement":true,"ember-routing-handlebars-action-with-key-code":null,"ember-runtime-item-controller-inline-class":null,"ember-metal-injected-properties":null,"mandatory-setter":true};

  // Handle extending prototypes
  ENV['EXTEND_PROTOTYPES'] = !!QUnit.urlParams.extendprototypes;

  // Handle testing feature flags
  ENV['ENABLE_OPTIONAL_FEATURES'] = !!QUnit.urlParams.enableoptionalfeatures;

  // Don't worry about jQuery version
  ENV['FORCE_JQUERY'] = true;

  // Raise on unhandled deprecations when requested
  ENV['RAISE_ON_DEPRECATION'] = !!QUnit.urlParams.raiseonunhandleddeprecation;

  if (EmberDev.jsHint) {
})();
// jsHint makes its own Object.create stub, we don't want to use this ENV['STUB_OBJECT_CREATE'] = !Object.create; }
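// --- Usage sketch (illustrative, not part of the original config) ---
// The flags above are driven by QUnit URL parameters; the query names come
// from the code above, while the test page path is an assumption:
//
//   tests/index.html?prod=1                        -> production FEATURES set
//   tests/index.html?extendprototypes=1            -> ENV['EXTEND_PROTOTYPES']
//   tests/index.html?enableoptionalfeatures=1      -> ENV['ENABLE_OPTIONAL_FEATURES']
//   tests/index.html?raiseonunhandleddeprecation=1 -> ENV['RAISE_ON_DEPRECATION']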
dynamo.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.25.0 // protoc v3.16.0 // source: envoy/config/filter/http/dynamo/v2/dynamo.proto package envoy_config_filter_http_dynamo_v2 import ( _ "github.com/cncf/xds/go/udpa/annotations" proto "github.com/golang/protobuf/proto" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect"
) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // This is a compile-time assertion that a sufficiently up-to-date version // of the legacy proto package is being used. const _ = proto.ProtoPackageIsVersion4 // Dynamo filter config. type Dynamo struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } func (x *Dynamo) Reset() { *x = Dynamo{} if protoimpl.UnsafeEnabled { mi := &file_envoy_config_filter_http_dynamo_v2_dynamo_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } func (x *Dynamo) String() string { return protoimpl.X.MessageStringOf(x) } func (*Dynamo) ProtoMessage() {} func (x *Dynamo) ProtoReflect() protoreflect.Message { mi := &file_envoy_config_filter_http_dynamo_v2_dynamo_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Dynamo.ProtoReflect.Descriptor instead. func (*Dynamo) Descriptor() ([]byte, []int) { return file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDescGZIP(), []int{0} } var File_envoy_config_filter_http_dynamo_v2_dynamo_proto protoreflect.FileDescriptor var file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDesc = []byte{ 0x0a, 0x2f, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2f, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2f, 0x68, 0x74, 0x74, 0x70, 0x2f, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x6f, 0x2f, 0x76, 0x32, 0x2f, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x22, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x68, 0x74, 0x74, 0x70, 0x2e, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x6f, 0x2e, 0x76, 0x32, 0x1a, 0x1e, 0x75, 0x64, 0x70, 0x61, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x6d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x75, 0x64, 0x70, 0x61, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x08, 0x0a, 0x06, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x6f, 0x42, 0x78, 0x0a, 0x30, 0x69, 0x6f, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x2e, 0x68, 0x74, 0x74, 0x70, 0x2e, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x6f, 0x2e, 0x76, 0x32, 0x42, 0x0b, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x6f, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0xf2, 0x98, 0xfe, 0x8f, 0x05, 0x29, 0x12, 0x27, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, 0x2e, 0x68, 0x74, 0x74, 0x70, 0x2e, 0x64, 0x79, 0x6e, 0x61, 0x6d, 0x6f, 0x2e, 0x76, 0x33, 0xba, 0x80, 0xc8, 0xd1, 0x06, 0x02, 0x10, 0x01, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDescOnce sync.Once file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDescData = file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDesc ) func 
file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDescGZIP() []byte { file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDescOnce.Do(func() { file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDescData = protoimpl.X.CompressGZIP(file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDescData) }) return file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDescData } var file_envoy_config_filter_http_dynamo_v2_dynamo_proto_msgTypes = make([]protoimpl.MessageInfo, 1) var file_envoy_config_filter_http_dynamo_v2_dynamo_proto_goTypes = []interface{}{ (*Dynamo)(nil), // 0: envoy.config.filter.http.dynamo.v2.Dynamo } var file_envoy_config_filter_http_dynamo_v2_dynamo_proto_depIdxs = []int32{ 0, // [0:0] is the sub-list for method output_type 0, // [0:0] is the sub-list for method input_type 0, // [0:0] is the sub-list for extension type_name 0, // [0:0] is the sub-list for extension extendee 0, // [0:0] is the sub-list for field type_name } func init() { file_envoy_config_filter_http_dynamo_v2_dynamo_proto_init() } func file_envoy_config_filter_http_dynamo_v2_dynamo_proto_init() { if File_envoy_config_filter_http_dynamo_v2_dynamo_proto != nil { return } if !protoimpl.UnsafeEnabled { file_envoy_config_filter_http_dynamo_v2_dynamo_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Dynamo); i { case 0: return &v.state case 1: return &v.sizeCache case 2: return &v.unknownFields default: return nil } } } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDesc, NumEnums: 0, NumMessages: 1, NumExtensions: 0, NumServices: 0, }, GoTypes: file_envoy_config_filter_http_dynamo_v2_dynamo_proto_goTypes, DependencyIndexes: file_envoy_config_filter_http_dynamo_v2_dynamo_proto_depIdxs, MessageInfos: file_envoy_config_filter_http_dynamo_v2_dynamo_proto_msgTypes, }.Build() File_envoy_config_filter_http_dynamo_v2_dynamo_proto = out.File file_envoy_config_filter_http_dynamo_v2_dynamo_proto_rawDesc = nil file_envoy_config_filter_http_dynamo_v2_dynamo_proto_goTypes = nil file_envoy_config_filter_http_dynamo_v2_dynamo_proto_depIdxs = nil }
sync "sync"
unpacker.go
package unpack

import (
	"fmt"
	"math"

	"github.com/ywangd/gobufrkit/bufr"
	"github.com/ywangd/gobufrkit/tdcfio"
)

type Unpacker interface {
	Unpack(info *bufr.PackingInfo) (interface{}, error)
}

// NewUnpacker returns an appropriate Unpacker implementation based on the given reader type
func NewUnpacker(reader tdcfio.Reader, nsubsets int, compressed bool, inputType tdcfio.InputType) (Unpacker, error)
func unpackMissing(info *bufr.PackingInfo, x uint) (interface{}, error) { if bufr.IsMissing(x, info.Nbits) { return nil, nil } return x, nil } // Numeric values are always unpacked to float64 func unpackNumeric(info *bufr.PackingInfo, x uint) (interface{}, error) { // No need to apply any packing for missing value if y, _ := unpackMissing(info, x); y == nil { return nil, nil } xfloat := float64(x) if info.Refval != 0 || info.Scale != 0 { if info.Refval != 0 { xfloat += info.Refval } if info.Scale != 0 { xfloat /= math.Pow10(info.Scale) } return xfloat, nil } return xfloat, nil } func unpackBinary(info *bufr.PackingInfo, x uint) (interface{}, error) { return tdcfio.NewBinaryFromUint(x, info.Nbits) }
{ switch inputType { case tdcfio.BinaryInput: if compressed { return &CompressedBitUnpacker{r: reader, nsubsets: nsubsets}, nil } return &UncompressBitUnpacker{r: reader}, nil case tdcfio.FlatJsonInput: return &JsonUnpacker{r: reader}, nil default: return nil, fmt.Errorf("no candidate unpacker for %v", inputType) } }
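// --- Decoding-rule sketch (illustrative, not part of the original file) ---
// unpackNumeric above applies the usual BUFR decode
//
//	value = (raw + reference value) / 10^scale
//
// after the missing-value check (conventionally, all bits set for the field
// width). With the PackingInfo fields inlined as plain values:
//
//	raw := uint(1234)
//	refval, scale := -1000.0, 1
//	decoded := (float64(raw) + refval) / math.Pow10(scale) // 23.4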
nonstandard_style.rs
use rustc::lint::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext}; use rustc::ty; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; use rustc_hir::intravisit::FnKind; use rustc_hir::{GenericParamKind, PatKind}; use rustc_span::symbol::sym; use rustc_span::{symbol::Ident, BytePos, Span}; use rustc_target::spec::abi::Abi; use syntax::ast; use syntax::attr; #[derive(PartialEq)] pub enum MethodLateContext { TraitAutoImpl, TraitImpl, PlainImpl, } pub fn method_context(cx: &LateContext<'_, '_>, id: hir::HirId) -> MethodLateContext { let def_id = cx.tcx.hir().local_def_id(id); let item = cx.tcx.associated_item(def_id); match item.container { ty::TraitContainer(..) => MethodLateContext::TraitAutoImpl, ty::ImplContainer(cid) => match cx.tcx.impl_trait_ref(cid) { Some(_) => MethodLateContext::TraitImpl, None => MethodLateContext::PlainImpl, }, } } declare_lint! { pub NON_CAMEL_CASE_TYPES, Warn, "types, variants, traits and type parameters should have camel case names" } declare_lint_pass!(NonCamelCaseTypes => [NON_CAMEL_CASE_TYPES]); fn char_has_case(c: char) -> bool { c.is_lowercase() || c.is_uppercase() } fn is_camel_case(name: &str) -> bool { let name = name.trim_matches('_'); if name.is_empty() { return true; } // start with a non-lowercase letter rather than non-uppercase // ones (some scripts don't have a concept of upper/lowercase) !name.chars().next().unwrap().is_lowercase() && !name.contains("__") && !name.chars().collect::<Vec<_>>().windows(2).any(|pair| { // contains a capitalisable character followed by, or preceded by, an underscore char_has_case(pair[0]) && pair[1] == '_' || char_has_case(pair[1]) && pair[0] == '_' }) } fn to_camel_case(s: &str) -> String { s.trim_matches('_') .split('_') .filter(|component| !component.is_empty()) .map(|component| { let mut camel_cased_component = String::new(); let mut new_word = true; let mut prev_is_lower_case = true; for c in component.chars() { // Preserve the case if an uppercase letter follows a lowercase letter, so that // `camelCase` is converted to `CamelCase`. 
if prev_is_lower_case && c.is_uppercase() { new_word = true; } if new_word { camel_cased_component.push_str(&c.to_uppercase().to_string()); } else { camel_cased_component.push_str(&c.to_lowercase().to_string()); } prev_is_lower_case = c.is_lowercase(); new_word = false; } camel_cased_component }) .fold((String::new(), None), |(acc, prev): (String, Option<String>), next| { // separate two components with an underscore if their boundary cannot // be distinguished using a uppercase/lowercase case distinction let join = if let Some(prev) = prev { let l = prev.chars().last().unwrap(); let f = next.chars().next().unwrap(); !char_has_case(l) && !char_has_case(f) } else { false }; (acc + if join { "_" } else { "" } + &next, Some(next)) }) .0 } impl NonCamelCaseTypes { fn check_case(&self, cx: &EarlyContext<'_>, sort: &str, ident: &Ident) { let name = &ident.name.as_str(); if !is_camel_case(name) { let msg = format!("{} `{}` should have an upper camel case name", sort, name); cx.struct_span_lint(NON_CAMEL_CASE_TYPES, ident.span, &msg) .span_suggestion( ident.span, "convert the identifier to upper camel case", to_camel_case(name), Applicability::MaybeIncorrect, ) .emit(); } } } impl EarlyLintPass for NonCamelCaseTypes { fn check_item(&mut self, cx: &EarlyContext<'_>, it: &ast::Item) { let has_repr_c = it .attrs .iter() .any(|attr| attr::find_repr_attrs(&cx.sess.parse_sess, attr).contains(&attr::ReprC)); if has_repr_c { return; } match it.kind { ast::ItemKind::TyAlias(..) | ast::ItemKind::Enum(..) | ast::ItemKind::Struct(..) | ast::ItemKind::Union(..) => self.check_case(cx, "type", &it.ident), ast::ItemKind::Trait(..) => self.check_case(cx, "trait", &it.ident), _ => (), } } fn check_trait_item(&mut self, cx: &EarlyContext<'_>, it: &ast::AssocItem) { if let ast::AssocItemKind::TyAlias(..) = it.kind { self.check_case(cx, "associated type", &it.ident); } } fn check_variant(&mut self, cx: &EarlyContext<'_>, v: &ast::Variant) { self.check_case(cx, "variant", &v.ident); } fn check_generic_param(&mut self, cx: &EarlyContext<'_>, param: &ast::GenericParam) { if let ast::GenericParamKind::Type { .. } = param.kind { self.check_case(cx, "type parameter", &param.ident); } } } declare_lint! { pub NON_SNAKE_CASE, Warn, "variables, methods, functions, lifetime parameters and modules should have snake case names" } declare_lint_pass!(NonSnakeCase => [NON_SNAKE_CASE]); impl NonSnakeCase { fn to_snake_case(mut str: &str) -> String
/// Checks if a given identifier is snake case, and reports a diagnostic if not. fn check_snake_case(&self, cx: &LateContext<'_, '_>, sort: &str, ident: &Ident) { fn is_snake_case(ident: &str) -> bool { if ident.is_empty() { return true; } let ident = ident.trim_start_matches('\''); let ident = ident.trim_matches('_'); let mut allow_underscore = true; ident.chars().all(|c| { allow_underscore = match c { '_' if !allow_underscore => return false, '_' => false, // It would be more obvious to use `c.is_lowercase()`, // but some characters do not have a lowercase form c if !c.is_uppercase() => true, _ => return false, }; true }) } let name = &ident.name.as_str(); if !is_snake_case(name) { let sc = NonSnakeCase::to_snake_case(name); let msg = format!("{} `{}` should have a snake case name", sort, name); let mut err = cx.struct_span_lint(NON_SNAKE_CASE, ident.span, &msg); // We have a valid span in almost all cases, but we don't have one when linting a crate // name provided via the command line. if !ident.span.is_dummy() { err.span_suggestion( ident.span, "convert the identifier to snake case", sc, Applicability::MaybeIncorrect, ); } else { err.help(&format!("convert the identifier to snake case: `{}`", sc)); } err.emit(); } } } impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonSnakeCase { fn check_mod( &mut self, cx: &LateContext<'_, '_>, _: &'tcx hir::Mod<'tcx>, _: Span, id: hir::HirId, ) { if id != hir::CRATE_HIR_ID { return; } let crate_ident = if let Some(name) = &cx.tcx.sess.opts.crate_name { Some(Ident::from_str(name)) } else { attr::find_by_name(&cx.tcx.hir().attrs(hir::CRATE_HIR_ID), sym::crate_name) .and_then(|attr| attr.meta()) .and_then(|meta| { meta.name_value_literal().and_then(|lit| { if let ast::LitKind::Str(name, ..) = lit.kind { // Discard the double quotes surrounding the literal. let sp = cx .sess() .source_map() .span_to_snippet(lit.span) .ok() .and_then(|snippet| { let left = snippet.find('"')?; let right = snippet.rfind('"').map(|pos| snippet.len() - pos)?; Some( lit.span .with_lo(lit.span.lo() + BytePos(left as u32 + 1)) .with_hi(lit.span.hi() - BytePos(right as u32)), ) }) .unwrap_or_else(|| lit.span); Some(Ident::new(name, sp)) } else { None } }) }) }; if let Some(ident) = &crate_ident { self.check_snake_case(cx, "crate", ident); } } fn check_generic_param(&mut self, cx: &LateContext<'_, '_>, param: &hir::GenericParam<'_>) { if let GenericParamKind::Lifetime { .. } = param.kind { self.check_snake_case(cx, "lifetime", &param.name.ident()); } } fn check_fn( &mut self, cx: &LateContext<'_, '_>, fk: FnKind<'_>, _: &hir::FnDecl<'_>, _: &hir::Body<'_>, _: Span, id: hir::HirId, ) { match &fk { FnKind::Method(ident, ..) 
=> match method_context(cx, id) { MethodLateContext::PlainImpl => { self.check_snake_case(cx, "method", ident); } MethodLateContext::TraitAutoImpl => { self.check_snake_case(cx, "trait method", ident); } _ => (), }, FnKind::ItemFn(ident, _, header, _, attrs) => { // Skip foreign-ABI #[no_mangle] functions (Issue #31924) if header.abi != Abi::Rust && attr::contains_name(attrs, sym::no_mangle) { return; } self.check_snake_case(cx, "function", ident); } FnKind::Closure(_) => (), } } fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item<'_>) { if let hir::ItemKind::Mod(_) = it.kind { self.check_snake_case(cx, "module", &it.ident); } } fn check_trait_item(&mut self, cx: &LateContext<'_, '_>, item: &hir::TraitItem<'_>) { if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(pnames)) = item.kind { self.check_snake_case(cx, "trait method", &item.ident); for param_name in pnames { self.check_snake_case(cx, "variable", param_name); } } } fn check_pat(&mut self, cx: &LateContext<'_, '_>, p: &hir::Pat<'_>) { if let &PatKind::Binding(_, _, ident, _) = &p.kind { self.check_snake_case(cx, "variable", &ident); } } fn check_struct_def(&mut self, cx: &LateContext<'_, '_>, s: &hir::VariantData<'_>) { for sf in s.fields() { self.check_snake_case(cx, "structure field", &sf.ident); } } } declare_lint! { pub NON_UPPER_CASE_GLOBALS, Warn, "static constants should have uppercase identifiers" } declare_lint_pass!(NonUpperCaseGlobals => [NON_UPPER_CASE_GLOBALS]); impl NonUpperCaseGlobals { fn check_upper_case(cx: &LateContext<'_, '_>, sort: &str, ident: &Ident) { let name = &ident.name.as_str(); if name.chars().any(|c| c.is_lowercase()) { let uc = NonSnakeCase::to_snake_case(&name).to_uppercase(); let msg = format!("{} `{}` should have an upper case name", sort, name); cx.struct_span_lint(NON_UPPER_CASE_GLOBALS, ident.span, &msg) .span_suggestion( ident.span, "convert the identifier to upper case", uc, Applicability::MaybeIncorrect, ) .emit(); } } } impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonUpperCaseGlobals { fn check_item(&mut self, cx: &LateContext<'_, '_>, it: &hir::Item<'_>) { match it.kind { hir::ItemKind::Static(..) if !attr::contains_name(&it.attrs, sym::no_mangle) => { NonUpperCaseGlobals::check_upper_case(cx, "static variable", &it.ident); } hir::ItemKind::Const(..) => { NonUpperCaseGlobals::check_upper_case(cx, "constant", &it.ident); } _ => {} } } fn check_trait_item(&mut self, cx: &LateContext<'_, '_>, ti: &hir::TraitItem<'_>) { if let hir::TraitItemKind::Const(..) = ti.kind { NonUpperCaseGlobals::check_upper_case(cx, "associated constant", &ti.ident); } } fn check_impl_item(&mut self, cx: &LateContext<'_, '_>, ii: &hir::ImplItem<'_>) { if let hir::ImplItemKind::Const(..) = ii.kind { NonUpperCaseGlobals::check_upper_case(cx, "associated constant", &ii.ident); } } fn check_pat(&mut self, cx: &LateContext<'_, '_>, p: &hir::Pat<'_>) { // Lint for constants that look like binding identifiers (#7526) if let PatKind::Path(hir::QPath::Resolved(None, ref path)) = p.kind { if let Res::Def(DefKind::Const, _) = path.res { if path.segments.len() == 1 { NonUpperCaseGlobals::check_upper_case( cx, "constant in pattern", &path.segments[0].ident, ); } } } } fn check_generic_param(&mut self, cx: &LateContext<'_, '_>, param: &hir::GenericParam<'_>) { if let GenericParamKind::Const { .. } = param.kind { NonUpperCaseGlobals::check_upper_case(cx, "const parameter", &param.name.ident()); } } } #[cfg(test)] mod tests;
{ let mut words = vec![]; // Preserve leading underscores str = str.trim_start_matches(|c: char| { if c == '_' { words.push(String::new()); true } else { false } }); for s in str.split('_') { let mut last_upper = false; let mut buf = String::new(); if s.is_empty() { continue; } for ch in s.chars() { if !buf.is_empty() && buf != "'" && ch.is_uppercase() && !last_upper { words.push(buf); buf = String::new(); } last_upper = ch.is_uppercase(); buf.extend(ch.to_lowercase()); } words.push(buf); } words.join("_") }
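// --- Behavior sketch (illustrative; the real checks live in the separate
// `tests` module referenced above). Assertions along these lines show what
// the two converters produce:
//
//     assert_eq!(to_camel_case("foo_bar"), "FooBar");
//     // no case at the component boundary, so the underscore is kept:
//     assert_eq!(to_camel_case("X86_64"), "X86_64");
//     assert_eq!(NonSnakeCase::to_snake_case("MyStruct"), "my_struct");
//     // leading underscores are preserved:
//     assert_eq!(NonSnakeCase::to_snake_case("__foo"), "__foo");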
sync.rs
use super::number::Number;
use serde::{Serialize, Serializer};

/// Sync info
#[derive(Debug, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct SyncInfo {
    /// Starting block
    starting_block: Number,
    /// Current block
    current_block: Number,
    /// Highest block seen so far
    highest_block: Number,
}

/// Sync status
#[derive(Debug, PartialEq, Eq)]
pub enum SyncStatus {
    /// Info when syncing
    Info(SyncInfo),
    /// Not syncing
    #[allow(dead_code)]
    None,
}

impl SyncStatus {
    pub fn build() -> Self {
        Self::Info(SyncInfo {
            starting_block: 0.into(),
            current_block: 0.into(),
            highest_block: 0.into(),
        })
    }
}

impl Serialize for SyncStatus {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            SyncStatus::Info(ref info) => info.serialize(serializer),
            SyncStatus::None => false.serialize(serializer),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn sync_status_json_serialize()
}
{ let ss_none = SyncStatus::None; let ss_info = SyncStatus::Info(SyncInfo { starting_block: 0.into(), current_block: 0.into(), highest_block: 0.into(), }); assert_eq!(serde_json::to_string(&ss_none).unwrap(), "false"); assert_eq!( serde_json::to_string(&ss_info).unwrap(), r#"{"startingBlock":"0x0","currentBlock":"0x0","highestBlock":"0x0"}"# ); }
statement.rs
use crate::coretypes::{CompileSession, RichDebug, Statement}; use crate::util::indented; impl RichDebug for Statement { fn rich_debug(&self, session: &CompileSession) -> String
}
{ match self { Statement::Output(_, expressions, to_print_linefeed) => format!( "Output{} {{\n{}\n}}", if *to_print_linefeed { "" } else { "(No Linefeed)" }, indented( expressions .iter() .map(|expression| expression.rich_debug(session)) .collect::<Vec<_>>() .join("\n") ) ), Statement::Expression(expression) => { format!("Statement :: {}", expression.rich_debug(session)) } Statement::Declaration(declaration) => declaration.rich_debug(session), _ => "Unknown Statement".into(), } }
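// --- Output sketch (illustrative, not part of the original file) ---
// For an Output statement holding two expressions, rich_debug renders
// roughly (the indentation comes from util::indented):
//
//     Output {
//         <expression 1 rich_debug>
//         <expression 2 rich_debug>
//     }
//
// with the header becoming "Output(No Linefeed)" when to_print_linefeed
// is false.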
searchAgents.py
# searchAgents.py
# ---------------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero ([email protected]) and Dan Klein ([email protected]).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html

"""
This file contains all of the agents that can be selected to control Pacman.  To
select an agent, use the '-p' option when running pacman.py.  Arguments can be
passed to your agent using '-a'.  For example, to load a SearchAgent that uses
depth first search (dfs), run the following command:

> python pacman.py -p SearchAgent -a fn=depthFirstSearch

Commands to invoke other search strategies can be found in the project
description.

Please only change the parts of the file you are asked to.  Look for the lines
that say

"*** YOUR CODE HERE ***"

The parts you fill in start about 3/4 of the way down.  Follow the project
description for details.

Good luck and happy searching!
"""
from game import Directions
from game import Agent
from game import Actions
import util
import time
import search
import searchAgents

class GoWestAgent(Agent):
  "An agent that goes West until it can't."
  def getAction(self, state):
    "The agent receives a GameState (defined in pacman.py)."
    if Directions.WEST in state.getLegalPacmanActions():
      return Directions.WEST
    else:
      return Directions.STOP

#######################################################
# This portion is written for you, but will only work #
#       after you fill in parts of search.py          #
#######################################################

class SearchAgent(Agent):
  """
  This very general search agent finds a path using a supplied search algorithm for a
  supplied search problem, then returns actions to follow that path.

  As a default, this agent runs DFS on a PositionSearchProblem to find location (1,1)

  Options for fn include:
    depthFirstSearch or dfs
    breadthFirstSearch or bfs

  Note: You should NOT change any code in SearchAgent
  """
  def __init__(self, fn='depthFirstSearch', prob='PositionSearchProblem', heuristic='nullHeuristic'):
    # Warning: some advanced Python magic is employed below to find the right functions and problems

    # Get the search function from the name and heuristic
    if fn not in dir(search):
      raise AttributeError, fn + ' is not a search function in search.py.'
    func = getattr(search, fn)
    if 'heuristic' not in func.func_code.co_varnames:
      print('[SearchAgent] using function ' + fn)
      self.searchFunction = func
    else:
      if heuristic in dir(searchAgents):
        heur = getattr(searchAgents, heuristic)
      elif heuristic in dir(search):
        heur = getattr(search, heuristic)
      else:
        raise AttributeError, heuristic + ' is not a function in searchAgents.py or search.py.'
      print('[SearchAgent] using function %s and heuristic %s' % (fn, heuristic))
      # Note: this bit of Python trickery combines the search algorithm and the heuristic
      self.searchFunction = lambda x: func(x, heuristic=heur)

    # Get the search problem type from the name
    if prob not in dir(searchAgents) or not prob.endswith('Problem'):
      raise AttributeError, prob + ' is not a search problem type in SearchAgents.py.'
    self.searchType = getattr(searchAgents, prob)
    print('[SearchAgent] using problem type ' + prob)

  def registerInitialState(self, state):
    """
    This is the first time that the agent sees the layout of the game board. Here, we
    choose a path to the goal.
In this phase, the agent should compute the path to the goal and store it in a local variable. All of the work is done in this method! state: a GameState object (pacman.py) """ if self.searchFunction == None: raise Exception, "No search function provided for SearchAgent" starttime = time.time() problem = self.searchType(state) # Makes a new search problem self.actions = self.searchFunction(problem) # Find a path totalCost = problem.getCostOfActions(self.actions) print('Path found with total cost of %d in %.1f seconds' % (totalCost, time.time() - starttime)) if '_expanded' in dir(problem): print('Search nodes expanded: %d' % problem._expanded) def getAction(self, state): """ Returns the next action in the path chosen earlier (in registerInitialState). Return Directions.STOP if there is no further action to take. state: a GameState object (pacman.py) """ if 'actionIndex' not in dir(self): self.actionIndex = 0 i = self.actionIndex self.actionIndex += 1 if i < len(self.actions): return self.actions[i] else: return Directions.STOP class PositionSearchProblem(search.SearchProblem): """ A search problem defines the state space, start state, goal test, successor function and cost function. This search problem can be used to find paths to a particular point on the pacman board. The state space consists of (x,y) positions in a pacman game. Note: this search problem is fully specified; you should NOT change it. """ def __init__(self, gameState, costFn = lambda x: 1, goal=(1,1), start=None, warn=True): """ Stores the start and goal. gameState: A GameState object (pacman.py) costFn: A function from a search state (tuple) to a non-negative number goal: A position in the gameState """ self.walls = gameState.getWalls() self.startState = gameState.getPacmanPosition() if start != None: self.startState = start self.goal = goal self.costFn = costFn if warn and (gameState.getNumFood() != 1 or not gameState.hasFood(*goal)): print 'Warning: this does not look like a regular search maze' # For display purposes self._visited, self._visitedlist, self._expanded = {}, [], 0 def getStartState(self): return self.startState def isGoalState(self, state): isGoal = state == self.goal # For display purposes only if isGoal: self._visitedlist.append(state) import __main__ if '_display' in dir(__main__): if 'drawExpandedCells' in dir(__main__._display): #@UndefinedVariable __main__._display.drawExpandedCells(self._visitedlist) #@UndefinedVariable return isGoal
  def getSuccessors(self, state):
    """
    Returns successor states, the actions they require, and a cost of 1.

    As noted in search.py:
        For a given state, this should return a list of triples,
    (successor, action, stepCost), where 'successor' is a
    successor to the current state, 'action' is the action
    required to get there, and 'stepCost' is the incremental
    cost of expanding to that successor
    """
    successors = []
    for action in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]:
      x,y = state
      dx, dy = Actions.directionToVector(action)
      nextx, nexty = int(x + dx), int(y + dy)
      if not self.walls[nextx][nexty]:
        nextState = (nextx, nexty)
        cost = self.costFn(nextState)
        successors.append( ( nextState, action, cost) )

    # Bookkeeping for display purposes
    self._expanded += 1
    if state not in self._visited:
      self._visited[state] = True
      self._visitedlist.append(state)

    return successors

  def getCostOfActions(self, actions):
    """
    Returns the cost of a particular sequence of actions.  If those actions
    include an illegal move, return 999999
    """
    if actions == None: return 999999
    x, y = self.getStartState()
    cost = 0
    for action in actions:
      # Figure out the next state and see whether it's legal
      dx, dy = Actions.directionToVector(action)
      x, y = int(x + dx), int(y + dy)
      if self.walls[x][y]: return 999999
      cost += self.costFn((x,y))
    return cost

class StayEastSearchAgent(SearchAgent):
  """
  An agent for position search with a cost function that penalizes being in
  positions on the West side of the board.

  The cost function for stepping into a position (x,y) is 1/2^x.
  """
  def __init__(self):
    self.searchFunction = search.uniformCostSearch
    costFn = lambda pos: .5 ** pos[0]
    self.searchType = lambda state: PositionSearchProblem(state, costFn)

class StayWestSearchAgent(SearchAgent):
  """
  An agent for position search with a cost function that penalizes being in
  positions on the East side of the board.

  The cost function for stepping into a position (x,y) is 2^x.
  """
  def __init__(self):
    self.searchFunction = search.uniformCostSearch
    costFn = lambda pos: 2 ** pos[0]
    self.searchType = lambda state: PositionSearchProblem(state, costFn)

def manhattanHeuristic(position, problem, info={}):
  "The Manhattan distance heuristic for a PositionSearchProblem"
  xy1 = position
  xy2 = problem.goal
  return abs(xy1[0] - xy2[0]) + abs(xy1[1] - xy2[1])

def euclideanHeuristic(position, problem, info={}):
  "The Euclidean distance heuristic for a PositionSearchProblem"
  xy1 = position
  xy2 = problem.goal
  return ( (xy1[0] - xy2[0]) ** 2 + (xy1[1] - xy2[1]) ** 2 ) ** 0.5

#####################################################
# This portion is incomplete.  Time to write code!  #
#####################################################

class CornersProblem(search.SearchProblem):
  """
  This search problem finds paths through all four corners of a layout.

  You must select a suitable state space and successor function
  """

  def __init__(self, startingGameState):
    """
    Stores the walls, pacman's starting position and corners.
""" self.walls = startingGameState.getWalls() self.startingPosition = startingGameState.getPacmanPosition() top, right = self.walls.height-2, self.walls.width-2 self.corners = ((1,1), (1,top), (right, 1), (right, top)) for corner in self.corners: if not startingGameState.hasFood(*corner): print 'Warning: no food in corner ' + str(corner) self._expanded = 0 # Number of search nodes expanded "*** YOUR CODE HERE ***" self.start = (startingGameState.getPacmanPosition(), self.corners) def getStartState(self): "Returns the start state (in your state space, not the full Pacman state space)" "*** YOUR CODE HERE ***" return self.start def isGoalState(self, state): "Returns whether this search state is a goal state of the problem" "*** YOUR CODE HERE ***" return len(state[1]) == 0 def getSuccessors(self, state): """ Returns successor states, the actions they require, and a cost of 1. As noted in search.py: For a given state, this should return a list of triples, (successor, action, stepCost), where 'successor' is a successor to the current state, 'action' is the action required to get there, and 'stepCost' is the incremental cost of expanding to that successor """ successors = [] for action in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]: # Add a successor state to the successor list if the action is legal # Here's a code snippet for figuring out whether a new position hits a wall: "*** YOUR CODE HERE ***" x, y = state[0] dx, dy = Actions.directionToVector(action) nextx, nexty = int(x + dx), int(y + dy) hitsWall = self.walls[nextx][nexty] if not hitsWall: nextState = (nextx, nexty) if nextState in state[1]: temp = list(state[1]) temp.remove(nextState) successors.append(((nextState,tuple(temp)), action, 1)) else: successors.append(((nextState,state[1]), action, 1)) self._expanded += 1 return successors def getCostOfActions(self, actions): """ Returns the cost of a particular sequence of actions. If those actions include an illegal move, return 999999. This is implemented for you. """ if actions == None: return 999999 x,y= self.startingPosition for action in actions: dx, dy = Actions.directionToVector(action) x, y = int(x + dx), int(y + dy) if self.walls[x][y]: return 999999 return len(actions) def cornersHeuristic(state, problem): """ A heuristic for the CornersProblem that you defined. state: The current search state (a data structure you chose in your search problem) problem: The CornersProblem instance for this layout. This function should always return a number that is a lower bound on the shortest path from the state to a goal of the problem; i.e. it should be admissible (as well as consistent). 
""" corners = problem.corners # These are the corner coordinates walls = problem.walls # These are the walls of the maze, as a Grid (game.py) "*** YOUR CODE HERE ***" corners = list(state[1]) distances = [] if len(corners) <= 0: return 0 else: xy1 = state[0] for i in range(len(corners)): xy2 = corners[i] distances.append(abs(xy1[0] - xy2[0]) + abs(xy1[1] - xy2[1])) initialDistance = min(distances) closestCorner = corners[distances.index(initialDistance)] corners.remove(closestCorner) cornersDistance = 0 while len(corners) > 0: distances = [] xy1 = closestCorner for i in range(len(corners)): xy2 = corners[i] distances.append(abs(xy1[0] - xy2[0]) + abs(xy1[1] - xy2[1])) additionalDistance = min(distances) closestCorner = corners[distances.index(additionalDistance)] corners.remove(closestCorner) cornersDistance = cornersDistance + additionalDistance return initialDistance + cornersDistance class AStarCornersAgent(SearchAgent): "A SearchAgent for FoodSearchProblem using A* and your foodHeuristic" def __init__(self): self.searchFunction = lambda prob: search.aStarSearch(prob, cornersHeuristic) self.searchType = CornersProblem class FoodSearchProblem: """ A search problem associated with finding the a path that collects all of the food (dots) in a Pacman game. A search state in this problem is a tuple ( pacmanPosition, foodGrid ) where pacmanPosition: a tuple (x,y) of integers specifying Pacman's position foodGrid: a Grid (see game.py) of either True or False, specifying remaining food """ def __init__(self, startingGameState): self.start = (startingGameState.getPacmanPosition(), startingGameState.getFood()) self.walls = startingGameState.getWalls() self.startingGameState = startingGameState self._expanded = 0 self.heuristicInfo = {} # A dictionary for the heuristic to store information def getStartState(self): return self.start def isGoalState(self, state): return state[1].count() == 0 def getSuccessors(self, state): "Returns successor states, the actions they require, and a cost of 1." successors = [] self._expanded += 1 for direction in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]: x,y = state[0] dx, dy = Actions.directionToVector(direction) nextx, nexty = int(x + dx), int(y + dy) if not self.walls[nextx][nexty]: nextFood = state[1].copy() nextFood[nextx][nexty] = False successors.append( ( ((nextx, nexty), nextFood), direction, 1) ) return successors def getCostOfActions(self, actions): """Returns the cost of a particular sequence of actions. If those actions include an illegal move, return 999999""" x,y= self.getStartState()[0] cost = 0 for action in actions: # figure out the next state and see whether it's legal dx, dy = Actions.directionToVector(action) x, y = int(x + dx), int(y + dy) if self.walls[x][y]: return 999999 cost += 1 return cost class AStarFoodSearchAgent(SearchAgent): "A SearchAgent for FoodSearchProblem using A* and your foodHeuristic" def __init__(self): self.searchFunction = lambda prob: search.aStarSearch(prob, foodHeuristic) self.searchType = FoodSearchProblem def foodHeuristic(state, problem): """ Your heuristic for the FoodSearchProblem goes here. This heuristic must be consistent to ensure correctness. First, try to come up with an admissible heuristic; almost all admissible heuristics will be consistent as well. If using A* ever finds a solution that is worse uniform cost search finds, your heuristic is *not* consistent, and probably not admissible! 
On the other hand, inadmissible or inconsistent heuristics may find optimal solutions, so be careful. The state is a tuple ( pacmanPosition, foodGrid ) where foodGrid is a Grid (see game.py) of either True or False. You can call foodGrid.asList() to get a list of food coordinates instead. If you want access to info like walls, capsules, etc., you can query the problem. For example, problem.walls gives you a Grid of where the walls are. If you want to *store* information to be reused in other calls to the heuristic, there is a dictionary called problem.heuristicInfo that you can use. For example, if you only want to count the walls once and store that value, try: problem.heuristicInfo['wallCount'] = problem.walls.count() Subsequent calls to this heuristic can access problem.heuristicInfo['wallCount'] """ position, foodGrid = state "*** YOUR CODE HERE ***" #use the all the food and pacman to form a MST foodList = foodGrid.asList() if(len(foodList) == 0): return 0 #manhattan distance between each node distances = [] nodes = foodList[:] nodes.insert(0,position) for x in nodes: distanceDict = {} for y in nodes: distanceDict[y] = abs(x[0]-y[0]) + abs(x[1]-y[1]) distances.append(distanceDict) #shortest distance from the node to the MST leastCost = [] #which node is the nearest to that node closest = [] for node in nodes: leastCost.append(distances[0][node]) closest.append(position) for i in range (0,len(nodes)): tempLeastCost = leastCost[:] tempLeastCost.sort(cmp=None, key=None, reverse=False) indexLeastCostNode = leastCost.index(tempLeastCost[i:][0]) leastCost[indexLeastCostNode] = 0 for node in nodes: if distances[indexLeastCostNode][node] < leastCost[nodes.index(node)]: leastCost[nodes.index(node)] = distances[indexLeastCostNode][node] closest[nodes.index(node)] = nodes[indexLeastCostNode] distance = 0 for i in range (0,len(nodes)): distance += distances[i][closest[i]] return distance class ClosestDotSearchAgent(SearchAgent): "Search for all food using a sequence of searches" def registerInitialState(self, state): self.actions = [] currentState = state while(currentState.getFood().count() > 0): nextPathSegment = self.findPathToClosestDot(currentState) # The missing piece self.actions += nextPathSegment for action in nextPathSegment: legal = currentState.getLegalActions() if action not in legal: t = (str(action), str(currentState)) raise Exception, 'findPathToClosestDot returned an illegal move: %s!\n%s' % t currentState = currentState.generateSuccessor(0, action) self.actionIndex = 0 print 'Path found with cost %d.' 
% len(self.actions)

  def findPathToClosestDot(self, gameState):
    "Returns a path (a list of actions) to the closest dot, starting from gameState"
    # Here are some useful elements of the startState
    startPosition = gameState.getPacmanPosition()
    food = gameState.getFood()
    walls = gameState.getWalls()
    problem = AnyFoodSearchProblem(gameState)

    "*** YOUR CODE HERE ***"
    # BFS, as in search.py
    from game import Directions
    directions = {'South': Directions.SOUTH, 'North': Directions.NORTH,
                  'West': Directions.WEST, 'East': Directions.EAST}
    fringe = util.Queue()
    fringe.push((startPosition, [], 0))
    visited = []
    while not fringe.isEmpty():
      current = fringe.pop()
      if problem.isGoalState(current[0]):
        return current[1]
      if visited.count(current[0]) < 1:
        visited.append(current[0])
        for s in problem.getSuccessors(current[0]):
          if visited.count(s[0]) < 1:
            fringe.push((s[0], current[1] + [directions[s[1]]], s[2]))
    return []

class AnyFoodSearchProblem(PositionSearchProblem):
  """
  A search problem for finding a path to any food.

  This search problem is just like the PositionSearchProblem, but
  has a different goal test, which you need to fill in below.  The
  state space and successor function do not need to be changed.

  The class definition above, AnyFoodSearchProblem(PositionSearchProblem),
  inherits the methods of the PositionSearchProblem.

  You can use this search problem to help you fill in
  the findPathToClosestDot method.
  """

  def __init__(self, gameState):
    "Stores information from the gameState.  You don't need to change this."
    # Store the food for later reference
    self.food = gameState.getFood()

    # Store info for the PositionSearchProblem (no need to change this)
    self.walls = gameState.getWalls()
    self.startState = gameState.getPacmanPosition()
    self.costFn = lambda x: 1
    self._visited, self._visitedlist, self._expanded = {}, [], 0

  def isGoalState(self, state):
    """
    The state is Pacman's position. Fill this in with a goal test
    that will complete the problem definition.
    """
    x,y = state
    "*** YOUR CODE HERE ***"
    return (x,y) in self.food.asList()

##################
# Mini-contest 1 #
##################

class ApproximateSearchAgent(Agent):
  "Implement your contest entry here.  Change anything but the class name."

  def registerInitialState(self, state):
    "This method is called before any moves are made."
    "*** YOUR CODE HERE ***"

  def getAction(self, state):
    """
    From game.py:
    The Agent will receive a GameState and must return an action from
    Directions.{North, South, East, West, Stop}
    """
    "*** YOUR CODE HERE ***"
    util.raiseNotDefined()

def mazeDistance(point1, point2, gameState):
  """
  Returns the maze distance between any two points, using the search functions
  you have already built.  The gameState can be any game state -- Pacman's position
  in that state is ignored.

  Example usage: mazeDistance( (2,4), (5,6), gameState)

  This might be a useful helper function for your ApproximateSearchAgent.
  """
  x1, y1 = point1
  x2, y2 = point2
  walls = gameState.getWalls()
  assert not walls[x1][y1], 'point1 is a wall: ' + str(point1)
  assert not walls[x2][y2], 'point2 is a wall: ' + str(point2)
  prob = PositionSearchProblem(gameState, start=point1, goal=point2, warn=False)
  return len(search.bfs(prob))
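# --- Usage sketch (illustrative, not part of the original file) ---
# The agents above are selected with pacman.py's -p/-a/-l options; the
# layout names and the "astar" alias in search.py are assumptions based on
# the standard project setup ("bfs" is used by mazeDistance above):
#
#   python pacman.py -p SearchAgent -a fn=bfs
#   python pacman.py -l mediumCorners -p SearchAgent -a fn=astar,prob=CornersProblem,heuristic=cornersHeuristic
#   python pacman.py -l trickySearch -p AStarFoodSearchAgent
#   python pacman.py -l bigSearch -p ClosestDotSearchAgent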
config.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// Service config. /// /// /// Service configuration allows for customization of endpoints, region, credentials providers, /// and retry configuration. Generally, it is constructed automatically for you from a shared /// configuration loaded by the `aws-config` crate. For example: /// /// ```ignore /// // Load a shared config from the environment /// let shared_config = aws_config::from_env().load().await; /// // The client constructor automatically converts the shared config into the service config /// let client = Client::new(&shared_config); /// ``` /// /// The service config can also be constructed manually using its builder. /// pub struct Config { app_name: Option<aws_types::app_name::AppName>, pub(crate) timeout_config: Option<aws_smithy_types::timeout::Config>, pub(crate) sleep_impl: Option<std::sync::Arc<dyn aws_smithy_async::rt::sleep::AsyncSleep>>, pub(crate) retry_config: Option<aws_smithy_types::retry::RetryConfig>, pub(crate) endpoint_resolver: ::std::sync::Arc<dyn aws_endpoint::ResolveAwsEndpoint>, pub(crate) region: Option<aws_types::region::Region>, pub(crate) credentials_provider: aws_types::credentials::SharedCredentialsProvider, } impl std::fmt::Debug for Config { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut config = f.debug_struct("Config"); config.finish() } } impl Config { /// Constructs a config builder. pub fn builder() -> Builder { Builder::default() } /// Returns the name of the app that is using the client, if it was provided. /// /// This _optional_ name is used to identify the application in the user agent that /// gets sent along with requests. pub fn app_name(&self) -> Option<&aws_types::app_name::AppName> { self.app_name.as_ref() } /// Creates a new [service config](crate::Config) from a [shared `config`](aws_types::sdk_config::SdkConfig). pub fn new(config: &aws_types::sdk_config::SdkConfig) -> Self { Builder::from(config).build() } /// The signature version 4 service signing name to use in the credential scope when signing requests. /// /// The signing service may be overridden by the `Endpoint`, or by specifying a custom /// [`SigningService`](aws_types::SigningService) during operation construction pub fn signing_service(&self) -> &'static str { "iot1click" } } /// Builder for creating a `Config`. #[derive(Default)] pub struct Builder { app_name: Option<aws_types::app_name::AppName>, timeout_config: Option<aws_smithy_types::timeout::Config>, sleep_impl: Option<std::sync::Arc<dyn aws_smithy_async::rt::sleep::AsyncSleep>>, retry_config: Option<aws_smithy_types::retry::RetryConfig>, endpoint_resolver: Option<::std::sync::Arc<dyn aws_endpoint::ResolveAwsEndpoint>>, region: Option<aws_types::region::Region>, credentials_provider: Option<aws_types::credentials::SharedCredentialsProvider>, } impl Builder { /// Constructs a config builder. pub fn new() -> Self { Self::default() } /// Sets the name of the app that is using the client. /// /// This _optional_ name is used to identify the application in the user agent that /// gets sent along with requests. pub fn app_name(mut self, app_name: aws_types::app_name::AppName) -> Self { self.set_app_name(Some(app_name)); self } /// Sets the name of the app that is using the client. /// /// This _optional_ name is used to identify the application in the user agent that /// gets sent along with requests. 
pub fn set_app_name(&mut self, app_name: Option<aws_types::app_name::AppName>) -> &mut Self { self.app_name = app_name; self } /// Set the timeout_config for the builder /// /// # Examples /// /// ```no_run /// # use std::time::Duration; /// use aws_sdk_iot1clickdevices::config::Config; /// use aws_smithy_types::{timeout, tristate::TriState}; /// /// let api_timeouts = timeout::Api::new() /// .with_call_attempt_timeout(TriState::Set(Duration::from_secs(1))); /// let timeout_config = timeout::Config::new() /// .with_api_timeouts(api_timeouts); /// let config = Config::builder().timeout_config(timeout_config).build(); /// ``` pub fn timeout_config(mut self, timeout_config: aws_smithy_types::timeout::Config) -> Self { self.set_timeout_config(Some(timeout_config)); self } /// Set the timeout_config for the builder /// /// # Examples /// /// ```no_run /// # use std::time::Duration; /// use aws_sdk_iot1clickdevices::config::{Builder, Config}; /// use aws_smithy_types::{timeout, tristate::TriState}; /// /// fn set_request_timeout(builder: &mut Builder) { /// let api_timeouts = timeout::Api::new() /// .with_call_attempt_timeout(TriState::Set(Duration::from_secs(1))); /// let timeout_config = timeout::Config::new() /// .with_api_timeouts(api_timeouts); /// builder.set_timeout_config(Some(timeout_config)); /// } /// /// let mut builder = Config::builder(); /// set_request_timeout(&mut builder); /// let config = builder.build(); /// ``` pub fn set_timeout_config( &mut self, timeout_config: Option<aws_smithy_types::timeout::Config>, ) -> &mut Self { self.timeout_config = timeout_config; self } /// Set the sleep_impl for the builder /// /// # Examples /// /// ```no_run /// use aws_sdk_iot1clickdevices::config::Config; /// use aws_smithy_async::rt::sleep::AsyncSleep; /// use aws_smithy_async::rt::sleep::Sleep; /// /// #[derive(Debug)] /// pub struct ForeverSleep; /// /// impl AsyncSleep for ForeverSleep { /// fn sleep(&self, duration: std::time::Duration) -> Sleep { /// Sleep::new(std::future::pending()) /// } /// } /// /// let sleep_impl = std::sync::Arc::new(ForeverSleep); /// let config = Config::builder().sleep_impl(sleep_impl).build(); /// ``` pub fn sleep_impl( mut self, sleep_impl: std::sync::Arc<dyn aws_smithy_async::rt::sleep::AsyncSleep>, ) -> Self { self.set_sleep_impl(Some(sleep_impl)); self } /// Set the sleep_impl for the builder /// /// # Examples /// /// ```no_run /// use aws_sdk_iot1clickdevices::config::{Builder, Config}; /// use aws_smithy_async::rt::sleep::AsyncSleep; /// use aws_smithy_async::rt::sleep::Sleep; /// /// #[derive(Debug)] /// pub struct ForeverSleep; /// /// impl AsyncSleep for ForeverSleep { /// fn sleep(&self, duration: std::time::Duration) -> Sleep { /// Sleep::new(std::future::pending()) /// } /// } /// /// fn set_never_ending_sleep_impl(builder: &mut Builder) { /// let sleep_impl = std::sync::Arc::new(ForeverSleep); /// builder.set_sleep_impl(Some(sleep_impl)); /// } /// /// let mut builder = Config::builder(); /// set_never_ending_sleep_impl(&mut builder); /// let config = builder.build(); /// ``` pub fn set_sleep_impl( &mut self, sleep_impl: Option<std::sync::Arc<dyn aws_smithy_async::rt::sleep::AsyncSleep>>, ) -> &mut Self { self.sleep_impl = sleep_impl; self } /// Set the retry_config for the builder /// /// # Examples /// ```no_run /// use aws_sdk_iot1clickdevices::config::Config; /// use aws_smithy_types::retry::RetryConfig; /// /// let retry_config = RetryConfig::new().with_max_attempts(5); /// let config = 
Config::builder().retry_config(retry_config).build();
    /// ```
    pub fn retry_config(mut self, retry_config: aws_smithy_types::retry::RetryConfig) -> Self {
        self.set_retry_config(Some(retry_config));
        self
    }

    /// Set the retry_config for the builder
    ///
    /// # Examples
    /// ```no_run
    /// use aws_sdk_iot1clickdevices::config::{Builder, Config};
    /// use aws_smithy_types::retry::RetryConfig;
    ///
    /// fn disable_retries(builder: &mut Builder) {
    ///     let retry_config = RetryConfig::new().with_max_attempts(1);
    ///     builder.set_retry_config(Some(retry_config));
    /// }
    ///
    /// let mut builder = Config::builder();
    /// disable_retries(&mut builder);
    /// let config = builder.build();
    /// ```
    pub fn set_retry_config(
        &mut self,
        retry_config: Option<aws_smithy_types::retry::RetryConfig>,
    ) -> &mut Self {
        self.retry_config = retry_config;
        self
    }

    /// Overrides the endpoint resolver to use when making requests.
    ///
    /// When unset, the client will use a generated endpoint resolver based on the endpoint metadata
    /// for `aws_sdk_iot1clickdevices`.
    ///
    /// # Examples
    /// ```no_run
    /// use aws_types::region::Region;
    /// use aws_sdk_iot1clickdevices::config::{Builder, Config};
    /// use aws_sdk_iot1clickdevices::Endpoint;
    ///
    /// let config = aws_sdk_iot1clickdevices::Config::builder()
    ///     .endpoint_resolver(
    ///         Endpoint::immutable("http://localhost:8080".parse().expect("valid URI"))
    ///     ).build();
    /// ```
    pub fn endpoint_resolver(
        mut self,
        endpoint_resolver: impl aws_endpoint::ResolveAwsEndpoint + 'static,
    ) -> Self {
        self.endpoint_resolver = Some(::std::sync::Arc::new(endpoint_resolver));
        self
    }

    /// Sets the endpoint resolver to use when making requests.
    pub fn set_endpoint_resolver(
        &mut self,
        endpoint_resolver: Option<std::sync::Arc<dyn aws_endpoint::ResolveAwsEndpoint>>,
    ) -> &mut Self {
        self.endpoint_resolver = endpoint_resolver;
        self
    }

    /// Sets the AWS region to use when making requests.
    ///
    /// # Examples
    /// ```no_run
    /// use aws_types::region::Region;
    /// use aws_sdk_iot1clickdevices::config::{Builder, Config};
    ///
    /// let config = aws_sdk_iot1clickdevices::Config::builder()
    ///     .region(Region::new("us-east-1"))
    ///     .build();
    /// ```
    pub fn region(mut self, region: impl Into<Option<aws_types::region::Region>>) -> Self {
        self.region = region.into();
        self
    }

    /// Sets the credentials provider for this service
    pub fn credentials_provider(
        mut self,
        credentials_provider: impl aws_types::credentials::ProvideCredentials + 'static,
    ) -> Self {
        self.credentials_provider = Some(aws_types::credentials::SharedCredentialsProvider::new(
            credentials_provider,
        ));
        self
    }

    /// Sets the credentials provider for this service
    pub fn set_credentials_provider(
        &mut self,
        credentials_provider: Option<aws_types::credentials::SharedCredentialsProvider>,
    ) -> &mut Self {
        self.credentials_provider = credentials_provider;
        self
    }

    /// Builds a [`Config`].
    pub fn build(self) -> Config {
        Config {
            app_name: self.app_name,
            timeout_config: self.timeout_config,
            sleep_impl: self.sleep_impl,
            retry_config: self.retry_config,
            endpoint_resolver: self
                .endpoint_resolver
                .unwrap_or_else(|| ::std::sync::Arc::new(crate::aws_endpoint::endpoint_resolver())),
            region: self.region,
            credentials_provider: self.credentials_provider.unwrap_or_else(|| {
                aws_types::credentials::SharedCredentialsProvider::new(
                    crate::no_credentials::NoCredentials,
                )
            }),
        }
    }
}

impl From<&aws_types::sdk_config::SdkConfig> for Builder {
    fn from(input: &aws_types::sdk_config::SdkConfig) -> Self
} impl From<&aws_types::sdk_config::SdkConfig> for Config { fn from(sdk_config: &aws_types::sdk_config::SdkConfig) -> Self { Builder::from(sdk_config).build() } }
{ let mut builder = Builder::default(); builder = builder.region(input.region().cloned()); builder.set_endpoint_resolver(input.endpoint_resolver().clone()); builder.set_retry_config(input.retry_config().cloned()); builder.set_timeout_config(input.timeout_config().cloned()); builder.set_sleep_impl(input.sleep_impl().clone()); builder.set_credentials_provider(input.credentials_provider().cloned()); builder.set_app_name(input.app_name().cloned()); builder }
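// --- Editorial sketch (not generated code): the `From` impls above are what
// let a shared config drive this service config. Equivalent spellings, shown
// for exposition only:
//
// let sdk_config = aws_config::from_env().load().await;
// let config = aws_sdk_iot1clickdevices::Config::new(&sdk_config);
// // or, explicitly through the conversion:
// let config = aws_sdk_iot1clickdevices::Config::from(&sdk_config);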
AdaBoost.py
from sklearn.ensemble import AdaBoostClassifier from sklearn.tree import DecisionTreeClassifier from sklearn.datasets import make_gaussian_quantiles from sklearn.ensemble import RandomForestClassifier def
(X_train, y_train, X_test, DEPTH, N_ESTIMATORS):
    # Create and fit an AdaBoosted decision tree
    bdt = AdaBoostClassifier(DecisionTreeClassifier(max_depth=DEPTH),
                             algorithm="SAMME",
                             n_estimators=N_ESTIMATORS)
    bdt.fit(X_train, y_train)

    # Test classifier on test data
    y_out = bdt.predict(X_test)

    return y_out

def ForestIB(X_train, y_train, X_test, DEPTH, N_ESTIMATORS):
    # Create the random forest object which will include all the parameters
    # for the fit
    forest = RandomForestClassifier(n_estimators=N_ESTIMATORS, max_depth=DEPTH,
                                    max_features=DEPTH, criterion='gini',
                                    n_jobs=-1)

    # Fit the training data to the training labels and create the decision trees
    forest = forest.fit(X_train, y_train)

    # Run the same decision trees on the test data
    y_out = forest.predict(X_test)

    return y_out
AdaBoost
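# --- Editorial sketch (not in the original module): exercising both helpers
# on a synthetic problem. make_gaussian_quantiles is imported above but never
# used; the split sizes and hyperparameters below are arbitrary placeholders.
if __name__ == '__main__':
    X, y = make_gaussian_quantiles(n_samples=1000, n_features=4, n_classes=2)
    X_train, y_train, X_test, y_test = X[:800], y[:800], X[800:], y[800:]
    y_ada = AdaBoost(X_train, y_train, X_test, DEPTH=2, N_ESTIMATORS=100)
    y_rf = ForestIB(X_train, y_train, X_test, DEPTH=2, N_ESTIMATORS=100)
    print('AdaBoost accuracy:', (y_ada == y_test).mean())
    print('Random forest accuracy:', (y_rf == y_test).mean())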
main.rs
use chrono::NaiveDate; use near_sdk::borsh::{self, BorshSerialize}; use near_sdk::json_types::ValidAccountId; use near_sdk::serde::Deserialize; use near_sdk::{AccountId, CryptoHash}; use std::collections::BTreeMap; use std::env; use std::fs::File; use std::io::Write; #[derive(Debug, Deserialize)] #[serde(crate = "near_sdk::serde")] struct Record { pub account_id: ValidAccountId, pub start_date: String, pub cliff_date: String, pub end_date: String, pub balance: u128, } #[derive(BorshSerialize)] pub struct FixedSizeAccount { pub account_hash: CryptoHash, pub start_timestamp: u32, pub cliff_timestamp: u32, pub end_timestamp: u32, pub balance: u128, } fn parse_date(s: &str) -> Option<u32> { let dt = NaiveDate::parse_from_str(s, "%Y-%m-%d").ok()?; Some(dt.and_hms(0, 0, 0).timestamp() as u32) } fn hash_account(account_id: &AccountId) -> CryptoHash { use sha2::Digest; let value_hash = sha2::Sha256::digest(account_id.as_bytes()); let mut res = CryptoHash::default(); res.copy_from_slice(&value_hash); res } const MAX_PER_FILE: usize = 10000; pub fn main() { let file_path = env::args_os() .nth(1) .expect("Missing input csv file name argument"); let output_file_path = env::args_os() .nth(2) .expect("Missing output borsh file name argument") .into_string() .unwrap(); let balance_multiplier: u128 = env::args_os() .nth(3) .map(|s| { s.into_string() .expect("Failed to parse balance multiplier") .parse() .unwrap() }) .unwrap_or(1); assert!( balance_multiplier > 0, "Balance multiplier should be positive" ); let file = File::open(file_path).unwrap(); let mut rdr = csv::Reader::from_reader(file); let mut total_accounts: usize = 0; let mut total_balance: u128 = 0; let mut min_start_timestamp = u32::MAX; let mut max_end_timestamp = 0; let mut accounts = BTreeMap::new(); for result in rdr.deserialize() { let Record { account_id, start_date, cliff_date, end_date, balance, } = result.unwrap(); let account_id_str: AccountId = account_id.into(); let start_timestamp = parse_date(&start_date).unwrap(); let cliff_timestamp = parse_date(&cliff_date).unwrap_or(start_timestamp); let end_timestamp = parse_date(&end_date).unwrap(); assert!(start_timestamp <= cliff_timestamp); assert!(cliff_timestamp <= end_timestamp); assert!(start_timestamp < end_timestamp); assert!(balance > 0);
let account_hash = hash_account(&account_id_str); let balance = balance .checked_mul(balance_multiplier) .expect("Balance multiplication overflow"); total_accounts += 1; total_balance = total_balance .checked_add(balance) .expect("Total balance overflow"); println!( "{:30} -> {} {} {} -> {}", balance, start_timestamp, cliff_timestamp, end_timestamp, account_id_str ); let account = FixedSizeAccount { account_hash, start_timestamp, cliff_timestamp, end_timestamp, balance, }; assert!(accounts.insert(account_hash, account).is_none()); } println!("Total number of accounts {}\nTotal balance: {}\nTotal multiplied balance: {}\nMinimum start timestamp: {}\nMaximum end timestamp: {}", total_accounts, total_balance / balance_multiplier, total_balance, min_start_timestamp, max_end_timestamp, ); let mut index = 0; let values: Vec<_> = accounts.values().collect(); for chunk in values.chunks(MAX_PER_FILE) { let output_file = format!("{}{}.borsh", output_file_path, index); let mut total_balance = 0; let mut data = vec![]; for account in chunk { total_balance += account.balance; data.extend(account.try_to_vec().unwrap()); } println!("File {}: balance {}", output_file, total_balance); let mut file = File::create(output_file).expect("Failed to create the output file"); file.write_all(&data).expect("Failed to write data"); index += 1; } }
min_start_timestamp = std::cmp::min(min_start_timestamp, start_timestamp); max_end_timestamp = std::cmp::max(max_end_timestamp, end_timestamp);
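// --- Editorial note (assumption, not from the source): with serde's default
// field mapping, the CSV consumed above is expected to carry a header row
// matching the Record struct, e.g.
//
// account_id,start_date,cliff_date,end_date,balance
// alice.near,2021-01-01,2021-06-01,2023-01-01,1000000
//
// Dates must be %Y-%m-%d (see parse_date); a cliff_date that fails to parse
// (e.g. an empty cell) falls back to start_timestamp via unwrap_or.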
race-benchmark.py
from decimal import * def queueRequests(target, wordlists): req = '''GET /time.php HTTP/1.1 Host: portswigger-labs.net Accept-Encoding: gzip, deflate Accept: */* Accept-Language: en User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0 Connection: keep-alive ''' window = [] samples = 30 for i in range(samples): engine = RequestEngine(endpoint='https://portswigger-labs.net:443', concurrentConnections=5, requestsPerConnection=1, pipeline=False ) engine.userState['results'] = [] engine.userState['window'] = window engine.start(timeout=5) for k in range(5): engine.queue(req, gate='race1') engine.openGate('race1') engine.complete(timeout=60) window.sort() print max(window) print min(window) print window[(samples/2)-1] def handleResponse(req, interesting):
table.add(req) timestamp = req.response.splitlines()[-1].rstrip('\x00') req.engine.userState['results'].append(Decimal(timestamp)) if len(req.engine.userState['results']) == 5: sorted = req.engine.userState['results'] sorted.sort() req.engine.userState['window'].append(sorted[1] - sorted[0])
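# --- Editorial note (not in the original script): each of the 30 samples
# spins up an engine that fires five gated requests over five connections
# (requestsPerConnection=1). handleResponse parses a timestamp from the last
# line of each response body (trailing NULs stripped); once all five have
# arrived it records sorted[1] - sorted[0], the gap between the two earliest
# server-side timestamps, into the shared window list. queueRequests then
# prints the max, min and median of those 30 race-window measurements.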
MoonText.tsx
/** * Copyright (c) Caiden Sanders and his affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ // CSS Frameworks & Animation Imports import styled from 'styled-components'; import { m as motion } from 'framer-motion'; // Type Declarations interface MoonTextProps { mouseX: number; mouseY: number; controls: any; variants: any; } // MoonTextWrapper CSS Element const MoonTextWrapper = styled(motion.div)` position: absolute; left: -15.5963%; top: 50%; -webkit-transform: translate3d(0, -50%, 0); transform: translate3d(0, -50%, 0); `; // MoonText CSS Element const MoonTextCSS = styled.p` font-size: 4rem; font-weight: 900; letter-spacing: 0.1em; text-shadow: 0 10px 15px rgba(2, 11, 22, 0.2); color: #ff4d5a; @media screen and (max-width: 800px) { font-size: 2rem; } @media screen and (max-width: 420px) { font-size: 1.5rem; text-shadow: 0 4px 6px rgba(2, 11, 22, 0.2); } `; // MoonText React Component const MoonText = (props: MoonTextProps) => { return ( <motion.div
className={'front'} animate={props.controls} initial={'visible'} variants={props.variants} > <MoonTextWrapper animate={{ x: props.mouseX / 45, y: props.mouseY / 45, }} transition={{ type: 'spring', stiffness: 100 }} > <MoonTextCSS>PORTFOLIO</MoonTextCSS> </MoonTextWrapper> </motion.div> ); }; // Default Export MoonText React Component export default MoonText;
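// --- Editorial sketch (hypothetical usage, not part of this file): a parent
// tracks the cursor and passes coordinates down, so the wrapper above drifts
// at 1/45th of the mouse movement via the spring transition.
//
// <MoonText
//   mouseX={cursorX}
//   mouseY={cursorY}
//   controls={animationControls}
//   variants={fadeVariants}
// />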
platform-browser-animations.umd.js
/** * @license Angular v5.0.1 * (c) 2010-2017 Google, Inc. https://angular.io/ * License: MIT */ (function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@angular/core'), require('@angular/platform-browser'), require('@angular/animations'), require('@angular/animations/browser')) : typeof define === 'function' && define.amd ? define('@angular/platform-browser/animations', ['exports', '@angular/core', '@angular/platform-browser', '@angular/animations', '@angular/animations/browser'], factory) : (factory((global.ng = global.ng || {}, global.ng.platformBrowser = global.ng.platformBrowser || {}, global.ng.platformBrowser.animations = {}),global.ng.core,global.ng.platformBrowser,global.ng.animations,global.ng.animations.browser)); }(this, (function (exports,_angular_core,_angular_platformBrowser,_angular_animations,_angular_animations_browser) { 'use strict'; /*! ***************************************************************************** Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT. See the Apache Version 2.0 License for specific language governing permissions and limitations under the License. ***************************************************************************** */ /* global Reflect, Promise */ var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; function __extends(d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); } /** * @license Angular v5.0.1 * (c) 2010-2017 Google, Inc. https://angular.io/ * License: MIT */ /** * @fileoverview added by tsickle * @suppress {checkTypes} checked by tsc */ /** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ var BrowserAnimationBuilder = (function (_super) { __extends(BrowserAnimationBuilder, _super); function BrowserAnimationBuilder(rootRenderer, doc) { var _this = _super.call(this) || this; _this._nextAnimationId = 0; var /** @type {?} */ typeData = /** @type {?} */ ({ id: '0', encapsulation: _angular_core.ViewEncapsulation.None, styles: [], data: { animation: [] } }); _this._renderer = /** @type {?} */ (rootRenderer.createRenderer(doc.body, typeData)); return _this; } /** * @param {?} animation * @return {?} */ BrowserAnimationBuilder.prototype.build = /** * @param {?} animation * @return {?} */ function (animation) { var /** @type {?} */ id = this._nextAnimationId.toString(); this._nextAnimationId++; var /** @type {?} */ entry = Array.isArray(animation) ? 
_angular_animations.sequence(animation) : animation; issueAnimationCommand(this._renderer, null, id, 'register', [entry]); return new BrowserAnimationFactory(id, this._renderer); }; BrowserAnimationBuilder.decorators = [ { type: _angular_core.Injectable }, ]; /** @nocollapse */ BrowserAnimationBuilder.ctorParameters = function () { return [ { type: _angular_core.RendererFactory2, }, { type: undefined, decorators: [{ type: _angular_core.Inject, args: [_angular_platformBrowser.DOCUMENT,] },] }, ]; }; return BrowserAnimationBuilder; }(_angular_animations.AnimationBuilder)); var BrowserAnimationFactory = (function (_super) { __extends(BrowserAnimationFactory, _super); function BrowserAnimationFactory(_id, _renderer) { var _this = _super.call(this) || this; _this._id = _id; _this._renderer = _renderer; return _this; } /** * @param {?} element * @param {?=} options * @return {?} */ BrowserAnimationFactory.prototype.create = /** * @param {?} element * @param {?=} options * @return {?} */ function (element, options) { return new RendererAnimationPlayer(this._id, element, options || {}, this._renderer); }; return BrowserAnimationFactory; }(_angular_animations.AnimationFactory)); var RendererAnimationPlayer = (function () { function RendererAnimationPlayer(id, element, options, _renderer) { this.id = id; this.element = element; this._renderer = _renderer; this.parentPlayer = null; this._started = false; this.totalTime = 0; this._command('create', options); } /** * @param {?} eventName * @param {?} callback * @return {?} */ RendererAnimationPlayer.prototype._listen = /** * @param {?} eventName * @param {?} callback * @return {?} */ function (eventName, callback) { return this._renderer.listen(this.element, "@@" + this.id + ":" + eventName, callback); }; /** * @param {?} command * @param {...?} args * @return {?} */ RendererAnimationPlayer.prototype._command = /** * @param {?} command * @param {...?} args * @return {?} */ function (command) { var args = []; for (var _i = 1; _i < arguments.length; _i++) { args[_i - 1] = arguments[_i]; } return issueAnimationCommand(this._renderer, this.element, this.id, command, args); }; /** * @param {?} fn * @return {?} */ RendererAnimationPlayer.prototype.onDone = /** * @param {?} fn * @return {?} */ function (fn) { this._listen('done', fn); }; /** * @param {?} fn * @return {?} */ RendererAnimationPlayer.prototype.onStart = /** * @param {?} fn * @return {?} */ function (fn) { this._listen('start', fn); }; /** * @param {?} fn * @return {?} */ RendererAnimationPlayer.prototype.onDestroy = /** * @param {?} fn * @return {?} */ function (fn) { this._listen('destroy', fn); }; /** * @return {?} */ RendererAnimationPlayer.prototype.init = /** * @return {?} */ function () { this._command('init'); }; /** * @return {?} */ RendererAnimationPlayer.prototype.hasStarted = /** * @return {?} */ function () { return this._started; }; /** * @return {?} */ RendererAnimationPlayer.prototype.play = /** * @return {?} */ function () { this._command('play'); this._started = true; }; /** * @return {?} */ RendererAnimationPlayer.prototype.pause = /** * @return {?} */ function () { this._command('pause'); }; /** * @return {?} */ RendererAnimationPlayer.prototype.restart = /** * @return {?} */ function () { this._command('restart'); }; /** * @return {?} */ RendererAnimationPlayer.prototype.finish = /** * @return {?} */ function () { this._command('finish'); }; /** * @return {?} */ RendererAnimationPlayer.prototype.destroy = /** * @return {?} */ function () { this._command('destroy'); }; /** * 
@return {?} */ RendererAnimationPlayer.prototype.reset = /** * @return {?} */ function () { this._command('reset'); }; /** * @param {?} p * @return {?} */ RendererAnimationPlayer.prototype.setPosition = /** * @param {?} p * @return {?} */ function (p) { this._command('setPosition', p); }; /** * @return {?} */ RendererAnimationPlayer.prototype.getPosition = /** * @return {?} */ function () { return 0; }; return RendererAnimationPlayer; }()); /** * @param {?} renderer * @param {?} element * @param {?} id * @param {?} command * @param {?} args * @return {?} */ function issueAnimationCommand(renderer, element, id, command, args) { return renderer.setProperty(element, "@@" + id + ":" + command, args); } /** * @fileoverview added by tsickle * @suppress {checkTypes} checked by tsc */ var ANIMATION_PREFIX = '@'; var DISABLE_ANIMATIONS_FLAG = '@.disabled'; var AnimationRendererFactory = (function () { function AnimationRendererFactory(delegate, engine, _zone) { this.delegate = delegate; this.engine = engine; this._zone = _zone; this._currentId = 0; this._microtaskId = 1; this._animationCallbacksBuffer = []; this._rendererCache = new Map(); this._cdRecurDepth = 0; engine.onRemovalComplete = function (element, delegate) { // Note: if an component element has a leave animation, and the component // a host leave animation, the view engine will call `removeChild` for the parent // component renderer as well as for the child component renderer. // Therefore, we need to check if we already removed the element. if (delegate && delegate.parentNode(element)) { delegate.removeChild(element.parentNode, element); } }; } /** * @param {?} hostElement * @param {?} type * @return {?} */ AnimationRendererFactory.prototype.createRenderer = /** * @param {?} hostElement * @param {?} type * @return {?} */ function (hostElement, type) { var _this = this; var /** @type {?} */ EMPTY_NAMESPACE_ID = ''; // cache the delegates to find out which cached delegate can // be used by which cached renderer var /** @type {?} */ delegate = this.delegate.createRenderer(hostElement, type); if (!hostElement || !type || !type.data || !type.data['animation']) { var /** @type {?} */ renderer = this._rendererCache.get(delegate); if (!renderer) { renderer = new BaseAnimationRenderer(EMPTY_NAMESPACE_ID, delegate, this.engine); // only cache this result when the base renderer is used this._rendererCache.set(delegate, renderer); } return renderer; } var /** @type {?} */ componentId = type.id; var /** @type {?} */ namespaceId = type.id + '-' + this._currentId; this._currentId++; this.engine.register(namespaceId, hostElement); var /** @type {?} */ animationTriggers = /** @type {?} */ (type.data['animation']); animationTriggers.forEach(function (trigger) { return _this.engine.registerTrigger(componentId, namespaceId, hostElement, trigger.name, trigger); }); return new AnimationRenderer(this, namespaceId, delegate, this.engine); }; /** * @return {?} */ AnimationRendererFactory.prototype.begin = /** * @return {?} */ function () { this._cdRecurDepth++; if (this.delegate.begin) { this.delegate.begin(); } }; /** * @return {?} */ AnimationRendererFactory.prototype._scheduleCountTask = /** * @return {?} */ function () { var _this = this; Zone.current.scheduleMicroTask('incremenet the animation microtask', function () { return _this._microtaskId++; }); }; /* @internal */ /** * @param {?} count * @param {?} fn * @param {?} data * @return {?} */ AnimationRendererFactory.prototype.scheduleListenerCallback = /** * @param {?} count * @param {?} fn * @param {?} 
data * @return {?} */ function (count, fn, data) { var _this = this; if (count >= 0 && count < this._microtaskId) { this._zone.run(function () { return fn(data); }); return; } if (this._animationCallbacksBuffer.length == 0) { Promise.resolve(null).then(function () { _this._zone.run(function () { _this._animationCallbacksBuffer.forEach(function (tuple) { var fn = tuple[0], data = tuple[1]; fn(data); }); _this._animationCallbacksBuffer = []; }); }); } this._animationCallbacksBuffer.push([fn, data]); }; /** * @return {?} */ AnimationRendererFactory.prototype.end = /** * @return {?} */ function () { var _this = this; this._cdRecurDepth--; // this is to prevent animations from running twice when an inner // component does CD when a parent component insted has inserted it if (this._cdRecurDepth == 0) { this._zone.runOutsideAngular(function () { _this._scheduleCountTask(); _this.engine.flush(_this._microtaskId); }); } if (this.delegate.end) { this.delegate.end(); } }; /** * @return {?} */ AnimationRendererFactory.prototype.whenRenderingDone = /** * @return {?} */ function () { return this.engine.whenRenderingDone(); }; AnimationRendererFactory.decorators = [ { type: _angular_core.Injectable }, ]; /** @nocollapse */ AnimationRendererFactory.ctorParameters = function () { return [ { type: _angular_core.RendererFactory2, }, { type: _angular_animations_browser.ɵAnimationEngine, }, { type: _angular_core.NgZone, }, ]; }; return AnimationRendererFactory; }()); var BaseAnimationRenderer = (function () { function BaseAnimationRenderer(namespaceId, delegate, engine) { this.namespaceId = namespaceId; this.delegate = delegate; this.engine = engine; this.destroyNode = this.delegate.destroyNode ? function (n) { return /** @type {?} */ ((delegate.destroyNode))(n); } : null; } Object.defineProperty(BaseAnimationRenderer.prototype, "data", { get: /** * @return {?} */ function () { return this.delegate.data; }, enumerable: true, configurable: true }); /** * @return {?} */ BaseAnimationRenderer.prototype.destroy = /** * @return {?} */ function () { this.engine.destroy(this.namespaceId, this.delegate); this.delegate.destroy(); }; /** * @param {?} name * @param {?=} namespace * @return {?} */ BaseAnimationRenderer.prototype.createElement = /** * @param {?} name * @param {?=} namespace * @return {?} */ function (name, namespace) { return this.delegate.createElement(name, namespace); }; /** * @param {?} value * @return {?} */ BaseAnimationRenderer.prototype.createComment = /** * @param {?} value * @return {?} */ function (value) { return this.delegate.createComment(value); }; /** * @param {?} value * @return {?} */ BaseAnimationRenderer.prototype.createText = /** * @param {?} value * @return {?} */ function (value) { return this.delegate.createText(value); }; /** * @param {?} parent * @param {?} newChild * @return {?} */ BaseAnimationRenderer.prototype.appendChild = /** * @param {?} parent * @param {?} newChild * @return {?} */ function (parent, newChild) { this.delegate.appendChild(parent, newChild); this.engine.onInsert(this.namespaceId, newChild, parent, false); }; /** * @param {?} parent * @param {?} newChild * @param {?} refChild * @return {?} */ BaseAnimationRenderer.prototype.insertBefore = /** * @param {?} parent * @param {?} newChild * @param {?} refChild * @return {?} */ function (parent, newChild, refChild) { this.delegate.insertBefore(parent, newChild, refChild); this.engine.onInsert(this.namespaceId, newChild, parent, true); };
*/ BaseAnimationRenderer.prototype.removeChild = /** * @param {?} parent * @param {?} oldChild * @return {?} */ function (parent, oldChild) { this.engine.onRemove(this.namespaceId, oldChild, this.delegate); }; /** * @param {?} selectorOrNode * @return {?} */ BaseAnimationRenderer.prototype.selectRootElement = /** * @param {?} selectorOrNode * @return {?} */ function (selectorOrNode) { return this.delegate.selectRootElement(selectorOrNode); }; /** * @param {?} node * @return {?} */ BaseAnimationRenderer.prototype.parentNode = /** * @param {?} node * @return {?} */ function (node) { return this.delegate.parentNode(node); }; /** * @param {?} node * @return {?} */ BaseAnimationRenderer.prototype.nextSibling = /** * @param {?} node * @return {?} */ function (node) { return this.delegate.nextSibling(node); }; /** * @param {?} el * @param {?} name * @param {?} value * @param {?=} namespace * @return {?} */ BaseAnimationRenderer.prototype.setAttribute = /** * @param {?} el * @param {?} name * @param {?} value * @param {?=} namespace * @return {?} */ function (el, name, value, namespace) { this.delegate.setAttribute(el, name, value, namespace); }; /** * @param {?} el * @param {?} name * @param {?=} namespace * @return {?} */ BaseAnimationRenderer.prototype.removeAttribute = /** * @param {?} el * @param {?} name * @param {?=} namespace * @return {?} */ function (el, name, namespace) { this.delegate.removeAttribute(el, name, namespace); }; /** * @param {?} el * @param {?} name * @return {?} */ BaseAnimationRenderer.prototype.addClass = /** * @param {?} el * @param {?} name * @return {?} */ function (el, name) { this.delegate.addClass(el, name); }; /** * @param {?} el * @param {?} name * @return {?} */ BaseAnimationRenderer.prototype.removeClass = /** * @param {?} el * @param {?} name * @return {?} */ function (el, name) { this.delegate.removeClass(el, name); }; /** * @param {?} el * @param {?} style * @param {?} value * @param {?=} flags * @return {?} */ BaseAnimationRenderer.prototype.setStyle = /** * @param {?} el * @param {?} style * @param {?} value * @param {?=} flags * @return {?} */ function (el, style, value, flags) { this.delegate.setStyle(el, style, value, flags); }; /** * @param {?} el * @param {?} style * @param {?=} flags * @return {?} */ BaseAnimationRenderer.prototype.removeStyle = /** * @param {?} el * @param {?} style * @param {?=} flags * @return {?} */ function (el, style, flags) { this.delegate.removeStyle(el, style, flags); }; /** * @param {?} el * @param {?} name * @param {?} value * @return {?} */ BaseAnimationRenderer.prototype.setProperty = /** * @param {?} el * @param {?} name * @param {?} value * @return {?} */ function (el, name, value) { if (name.charAt(0) == ANIMATION_PREFIX && name == DISABLE_ANIMATIONS_FLAG) { this.disableAnimations(el, !!value); } else { this.delegate.setProperty(el, name, value); } }; /** * @param {?} node * @param {?} value * @return {?} */ BaseAnimationRenderer.prototype.setValue = /** * @param {?} node * @param {?} value * @return {?} */ function (node, value) { this.delegate.setValue(node, value); }; /** * @param {?} target * @param {?} eventName * @param {?} callback * @return {?} */ BaseAnimationRenderer.prototype.listen = /** * @param {?} target * @param {?} eventName * @param {?} callback * @return {?} */ function (target, eventName, callback) { return this.delegate.listen(target, eventName, callback); }; /** * @param {?} element * @param {?} value * @return {?} */ BaseAnimationRenderer.prototype.disableAnimations = /** * @param {?} element * 
@param {?} value * @return {?} */ function (element, value) { this.engine.disableAnimations(element, value); }; return BaseAnimationRenderer; }()); var AnimationRenderer = (function (_super) { __extends(AnimationRenderer, _super); function AnimationRenderer(factory, namespaceId, delegate, engine) { var _this = _super.call(this, namespaceId, delegate, engine) || this; _this.factory = factory; _this.namespaceId = namespaceId; return _this; } /** * @param {?} el * @param {?} name * @param {?} value * @return {?} */ AnimationRenderer.prototype.setProperty = /** * @param {?} el * @param {?} name * @param {?} value * @return {?} */ function (el, name, value) { if (name.charAt(0) == ANIMATION_PREFIX) { if (name.charAt(1) == '.' && name == DISABLE_ANIMATIONS_FLAG) { value = value === undefined ? true : !!value; this.disableAnimations(el, /** @type {?} */ (value)); } else { this.engine.process(this.namespaceId, el, name.substr(1), value); } } else { this.delegate.setProperty(el, name, value); } }; /** * @param {?} target * @param {?} eventName * @param {?} callback * @return {?} */ AnimationRenderer.prototype.listen = /** * @param {?} target * @param {?} eventName * @param {?} callback * @return {?} */ function (target, eventName, callback) { var _this = this; if (eventName.charAt(0) == ANIMATION_PREFIX) { var /** @type {?} */ element = resolveElementFromTarget(target); var /** @type {?} */ name_1 = eventName.substr(1); var /** @type {?} */ phase = ''; // @listener.phase is for trigger animation callbacks // @@listener is for animation builder callbacks if (name_1.charAt(0) != ANIMATION_PREFIX) { _a = parseTriggerCallbackName(name_1), name_1 = _a[0], phase = _a[1]; } return this.engine.listen(this.namespaceId, element, name_1, phase, function (event) { var /** @type {?} */ countId = (/** @type {?} */ (event))['_data'] || -1; _this.factory.scheduleListenerCallback(countId, callback, event); }); } return this.delegate.listen(target, eventName, callback); var _a; }; return AnimationRenderer; }(BaseAnimationRenderer)); /** * @param {?} target * @return {?} */ function resolveElementFromTarget(target) { switch (target) { case 'body': return document.body; case 'document': return document; case 'window': return window; default: return target; } } /** * @param {?} triggerName * @return {?} */ function parseTriggerCallbackName(triggerName) { var /** @type {?} */ dotIndex = triggerName.indexOf('.'); var /** @type {?} */ trigger = triggerName.substring(0, dotIndex); var /** @type {?} */ phase = triggerName.substr(dotIndex + 1); return [trigger, phase]; } /** * @fileoverview added by tsickle * @suppress {checkTypes} checked by tsc */ /** * @license * Copyright Google Inc. All Rights Reserved. 
* * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ var InjectableAnimationEngine = (function (_super) { __extends(InjectableAnimationEngine, _super); function InjectableAnimationEngine(driver, normalizer) { return _super.call(this, driver, normalizer) || this; } InjectableAnimationEngine.decorators = [ { type: _angular_core.Injectable }, ]; /** @nocollapse */ InjectableAnimationEngine.ctorParameters = function () { return [ { type: _angular_animations_browser.AnimationDriver, }, { type: _angular_animations_browser.ɵAnimationStyleNormalizer, }, ]; }; return InjectableAnimationEngine; }(_angular_animations_browser.ɵAnimationEngine)); /** * @return {?} */ function instantiateSupportedAnimationDriver() { if (_angular_animations_browser.ɵsupportsWebAnimations()) { return new _angular_animations_browser.ɵWebAnimationsDriver(); } return new _angular_animations_browser.ɵNoopAnimationDriver(); } /** * @return {?} */ function instantiateDefaultStyleNormalizer() { return new _angular_animations_browser.ɵWebAnimationsStyleNormalizer(); } /** * @param {?} renderer * @param {?} engine * @param {?} zone * @return {?} */ function instantiateRendererFactory(renderer, engine, zone) { return new AnimationRendererFactory(renderer, engine, zone); } var SHARED_ANIMATION_PROVIDERS = [ { provide: _angular_animations.AnimationBuilder, useClass: BrowserAnimationBuilder }, { provide: _angular_animations_browser.ɵAnimationStyleNormalizer, useFactory: instantiateDefaultStyleNormalizer }, { provide: _angular_animations_browser.ɵAnimationEngine, useClass: InjectableAnimationEngine }, { provide: _angular_core.RendererFactory2, useFactory: instantiateRendererFactory, deps: [_angular_platformBrowser.ɵDomRendererFactory2, _angular_animations_browser.ɵAnimationEngine, _angular_core.NgZone] } ]; /** * Separate providers from the actual module so that we can do a local modification in Google3 to * include them in the BrowserModule. */ var BROWSER_ANIMATIONS_PROVIDERS = [ { provide: _angular_animations_browser.AnimationDriver, useFactory: instantiateSupportedAnimationDriver } ].concat(SHARED_ANIMATION_PROVIDERS); /** * Separate providers from the actual module so that we can do a local modification in Google3 to * include them in the BrowserTestingModule. */ var BROWSER_NOOP_ANIMATIONS_PROVIDERS = [{ provide: _angular_animations_browser.AnimationDriver, useClass: _angular_animations_browser.ɵNoopAnimationDriver }].concat(SHARED_ANIMATION_PROVIDERS); /** * @fileoverview added by tsickle * @suppress {checkTypes} checked by tsc */ /** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ /** * \@experimental Animation support is experimental. */ var BrowserAnimationsModule = (function () { function BrowserAnimationsModule() { } BrowserAnimationsModule.decorators = [ { type: _angular_core.NgModule, args: [{ exports: [_angular_platformBrowser.BrowserModule], providers: BROWSER_ANIMATIONS_PROVIDERS, },] }, ]; /** @nocollapse */ BrowserAnimationsModule.ctorParameters = function () { return []; }; return BrowserAnimationsModule; }()); /** * \@experimental Animation support is experimental. 
*/ var NoopAnimationsModule = (function () { function NoopAnimationsModule() { } NoopAnimationsModule.decorators = [ { type: _angular_core.NgModule, args: [{ exports: [_angular_platformBrowser.BrowserModule], providers: BROWSER_NOOP_ANIMATIONS_PROVIDERS, },] }, ]; /** @nocollapse */ NoopAnimationsModule.ctorParameters = function () { return []; }; return NoopAnimationsModule; }()); exports.BrowserAnimationsModule = BrowserAnimationsModule; exports.NoopAnimationsModule = NoopAnimationsModule; exports.ɵBrowserAnimationBuilder = BrowserAnimationBuilder; exports.ɵBrowserAnimationFactory = BrowserAnimationFactory; exports.ɵAnimationRenderer = AnimationRenderer; exports.ɵAnimationRendererFactory = AnimationRendererFactory; exports.ɵa = BaseAnimationRenderer; exports.ɵf = BROWSER_ANIMATIONS_PROVIDERS; exports.ɵg = BROWSER_NOOP_ANIMATIONS_PROVIDERS; exports.ɵb = InjectableAnimationEngine; exports.ɵd = instantiateDefaultStyleNormalizer; exports.ɵe = instantiateRendererFactory; exports.ɵc = instantiateSupportedAnimationDriver; Object.defineProperty(exports, '__esModule', { value: true }); }))); //# sourceMappingURL=platform-browser-animations.umd.js.map
/** * @param {?} parent * @param {?} oldChild * @return {?}
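// --- Editorial sketch (hypothetical usage, not part of this bundle): an
// application opts into these providers from its TypeScript source by
// importing one of the exported modules; NoopAnimationsModule swaps in the
// no-op driver, which is useful in tests.
//
// @NgModule({
//   imports: [BrowserAnimationsModule],
//   declarations: [AppComponent],
//   bootstrap: [AppComponent]
// })
// export class AppModule {}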
FunctionalConvolution.py
from .LagrangePolynomial import LagrangeExpand
from pytorch_lightning import LightningModule, Trainer
from high_order_layers_torch.PolynomialLayers import *
from torch.nn import Conv2d
import torch.nn as nn
import torch

from .utils import *

def conv2d_wrapper(
    in_channels: int,
    out_channels: int,
    kernel_size: int,
    stride: int = 1,
    padding: int = 0,
    dilation: int = 1,
    groups: int = 1,
    padding_mode: str = 'zeros',
    weight_magnitude: float = 1.0,
    rescale_output: bool = False,
    verbose: bool = False,
    **kwargs
):
    """
    Inputs need to be an exact clone of those in torch conv2d including
    defaults. Function allows you to pass extra arguments without breaking
    conv2d.
    """
    conv = Conv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        dilation=dilation,
        groups=groups,
        # Bias should always be false as the bias is already included in these methods.
        bias=False,
        padding_mode=padding_mode,
    )
    in_features = in_channels * kernel_size * kernel_size

    if verbose is True:
        print('in_channels', in_channels, 'out_channels', out_channels)
        print('conv.weight.shape', conv.weight.shape)

    # We don't want to use the standard conv initialization
    # since this is a bit different.
    if rescale_output is False:
        conv.weight.data.uniform_(-weight_magnitude / in_features,
                                  weight_magnitude / in_features)
    elif rescale_output is True:
        conv.weight.data.uniform_(-weight_magnitude, weight_magnitude)
    else:
        print('Using kaiming for weight initialization')

    return conv

class Expansion2d(nn.Module):
    def __init__(self, basis=None):
        """
        Expand an input by a function defined by basis.

        Args :
            - basis: function to expand input by.
        """
        super().__init__()
        if basis is None:
            raise Exception(
                'You must define the basis function in Expansion2d')
        self.basis = basis

    def build(self, input_shape):
        pass

    def __call__(self, inputs):
        """
        Expand input
        Args :
            inputs : Tensor of shape [batches, channels, height, width]
        Return :
            Tensor of shape [batches, channels*(basis size), height, width]
        """
        res = self.basis(
            inputs)  # outputs [basis_size, batches, channels, height, width]
        res = res.permute(1, 3, 4, 2, 0)
        res = torch.reshape(
            res, [res.shape[0], res.shape[1],
                  res.shape[2], res.shape[3] * res.shape[4]]
        )
        res = res.permute(0, 3, 1, 2)
        return res

class Expansion1d(nn.Module):
    def __init__(self, basis=None):
        """
        Expand an input by a function defined by basis.

        Args :
            - basis: function to expand input by.
        """
        super().__init__()
        if basis is None:
            raise Exception(
                'You must define the basis function in Expansion1d')
        self.basis = basis

    def build(self, input_shape):
        pass

    def __call__(self, inputs):
        """
        Expand input
        Args :
            inputs : Tensor of shape [batches, channels, width]
        Return :
            Tensor of shape [batches, channels*(basis size), width]
        """
        res = self.basis(
            inputs)  # outputs [basis_size, batches, channels, width]
        res = res.permute(1, 3, 2, 0)
        res = torch.reshape(
            res, [res.shape[0], res.shape[1], res.shape[2] * res.shape[3]]
        )
        res = res.permute(0, 2, 1)  # batches, basis_size*channels, width
        return res

class FourierConvolution2d(nn.Module):
    def __init__(self, n: int, in_channels: int, kernel_size: int,
                 length: float = 2.0, rescale_output=False, *args, **kwargs):
        """
        Fourier series convolutional layer.

        Args :
            - n : number of Fourier series components. n=1 is a constant,
              n=3 adds the first sine and cosine components.
            - in_channels : number of input channels
            - kernel_size : size of the kernel
            - length : Range of the polynomial interpolation points.
              length = 2 implies [-1, 1] so the interpolation points are in
              that range. Anything outside that range could grow.
            - rescale_output: If rescale output is True then the output is
              divided by the number of inputs for each output, in effect
              taking the average. This is generally not necessary for the
              Fourier series.
        """
        super().__init__()
        self.poly = Expansion2d(FourierExpand(n, length))
        self._channels = n * in_channels
        self.conv = conv2d_wrapper(in_channels=self._channels,
                                   kernel_size=kernel_size, **kwargs)
        self._total_in = in_channels * kernel_size * kernel_size
        self._rescale = 1.0
        if rescale_output is True:
            self._rescale = 1.0 / self._total_in

    def forward(self, x):
        x = self.poly(x)
        out = self.conv(x)
        return out * self._rescale

class
(nn.Module):
    def __init__(self, n: int, in_channels: int, kernel_size: int,
                 length: float = 2.0, rescale_output=False,
                 periodicity: float = None, *args, **kwargs):
        """
        Polynomial convolutional layer.

        Args :
            - n : number of weights or nodes. Polynomial order is n-1 so
              quadratic would be n=3.
            - in_channels : number of input channels
            - kernel_size : size of the kernel
            - length : Range of the polynomial interpolation points.
              length = 2 implies [-1, 1] so the interpolation points are in
              that range. Anything outside that range could grow.
            - rescale_output: If rescale output is True then the output is
              divided by the number of inputs for each output, in effect
              taking the average.
        """
        super().__init__()
        self.poly = Expansion2d(LagrangeExpand(n, length=length))
        self._channels = n * in_channels
        self.periodicity = periodicity
        self.conv = conv2d_wrapper(in_channels=self._channels,
                                   kernel_size=kernel_size, **kwargs)
        self._total_in = in_channels * kernel_size * kernel_size
        self._rescale = 1.0
        if rescale_output is True:
            self._rescale = 1.0 / self._total_in

    def forward(self, x):
        periodicity = self.periodicity
        if periodicity is not None:
            x = make_periodic(x, periodicity)
        x = self.poly(x)
        out = self.conv(x)
        return out * self._rescale

class PiecewisePolynomialConvolution2d(nn.Module):
    def __init__(self, n: int, segments: int, in_channels: int,
                 kernel_size: int, length: float = 2.0,
                 rescale_output: bool = False, periodicity: float = None,
                 *args, **kwargs):
        """
        Piecewise continuous polynomial convolutional layer. The boundaries
        between the polynomial segments are continuous.

        Args :
            - n : number of weights or nodes. Polynomial order is n-1 so
              quadratic would be n=3.
            - segments: The number of segments in the piecewise polynomial.
            - in_channels : number of input channels
            - kernel_size : size of the kernel
            - length : Range of the piecewise polynomial interpolation
              points. length = 2 implies [-1, 1] so the interpolation points
              are in that range.
            - rescale_output: If rescale output is True then the output is
              divided by the number of inputs for each output, in effect
              taking the average.
        """
        super().__init__()
        self.poly = Expansion2d(
            PiecewisePolynomialExpand(n=n, segments=segments, length=length))
        self._channels = ((n - 1) * segments + 1) * in_channels
        self.periodicity = periodicity
        self.conv = conv2d_wrapper(in_channels=self._channels,
                                   kernel_size=kernel_size, **kwargs)
        self._total_in = in_channels * kernel_size * kernel_size
        self._rescale = 1.0
        if rescale_output is True:
            self._rescale = 1.0 / self._total_in

    def forward(self, x):
        periodicity = self.periodicity
        if periodicity is not None:
            x = make_periodic(x, periodicity)
        x = self.poly(x)
        out = self.conv(x)
        return out * self._rescale

class PiecewiseDiscontinuousPolynomialConvolution2d(nn.Module):
    def __init__(self, n: int, segments: int, in_channels: int,
                 kernel_size: int, length: float = 2.0,
                 rescale_output: bool = False, periodicity: float = None,
                 *args, **kwargs):
        """
        Discontinuous piecewise polynomial convolutional layer. The
        boundaries between the polynomial segments can be discontinuous.

        Args :
            - n : number of weights or nodes. Polynomial order is n-1 so
              quadratic would be n=3.
            - segments: The number of segments in the piecewise polynomial.
            - in_channels : number of input channels
            - kernel_size : size of the kernel
            - length : Range of the piecewise polynomial interpolation
              points. length = 2 implies [-1, 1] so the interpolation points
              are in that range.
            - rescale_output: If rescale output is True then the output is
              divided by the number of inputs for each output, in effect
              taking the average.
""" super().__init__() self.poly = Expansion2d( PiecewiseDiscontinuousPolynomialExpand(n=n, segments=segments, length=length)) self._channels = n*segments*in_channels self.periodicity = periodicity self.conv = conv2d_wrapper(in_channels=self._channels, kernel_size=kernel_size, **kwargs) self._total_in = in_channels*kernel_size*kernel_size self._rescale = 1.0 if rescale_output is True: self._rescale = 1.0/self._total_in def forward(self, x): periodicity = self.periodicity if periodicity is not None: x = make_periodic(x, periodicity) x = self.poly(x) out = self.conv(x) return out*self._rescale
PolynomialConvolution2d
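# --- Editorial sketch (not part of the module): shape check for the
# polynomial layer defined above. `out_channels` is forwarded to
# conv2d_wrapper through **kwargs; the values here are arbitrary.
#
# layer = PolynomialConvolution2d(n=4, in_channels=3, kernel_size=3,
#                                 out_channels=8)
# y = layer(torch.randn(2, 3, 32, 32))
# assert y.shape == (2, 8, 30, 30)  # Lagrange expansion: 3 -> 12 channels, then conv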
test_sparse.py
import pytest from diofant import (I, Matrix, MutableDenseMatrix, MutableSparseMatrix, PurePoly, Rational, ShapeError, SparseMatrix, eye, ones, zeros) from diofant.abc import x, y, z __all__ = () def test_sparse_matrix(): def sparse_eye(n): return SparseMatrix.eye(n) def sparse_zeros(n): return SparseMatrix.zeros(n) # creation args pytest.raises(TypeError, lambda: SparseMatrix(1, 2)) pytest.raises(ValueError, lambda: SparseMatrix(2, 2, (1, 3, 4, 5, 6))) a = SparseMatrix(( (1, 0), (0, 1) )) assert SparseMatrix(a) == a a = MutableSparseMatrix([]) b = MutableDenseMatrix([1, 2]) assert a.row_join(b) == b assert a.col_join(b) == b assert type(a.row_join(b)) == type(a) assert type(a.col_join(b)) == type(a) # test element assignment a = SparseMatrix(( (1, 0), (0, 1) )) a[3] = 4 assert a[1, 1] == 4 a[3] = 1 a[0, 0] = 2 assert a == SparseMatrix(( (2, 0), (0, 1) )) a[1, 0] = 5 assert a == SparseMatrix(( (2, 0), (5, 1) )) a[1, 1] = 0 assert a == SparseMatrix(( (2, 0), (5, 0) )) assert a._smat == {(0, 0): 2, (1, 0): 5} # test_multiplication a = SparseMatrix(( (1, 2), (3, 1), (0, 6), )) b = SparseMatrix(( (1, 2), (3, 0), )) c = a*b assert c[0, 0] == 7 assert c[0, 1] == 2 assert c[1, 0] == 6 assert c[1, 1] == 6 assert c[2, 0] == 18 assert c[2, 1] == 0 c = b * x assert isinstance(c, SparseMatrix) assert c[0, 0] == x assert c[0, 1] == 2*x assert c[1, 0] == 3*x assert c[1, 1] == 0 c = 5 * b assert isinstance(c, SparseMatrix) assert c[0, 0] == 5 assert c[0, 1] == 2*5 assert c[1, 0] == 3*5 assert c[1, 1] == 0 # test_power A = SparseMatrix([[2, 3], [4, 5]]) assert (A**5)[:] == [6140, 8097, 10796, 14237] A = SparseMatrix([[2, 1, 3], [4, 2, 4], [6, 12, 1]]) assert (A**3)[:] == [290, 262, 251, 448, 440, 368, 702, 954, 433] # test_creation a = SparseMatrix([[x, 0], [0, 0]]) m = a assert m.cols == m.rows assert m.cols == 2 assert m[:] == [x, 0, 0, 0] b = SparseMatrix(2, 2, [x, 0, 0, 0]) m = b assert m.cols == m.rows assert m.cols == 2 assert m[:] == [x, 0, 0, 0] assert a == b S = sparse_eye(3) del S[1, :] assert S == SparseMatrix([ [1, 0, 0], [0, 0, 1]]) S = sparse_eye(3) del S[:, 1] assert S == SparseMatrix([ [1, 0], [0, 0], [0, 1]]) S = SparseMatrix.eye(3) S[2, 1] = 2 S.col_swap(1, 0) assert S == SparseMatrix([[0, 1, 0], [1, 0, 0], [2, 0, 1]]) S.row_swap(0, 1) assert S == SparseMatrix([[1, 0, 0], [0, 1, 0], [2, 0, 1]]) S.col_swap(0, 1) assert S == SparseMatrix([[0, 1, 0], [1, 0, 0], [0, 2, 1]]) S.row_swap(0, 2) assert S == SparseMatrix([[0, 2, 1], [1, 0, 0], [0, 1, 0]]) S.col_swap(0, 2) assert S == SparseMatrix([[1, 2, 0], [0, 0, 1], [0, 1, 0]]) a = SparseMatrix(1, 2, [1, 2]) b = a.copy() c = a.copy() assert a[0] == 1 del a[0, :] assert a == SparseMatrix(0, 2, []) del b[:, 1] assert b == SparseMatrix(1, 1, [1]) # test_determinant assert SparseMatrix(1, 1, [0]).det() == 0 assert SparseMatrix([[1]]).det() == 1 assert SparseMatrix(((-3, 2), (8, -5))).det() == -1 assert SparseMatrix(((x, 1), (y, 2*y))).det() == 2*x*y - y assert SparseMatrix(( (1, 1, 1), (1, 2, 3), (1, 3, 6) )).det() == 1 assert SparseMatrix(( ( 3, -2, 0, 5), (-2, 1, -2, 2), ( 0, -2, 5, 0), ( 5, 0, 3, 4) )).det() == -289 assert SparseMatrix(( ( 1, 2, 3, 4), ( 5, 6, 7, 8), ( 9, 10, 11, 12), (13, 14, 15, 16) )).det() == 0 assert SparseMatrix(( (3, 2, 0, 0, 0), (0, 3, 2, 0, 0), (0, 0, 3, 2, 0), (0, 0, 0, 3, 2), (2, 0, 0, 0, 3) )).det() == 275 assert SparseMatrix(( (1, 0, 1, 2, 12), (2, 0, 1, 1, 4), (2, 1, 1, -1, 3), (3, 2, -1, 1, 8), (1, 1, 1, 0, 6) )).det() == -55 assert SparseMatrix(( (-5, 2, 3, 4, 5), ( 1, -4, 3, 4, 5), ( 1, 2, -3, 4, 5), ( 1, 2, 3, 
-2, 5), ( 1, 2, 3, 4, -1) )).det() == 11664 assert SparseMatrix(( ( 2, 7, -1, 3, 2), ( 0, 0, 1, 0, 1), (-2, 0, 7, 0, 2), (-3, -2, 4, 5, 3), ( 1, 0, 0, 0, 1) )).det() == 123 # test_slicing m0 = sparse_eye(4) assert m0[:3, :3] == sparse_eye(3) assert m0[2:4, 0:2] == sparse_zeros(2) m1 = SparseMatrix(3, 3, lambda i, j: i + j) assert m1[0, :] == SparseMatrix(1, 3, (0, 1, 2)) assert m1[1:3, 1] == SparseMatrix(2, 1, (2, 3)) m2 = SparseMatrix( [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]]) assert m2[:, -1] == SparseMatrix(4, 1, [3, 7, 11, 15]) assert m2[-2:, :] == SparseMatrix([[8, 9, 10, 11], [12, 13, 14, 15]]) assert SparseMatrix([[1, 2], [3, 4]])[[1], [1]] == Matrix([[4]]) # test_submatrix_assignment m = sparse_zeros(4) m[2:4, 2:4] = sparse_eye(2) assert m == SparseMatrix([(0, 0, 0, 0), (0, 0, 0, 0), (0, 0, 1, 0), (0, 0, 0, 1)]) assert len(m._smat) == 2 m[:2, :2] = sparse_eye(2) assert m == sparse_eye(4) m[:, 0] = SparseMatrix(4, 1, (1, 2, 3, 4)) assert m == SparseMatrix([(1, 0, 0, 0), (2, 1, 0, 0), (3, 0, 1, 0), (4, 0, 0, 1)]) m[:, :] = sparse_zeros(4) assert m == sparse_zeros(4) m[:, :] = ((1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12), (13, 14, 15, 16)) assert m == SparseMatrix((( 1, 2, 3, 4), ( 5, 6, 7, 8), ( 9, 10, 11, 12), (13, 14, 15, 16))) m[:2, 0] = [0, 0] assert m == SparseMatrix((( 0, 2, 3, 4), ( 0, 6, 7, 8), ( 9, 10, 11, 12), (13, 14, 15, 16))) # test_reshape m0 = sparse_eye(3) assert m0.reshape(1, 9) == SparseMatrix(1, 9, (1, 0, 0, 0, 1, 0, 0, 0, 1)) m1 = SparseMatrix(3, 4, lambda i, j: i + j) assert m1.reshape(4, 3) == \ SparseMatrix([(0, 1, 2), (3, 1, 2), (3, 4, 2), (3, 4, 5)]) assert m1.reshape(2, 6) == \ SparseMatrix([(0, 1, 2, 3, 1, 2), (3, 4, 2, 3, 4, 5)]) # test_applyfunc m0 = sparse_eye(3) assert m0.applyfunc(lambda x: 2*x) == sparse_eye(3)*2 assert m0.applyfunc(lambda x: 0 ) == sparse_zeros(3) # test_LUdecomp testmat = SparseMatrix([[ 0, 2, 5, 3], [ 3, 3, 7, 4], [ 8, 4, 0, 2], [-2, 6, 3, 4]]) L, U, p = testmat.LUdecomposition() assert L.is_lower assert U.is_upper assert (L*U).permuteBkwd(p) - testmat == sparse_zeros(4) testmat = SparseMatrix([[ 6, -2, 7, 4], [ 0, 3, 6, 7], [ 1, -2, 7, 4], [-9, 2, 6, 3]]) L, U, p = testmat.LUdecomposition() assert L.is_lower assert U.is_upper assert (L*U).permuteBkwd(p) - testmat == sparse_zeros(4) M = Matrix(((1, x, 1), (2, y, 0), (y, 0, z))) L, U, p = M.LUdecomposition() assert L.is_lower assert U.is_upper assert (L*U).permuteBkwd(p) - M == sparse_zeros(3) # test_LUsolve A = SparseMatrix([[2, 3, 5], [3, 6, 2], [8, 3, 6]]) B = SparseMatrix(3, 1, [3, 7, 5]) b = A*B soln = A.LUsolve(b) assert soln == B A = SparseMatrix([[0, -1, 2], [5, 10, 7], [8, 3, 4]]) B = SparseMatrix(3, 1, [-1, 2, 5]) b = A*B soln = A.LUsolve(b) assert soln == B # test_inverse A = sparse_eye(4) assert A.inv() == sparse_eye(4) assert A.inv(method='CH') == sparse_eye(4) assert A.inv(method='LDL') == sparse_eye(4) A = SparseMatrix([[2, 3, 5], [3, 6, 2], [7, 2, 6]]) Ainv = SparseMatrix(Matrix(A).inv()) assert A*Ainv == sparse_eye(3) assert A.inv(method='CH') == Ainv assert A.inv(method='LDL') == Ainv A = SparseMatrix([[2, 3, 5], [3, 6, 2], [5, 2, 6]]) Ainv = SparseMatrix(Matrix(A).inv()) assert A*Ainv == sparse_eye(3) assert A.inv(method='CH') == Ainv assert A.inv(method='LDL') == Ainv # test_cross v1 = Matrix(1, 3, [1, 2, 3]) v2 = Matrix(1, 3, [3, 4, 5]) assert v1.cross(v2) == Matrix(1, 3, [-2, 4, -2]) assert v1.norm(2)**2 == 14 # conjugate a = SparseMatrix(((1, 2 + I), (3, 4))) assert a.C == SparseMatrix([ [1, 2 - I], [3, 4] ]) # mul assert a*Matrix(2, 2, 
[1, 0, 0, 1]) == a assert a + Matrix(2, 2, [1, 1, 1, 1]) == SparseMatrix([ [2, 3 + I], [4, 5] ]) assert a*0 == Matrix([[0, 0], [0, 0]]) # col join assert a.col_join(sparse_eye(2)) == SparseMatrix([ [1, 2 + I], [3, 4], [1, 0], [0, 1] ]) A = SparseMatrix(ones(3)) B = eye(3) assert A.col_join(B) == Matrix([[1, 1, 1], [1, 1, 1], [1, 1, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1]]) # row join A = SparseMatrix(((1, 0, 1), (0, 1, 0), (1, 1, 0))) B = Matrix(((1, 0, 0), (0, 1, 0), (0, 0, 1))) assert A.row_join(B) == Matrix([[1, 0, 1, 1, 0, 0], [0, 1, 0, 0, 1, 0], [1, 1, 0, 0, 0, 1]]) # symmetric assert not a.is_symmetric(simplify=False) assert sparse_eye(3).is_symmetric(simplify=False) # test_cofactor assert sparse_eye(3) == sparse_eye(3).cofactorMatrix() test = SparseMatrix([[1, 3, 2], [2, 6, 3], [2, 3, 6]]) assert test.cofactorMatrix() == \ SparseMatrix([[27, -6, -6], [-12, 2, 3], [-3, 1, 0]]) test = SparseMatrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) assert test.cofactorMatrix() == \ SparseMatrix([[-3, 6, -3], [6, -12, 6], [-3, 6, -3]]) # test_jacobian L = SparseMatrix(1, 2, [x**2*y, 2*y**2 + x*y]) syms = [x, y] assert L.jacobian(syms) == Matrix([[2*x*y, x**2], [y, 4*y + x]]) L = SparseMatrix(1, 2, [x, x**2*y**3]) assert L.jacobian(syms) == SparseMatrix([[1, 0], [2*x*y**3, x**2*3*y**2]]) # test_QR A = Matrix([[1, 2], [2, 3]]) Q, S = A.QRdecomposition() R = Rational assert Q == Matrix([ [ 5**R(-1, 2), (R(2)/5)*(R(1)/5)**R(-1, 2)], [2*5**R(-1, 2), (-R(1)/5)*(R(1)/5)**R(-1, 2)]]) assert S == Matrix([ [5**R(1, 2), 8*5**R(-1, 2)], [ 0, (R(1)/5)**R(1, 2)]]) assert Q*S == A assert Q.T * Q == sparse_eye(2) R = Rational # test nullspace # first test reduced row-ech form M = SparseMatrix([[5, 7, 2, 1], [1, 6, 2, -1]]) out, tmp = M.rref() assert out == Matrix([[1, 0, -R(2)/23, R(13)/23], [0, 1, R(8)/23, R(-6)/23]]) M = SparseMatrix([[ 1, 3, 0, 2, 6, 3, 1], [-2, -6, 0, -2, -8, 3, 1], [ 3, 9, 0, 0, 6, 6, 2], [-1, -3, 0, 1, 0, 9, 3]]) out, tmp = M.rref() assert out == Matrix([[1, 3, 0, 0, 2, 0, 0], [0, 0, 0, 1, 2, 0, 0], [0, 0, 0, 0, 0, 1, R(1)/3], [0, 0, 0, 0, 0, 0, 0]]) # now check the vectors basis = M.nullspace() assert basis[0] == Matrix([-3, 1, 0, 0, 0, 0, 0]) assert basis[1] == Matrix([0, 0, 1, 0, 0, 0, 0]) assert basis[2] == Matrix([-2, 0, 0, -2, 1, 0, 0]) assert basis[3] == Matrix([0, 0, 0, 0, 0, R(-1)/3, 1]) # test eigen sparse_eye3 = sparse_eye(3) assert sparse_eye3.charpoly(x) == PurePoly(((x - 1)**3)) assert sparse_eye3.charpoly(y) == PurePoly(((y - 1)**3)) # test values M = Matrix([( 0, 1, -1), ( 1, 1, 0), (-1, 0, 1)]) vals = M.eigenvals() assert sorted(vals) == [-1, 1, 2] R = Rational M = Matrix([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) assert M.eigenvects() == [(1, 3, [ Matrix([1, 0, 0]), Matrix([0, 1, 0]), Matrix([0, 0, 1])])] M = Matrix([[5, 0, 2], [3, 2, 0], [0, 0, 1]]) assert M.eigenvects() == [(1, 1, [Matrix([R(-1)/2, R(3)/2, 1])]), (2, 1, [Matrix([0, 1, 0])]), (5, 1, [Matrix([1, 1, 0])])] assert M.zeros(3, 5) == SparseMatrix(3, 5, {}) A = SparseMatrix(10, 10, {(0, 0): 18, (0, 9): 12, (1, 4): 18, (2, 7): 16, (3, 9): 12, (4, 2): 19, (5, 7): 16, (6, 2): 12, (9, 7): 18}) assert A.row_list() == [(0, 0, 18), (0, 9, 12), (1, 4, 18), (2, 7, 16), (3, 9, 12), (4, 2, 19), (5, 7, 16), (6, 2, 12), (9, 7, 18)] assert A.col_list() == [(0, 0, 18), (4, 2, 19), (6, 2, 12), (1, 4, 18), (2, 7, 16), (5, 7, 16), (9, 7, 18), (0, 9, 12), (3, 9, 12)] assert SparseMatrix.eye(2).nnz() == 2 M = SparseMatrix.eye(3)*2 M[1, 0] = -1 M.col_op(1, lambda v, i: v + 2*M[i, 0]) assert M == Matrix([[ 2, 4, 0], [-1, 0, 0], [ 0, 0, 2]]) M = 
SparseMatrix.zeros(3) M.fill(1) assert M == ones(3) assert SparseMatrix(ones(0, 3)).tolist() == [] def test_eq():
def test_transpose(): assert SparseMatrix(((1, 2), (3, 4))).transpose() == \ SparseMatrix(((1, 3), (2, 4))) def test_trace(): assert SparseMatrix(((1, 2), (3, 4))).trace() == 5 assert SparseMatrix(((0, 0), (0, 4))).trace() == 4 def test_CL_RL(): assert SparseMatrix(((1, 2), (3, 4))).row_list() == \ [(0, 0, 1), (0, 1, 2), (1, 0, 3), (1, 1, 4)] assert SparseMatrix(((1, 2), (3, 4))).col_list() == \ [(0, 0, 1), (1, 0, 3), (0, 1, 2), (1, 1, 4)] def test_add(): assert SparseMatrix(((1, 0), (0, 1))) + SparseMatrix(((0, 1), (1, 0))) == \ SparseMatrix(((1, 1), (1, 1))) a = SparseMatrix(100, 100, lambda i, j: int(j != 0 and i % j == 0)) b = SparseMatrix(100, 100, lambda i, j: int(i != 0 and j % i == 0)) assert (len(a._smat) + len(b._smat) - len((a + b)._smat) > 0) def test_errors(): pytest.raises(ValueError, lambda: SparseMatrix(1.4, 2, lambda i, j: 0)) pytest.raises(ValueError, lambda: SparseMatrix(2, 2, 1)) pytest.raises(TypeError, lambda: SparseMatrix([1, 2, 3], [1, 2])) pytest.raises(ValueError, lambda: SparseMatrix([[1, 2], [3, 4]])[(1, 2, 3)]) pytest.raises(IndexError, lambda: SparseMatrix([[1, 2], [3, 4]])[5]) pytest.raises(ValueError, lambda: SparseMatrix([[1, 2], [3, 4]])[1, 2, 3]) pytest.raises(TypeError, lambda: SparseMatrix([[1, 2], [3, 4]]).copyin_list([0, 1], set())) pytest.raises(IndexError, lambda: SparseMatrix([[1, 2], [3, 4]])[1, 2]) pytest.raises(TypeError, lambda: SparseMatrix([1, 2, 3]).cross(1)) pytest.raises(IndexError, lambda: SparseMatrix(1, 2, [1, 2])[3]) pytest.raises(ShapeError, lambda: SparseMatrix(1, 2, [1, 2]) + SparseMatrix(2, 1, [2, 1])) pytest.raises(IndexError, lambda: SparseMatrix([1, 2, 3])[3, 0]) pytest.raises(TypeError, lambda: SparseMatrix([1, 2, 3]).applyfunc(1)) pytest.raises(ValueError, lambda: SparseMatrix([1, 2, 3]).reshape(2, 2)) pytest.raises(ValueError, lambda: SparseMatrix([[2, 3], [4, 1]]).cholesky()) pytest.raises(ValueError, lambda: SparseMatrix([[2, 3], [4, 1]]).LDLdecomposition()) pytest.raises(ValueError, lambda: SparseMatrix([[2, 3], [4, 1]]).add(1)) pytest.raises(ShapeError, lambda: SparseMatrix([[1, 2], [3, 4]]).row_join(Matrix([[1, 2]]))) pytest.raises(ShapeError, lambda: SparseMatrix([[1, 2], [3, 4]]).col_join(Matrix([1, 2]))) pytest.raises(ShapeError, lambda: SparseMatrix([[1, 2], [3, 4]]).copyin_matrix([1, 0], Matrix([1, 2]))) def test_len(): assert not SparseMatrix() assert SparseMatrix() == SparseMatrix([]) assert SparseMatrix() == SparseMatrix([[]]) def test_sparse_zeros_sparse_eye(): assert SparseMatrix.eye(3) == eye(3, cls=SparseMatrix) assert len(SparseMatrix.eye(3)._smat) == 3 assert SparseMatrix.zeros(3) == zeros(3, cls=SparseMatrix) assert len(SparseMatrix.zeros(3)._smat) == 0 def test_copyin(): s = SparseMatrix(3, 3, {}) s[1, 0] = 1 assert s[:, 0] == SparseMatrix(Matrix([0, 1, 0])) assert s[3] == 1 assert s[3: 4] == [1] s[1, 1] = 42 assert s[1, 1] == 42 assert s[1, 1:] == SparseMatrix([[42, 0]]) s[1, 1:] = Matrix([[5, 6]]) assert s[1, :] == SparseMatrix([[1, 5, 6]]) s[1, 1:] = [[42, 43]] assert s[1, :] == SparseMatrix([[1, 42, 43]]) s[0, 0] = 17 assert s[:, :1] == SparseMatrix([17, 1, 0]) s[0, 0] = [1, 1, 1] assert s[:, 0] == SparseMatrix([1, 1, 1]) s[0, 0] = Matrix([1, 1, 1]) assert s[:, 0] == SparseMatrix([1, 1, 1]) s[0, 0] = SparseMatrix([1, 1, 1]) assert s[:, 0] == SparseMatrix([1, 1, 1]) def test_sparse_solve(): A = SparseMatrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11))) assert A.cholesky() == Matrix([ [ 5, 0, 0], [ 3, 3, 0], [-1, 1, 3]]) assert A.cholesky() * A.cholesky().T == Matrix([ [25, 15, -5], [15, 18, 0], [-5, 0, 
11]]) A = SparseMatrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11))) L, D = A.LDLdecomposition() assert 15*L == Matrix([ [15, 0, 0], [ 9, 15, 0], [-3, 5, 15]]) assert D == Matrix([ [25, 0, 0], [ 0, 9, 0], [ 0, 0, 9]]) assert L * D * L.T == A A = SparseMatrix(((3, 0, 2), (0, 0, 1), (1, 2, 0))) assert A.inv() * A == SparseMatrix(eye(3)) A = SparseMatrix([ [ 2, -1, 0], [-1, 2, -1], [ 0, 0, 2]]) ans = SparseMatrix([ [Rational(2, 3), Rational(1, 3), Rational(1, 6)], [Rational(1, 3), Rational(2, 3), Rational(1, 3)], [ 0, 0, Rational(1, 2)]]) assert A.inv(method='CH') == ans assert A.inv(method='LDL') == ans assert A * ans == SparseMatrix(eye(3)) s = A.solve(A[:, 0], 'LDL') assert A*s == A[:, 0] s = A.solve(A[:, 0], 'CH') assert A*s == A[:, 0] A = A.col_join(A) s = A.solve_least_squares(A[:, 0], 'CH') assert A*s == A[:, 0] s = A.solve_least_squares(A[:, 0], 'LDL') assert A*s == A[:, 0] pytest.raises(ValueError, lambda: SparseMatrix([[1, 0, 1], [0, 0, 1]]).solve([1, 1])) pytest.raises(ValueError, lambda: SparseMatrix([[1, 0], [0, 0], [2, 1]]).solve([1, 1, 1])) def test_hermitian(): a = SparseMatrix([[0, I], [-I, 0]]) assert a.is_hermitian a = SparseMatrix([[1, I], [-I, 1]]) assert a.is_hermitian a[0, 0] = 2*I assert a.is_hermitian is False a[0, 0] = x assert a.is_hermitian is None a[0, 1] = a[1, 0]*I assert a.is_hermitian is False def test_fill(): a = SparseMatrix([[0, I], [-I, 0]]) a.fill(0) assert a == Matrix([[0, 0], [0, 0]])
A = SparseMatrix(((1, 2), (3, 4))) assert A != 1 assert A != zeros(2, 1)
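As a compact orientation to the API the suite above exercises, here is a minimal sketch (restricted to calls that appear in the tests, such as `_smat` and `LUsolve`) of the sparse storage model and a solve round-trip; the concrete matrix is the one from the Cholesky test.

from diofant import SparseMatrix

# Only nonzero entries are stored, keyed by (row, col).
A = SparseMatrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11)))
assert (0, 2) in A._smat      # -5 is stored
assert (1, 2) not in A._smat  # zeros are not

# Round-trip a linear solve, as the LUsolve tests do.
x = SparseMatrix(3, 1, [1, 2, 3])
b = A * x
assert A.LUsolve(b) == x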
tiger.py
class Tiger:
    def __init__(self, name, gender, age):
        self.name = name
        self.gender = gender
        self.age = age

    def __repr__(self):
        return f"Name: {self.name}, Age: {self.age}, Gender: {self.gender}"

    @staticmethod
    def get_needs():
        return 45
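A short usage sketch of the class above; the tiger's attributes here are made up for illustration.

tiger = Tiger(name="Sheru", gender="male", age=4)
print(tiger)              # Name: Sheru, Age: 4, Gender: male
print(Tiger.get_needs())  # 45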
utils.py
# -*- coding: utf-8 -*-
"""
utils
~~~~~
"""
import numpy as np


def sizes_to_indices(sizes):
    """Convert group sizes to the corresponding index ranges.

    Args:
        sizes (numpy.ndarray): An array of non-negative integers.

    Returns:
        list[numpy.ndarray]: One array of indices per group.
    """
    u_id = np.cumsum(sizes)
    l_id = np.insert(u_id[:-1], 0, 0)
    return [np.arange(l, u) for l, u in zip(l_id, u_id)]
meta.resolver.ts
import {Injectable, Injector} from '@angular/core';
import {Meta, Title} from '@angular/platform-browser';
import {
  ActivatedRouteSnapshot,
  Resolve,
  RouterStateSnapshot
} from '@angular/router';

import {BASE_TITLE} from '../consts/base-title.const';

@Injectable({
  providedIn: 'root'
})
export class MetaResolver implements Resolve<boolean> {
  constructor(
    private _title: Title,
    private _meta: Meta,
    private _injector: Injector
  ) {}

  resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
    let valuesToSet: {[key: string]: string};

    /**
     * If the meta comes with dependencies,
     * we know a factory function was passed in
     */
    if (route.data.metaDeps) {
      valuesToSet = route.data.meta(
        route.data.metaDeps.map(dep => this._injector.get(dep))
      );
    } else {
      valuesToSet = route.data.meta || {};
    }

    this._title.setTitle(
      valuesToSet.title ? `${valuesToSet.title} - ${BASE_TITLE}` : BASE_TITLE
    );

    /**
     * Delete the title so we don't iterate over it
     * and add it as a meta tag
     */
    delete valuesToSet.title;

    /**
     * Written like this instead of Object.entries
     * to support older browsers
     */
    Object.keys(valuesToSet).forEach(name => {
      this._meta.updateTag({name, content: valuesToSet[name]});
    });

    return true;
  }
}
puloon-dispenser.js
var deviceDriver = require('./puloonrs232')
const actionEmitter = require('../action-emitter')

var BillDispenser = function (config) {
  this.device = deviceDriver.factory(config.device)
  this.initialized = false
  this.initializing = false
  this.type = 'Puloon'
}

BillDispenser.factory = function factory (config) {
  return new BillDispenser(config)
}

module.exports = BillDispenser

BillDispenser.prototype._setup = function _setup (data) {
  this.fiatCode = data.fiatCode
}

BillDispenser.prototype.init = function init (data) {
  return new Promise((resolve) => {
    if (this.initializing || this.initialized) return resolve()

    this.initializing = true
    this._setup(data)

    this.device.open(() => {
      return this.reset(data.cassettes)
        .then(() => {
          this.initialized = true
          this.initializing = false
          return resolve()
        })
    })
  })
}

BillDispenser.prototype.reset = function reset (cassettes) {
  return new Promise((resolve, reject) => {
    this.device.reset(cassettes, this.fiatCode, err => {
      if (err) {
        console.log('Serialport error: ' + err.message)
        return reject(err)
      }
      resolve()
    })
  })
}

BillDispenser.prototype.dispense = function dispense (notes) {
  return this.device.dispense(notes)
    .then(function (bills) {
      actionEmitter.emit('billDispenser', { action: 'dispensed', value: bills })
    })
    .catch(err => {
      err.name = 'PuloonDispenseError'
      console.log('dispense error3', err)
      throw err
    })
}

BillDispenser.prototype.billsPresent = function billsPresent () {
  return Promise.resolve(true)
}
node_status.go
package command import ( "fmt" "math" "sort" "strconv" "strings" "time" humanize "github.com/dustin/go-humanize" "github.com/posener/complete" "github.com/hashicorp/nomad/api" "github.com/hashicorp/nomad/api/contexts" "github.com/hashicorp/nomad/helper" ) const ( // floatFormat is a format string for formatting floats. floatFormat = "#,###.##" // bytesPerMegabyte is the number of bytes per MB bytesPerMegabyte = 1024 * 1024 ) type NodeStatusCommand struct { Meta length int short bool verbose bool list_allocs bool self bool stats bool json bool tmpl string } func (c *NodeStatusCommand) Help() string { helpText := ` Usage: nomad node status [options] <node> Display status information about a given node. The list of nodes returned includes only nodes which jobs may be scheduled to, and includes status and other high-level information. If a node ID is passed, information for that specific node will be displayed, including resource usage statistics. If no node ID's are passed, then a short-hand list of all nodes will be displayed. The -self flag is useful to quickly access the status of the local node. General Options: ` + generalOptionsUsage() + ` Node Status Options: -self Query the status of the local node. -stats Display detailed resource usage statistics. -allocs Display a count of running allocations for each node. -short Display short output. Used only when a single node is being queried, and drops verbose output about node allocations. -verbose Display full information. -json Output the node in its JSON format. -t Format and display node using a Go template. ` return strings.TrimSpace(helpText) } func (c *NodeStatusCommand) Synopsis() string { return "Display status information about nodes" } func (c *NodeStatusCommand) AutocompleteFlags() complete.Flags { return mergeAutocompleteFlags(c.Meta.AutocompleteFlags(FlagSetClient), complete.Flags{ "-allocs": complete.PredictNothing, "-json": complete.PredictNothing, "-self": complete.PredictNothing, "-short": complete.PredictNothing, "-stats": complete.PredictNothing, "-t": complete.PredictAnything, "-verbose": complete.PredictNothing, }) } func (c *NodeStatusCommand) AutocompleteArgs() complete.Predictor { return complete.PredictFunc(func(a complete.Args) []string { client, err := c.Meta.Client() if err != nil { return nil } resp, _, err := client.Search().PrefixSearch(a.Last, contexts.Nodes, nil) if err != nil { return []string{} } return resp.Matches[contexts.Nodes] }) } func (c *NodeStatusCommand) Name() string { return "node-status" } func (c *NodeStatusCommand) Run(args []string) int { flags := c.Meta.FlagSet(c.Name(), FlagSetClient) flags.Usage = func() { c.Ui.Output(c.Help()) } flags.BoolVar(&c.short, "short", false, "") flags.BoolVar(&c.verbose, "verbose", false, "") flags.BoolVar(&c.list_allocs, "allocs", false, "") flags.BoolVar(&c.self, "self", false, "") flags.BoolVar(&c.stats, "stats", false, "") flags.BoolVar(&c.json, "json", false, "") flags.StringVar(&c.tmpl, "t", "", "") if err := flags.Parse(args); err != nil { return 1 } // Check that we got either a single node or none args = flags.Args() if len(args) > 1 { c.Ui.Error("This command takes either one or no arguments") c.Ui.Error(commandErrorText(c)) return 1 } // Truncate the id unless full length is requested c.length = shortId if c.verbose { c.length = fullId } // Get the HTTP client client, err := c.Meta.Client() if err != nil { c.Ui.Error(fmt.Sprintf("Error initializing client: %s", err)) return 1 } // Use list mode if no node name was provided if len(args) == 0 && 
!c.self { // Query the node info nodes, _, err := client.Nodes().List(nil) if err != nil { c.Ui.Error(fmt.Sprintf("Error querying node status: %s", err)) return 1 } // If output format is specified, format and output the node data list if c.json || len(c.tmpl) > 0 { out, err := Format(c.json, c.tmpl, nodes) if err != nil { c.Ui.Error(err.Error()) return 1 } c.Ui.Output(out) return 0 } // Return nothing if no nodes found if len(nodes) == 0 { return 0 } // Format the nodes list out := make([]string, len(nodes)+1) out[0] = "ID|DC|Name|Class|" if c.verbose { out[0] += "Address|Version|" } out[0] += "Drain|Eligibility|Status" if c.list_allocs { out[0] += "|Running Allocs" } for i, node := range nodes { out[i+1] = fmt.Sprintf("%s|%s|%s|%s", limit(node.ID, c.length), node.Datacenter, node.Name, node.NodeClass) if c.verbose { out[i+1] += fmt.Sprintf("|%s|%s", node.Address, node.Version) } out[i+1] += fmt.Sprintf("|%v|%s|%s", node.Drain, node.SchedulingEligibility, node.Status) if c.list_allocs { numAllocs, err := getRunningAllocs(client, node.ID) if err != nil { c.Ui.Error(fmt.Sprintf("Error querying node allocations: %s", err)) return 1 } out[i+1] += fmt.Sprintf("|%v", len(numAllocs)) } } // Dump the output c.Ui.Output(formatList(out)) return 0 } // Query the specific node var nodeID string if !c.self { nodeID = args[0] } else { var err error if nodeID, err = getLocalNodeID(client); err != nil { c.Ui.Error(err.Error()) return 1 } } if len(nodeID) == 1 { c.Ui.Error(fmt.Sprintf("Identifier must contain at least two characters.")) return 1 } nodeID = sanitizeUUIDPrefix(nodeID) nodes, _, err := client.Nodes().PrefixList(nodeID) if err != nil { c.Ui.Error(fmt.Sprintf("Error querying node info: %s", err)) return 1 } // Return error if no nodes are found if len(nodes) == 0 { c.Ui.Error(fmt.Sprintf("No node(s) with prefix %q found", nodeID)) return 1 } if len(nodes) > 1 { // Dump the output c.Ui.Error(fmt.Sprintf("Prefix matched multiple nodes\n\n%s", formatNodeStubList(nodes, c.verbose))) return 1 } // Prefix lookup matched a single node node, _, err := client.Nodes().Info(nodes[0].ID, nil) if err != nil { c.Ui.Error(fmt.Sprintf("Error querying node info: %s", err)) return 1 } // If output format is specified, format and output the data if c.json || len(c.tmpl) > 0 { out, err := Format(c.json, c.tmpl, node) if err != nil { c.Ui.Error(err.Error()) return 1 } c.Ui.Output(out) return 0 } return c.formatNode(client, node) } func nodeDrivers(n *api.Node) []string { var drivers []string for k, v := range n.Attributes { // driver.docker = 1 parts := strings.Split(k, ".") if len(parts) != 2 { continue } else if parts[0] != "driver" { continue } else if v != "1" { continue } drivers = append(drivers, parts[1]) } sort.Strings(drivers) return drivers } func formatDrain(n *api.Node) string { if n.DrainStrategy != nil { b := new(strings.Builder) b.WriteString("true") if n.DrainStrategy.DrainSpec.Deadline.Nanoseconds() < 0 { b.WriteString("; force drain") } else if n.DrainStrategy.ForceDeadline.IsZero() { b.WriteString("; no deadline") } else { fmt.Fprintf(b, "; %s deadline", formatTime(n.DrainStrategy.ForceDeadline)) } if n.DrainStrategy.IgnoreSystemJobs { b.WriteString("; ignoring system jobs") } return b.String() } return strconv.FormatBool(n.Drain) } func (c *NodeStatusCommand) formatNode(client *api.Client, node *api.Node) int { // Format the header output basic := []string{ fmt.Sprintf("ID|%s", limit(node.ID, c.length)), fmt.Sprintf("Name|%s", node.Name), fmt.Sprintf("Class|%s", node.NodeClass), 
fmt.Sprintf("DC|%s", node.Datacenter), fmt.Sprintf("Drain|%v", formatDrain(node)), fmt.Sprintf("Eligibility|%s", node.SchedulingEligibility), fmt.Sprintf("Status|%s", node.Status), } if c.short { basic = append(basic, fmt.Sprintf("Drivers|%s", strings.Join(nodeDrivers(node), ","))) c.Ui.Output(c.Colorize().Color(formatKV(basic))) } else { // Get the host stats hostStats, nodeStatsErr := client.Nodes().Stats(node.ID, nil) if nodeStatsErr != nil { c.Ui.Output("") c.Ui.Error(fmt.Sprintf("error fetching node stats: %v", nodeStatsErr)) } if hostStats != nil { uptime := time.Duration(hostStats.Uptime * uint64(time.Second)) basic = append(basic, fmt.Sprintf("Uptime|%s", uptime.String())) } // Emit the driver info if !c.verbose { driverStatus := fmt.Sprintf("Driver Status| %s", c.outputTruncatedNodeDriverInfo(node)) basic = append(basic, driverStatus) } c.Ui.Output(c.Colorize().Color(formatKV(basic))) if c.verbose { c.outputNodeDriverInfo(node) } // Emit node events c.outputNodeStatusEvents(node) // Get list of running allocations on the node runningAllocs, err := getRunningAllocs(client, node.ID) if err != nil { c.Ui.Error(fmt.Sprintf("Error querying node for running allocations: %s", err)) return 1 } allocatedResources := getAllocatedResources(client, runningAllocs, node) c.Ui.Output(c.Colorize().Color("\n[bold]Allocated Resources[reset]")) c.Ui.Output(formatList(allocatedResources)) actualResources, err := getActualResources(client, runningAllocs, node) if err == nil { c.Ui.Output(c.Colorize().Color("\n[bold]Allocation Resource Utilization[reset]")) c.Ui.Output(formatList(actualResources)) } hostResources, err := getHostResources(hostStats, node) if err != nil { c.Ui.Output("") c.Ui.Error(fmt.Sprintf("error fetching node stats: %v", err)) } if err == nil { c.Ui.Output(c.Colorize().Color("\n[bold]Host Resource Utilization[reset]")) c.Ui.Output(formatList(hostResources)) } if err == nil && len(node.NodeResources.Devices) > 0 { c.Ui.Output(c.Colorize().Color("\n[bold]Device Resource Utilization[reset]")) c.Ui.Output(formatList(getDeviceResourcesForNode(hostStats.DeviceStats, node))) } if hostStats != nil && c.stats { c.Ui.Output(c.Colorize().Color("\n[bold]CPU Stats[reset]")) c.printCpuStats(hostStats) c.Ui.Output(c.Colorize().Color("\n[bold]Memory Stats[reset]")) c.printMemoryStats(hostStats) c.Ui.Output(c.Colorize().Color("\n[bold]Disk Stats[reset]")) c.printDiskStats(hostStats) if len(hostStats.DeviceStats) > 0 { c.Ui.Output(c.Colorize().Color("\n[bold]Device Stats[reset]")) printDeviceStats(c.Ui, hostStats.DeviceStats) } } } nodeAllocs, _, err := client.Nodes().Allocations(node.ID, nil) if err != nil { c.Ui.Error(fmt.Sprintf("Error querying node allocations: %s", err)) return 1 } c.Ui.Output(c.Colorize().Color("\n[bold]Allocations[reset]")) c.Ui.Output(formatAllocList(nodeAllocs, c.verbose, c.length)) if c.verbose { c.formatAttributes(node) c.formatMeta(node) } return 0 } func (c *NodeStatusCommand) outputTruncatedNodeDriverInfo(node *api.Node) string { drivers := make([]string, 0, len(node.Drivers)) for driverName, driverInfo := range node.Drivers { if !driverInfo.Detected { continue } if !driverInfo.Healthy { drivers = append(drivers, fmt.Sprintf("%s (unhealthy)", driverName)) } else { drivers = append(drivers, driverName) } } sort.Strings(drivers) return strings.Trim(strings.Join(drivers, ","), ", ") } func (c *NodeStatusCommand) outputNodeDriverInfo(node *api.Node) { c.Ui.Output(c.Colorize().Color("\n[bold]Drivers")) size := len(node.Drivers) nodeDrivers := make([]string, 0, size+1) 
nodeDrivers = append(nodeDrivers, "Driver|Detected|Healthy|Message|Time") drivers := make([]string, 0, len(node.Drivers)) for driver := range node.Drivers { drivers = append(drivers, driver) } sort.Strings(drivers) for _, driver := range drivers { info := node.Drivers[driver] timestamp := formatTime(info.UpdateTime) nodeDrivers = append(nodeDrivers, fmt.Sprintf("%s|%v|%v|%s|%s", driver, info.Detected, info.Healthy, info.HealthDescription, timestamp)) } c.Ui.Output(formatList(nodeDrivers)) } func (c *NodeStatusCommand) outputNodeStatusEvents(node *api.Node) { c.Ui.Output(c.Colorize().Color("\n[bold]Node Events")) c.outputNodeEvent(node.Events) } func (c *NodeStatusCommand) outputNodeEvent(events []*api.NodeEvent) { size := len(events) nodeEvents := make([]string, size+1) if c.verbose { nodeEvents[0] = "Time|Subsystem|Message|Details" } else { nodeEvents[0] = "Time|Subsystem|Message" } for i, event := range events { timestamp := formatTime(event.Timestamp) subsystem := formatEventSubsystem(event.Subsystem, event.Details["driver"]) msg := event.Message if c.verbose { details := formatEventDetails(event.Details) nodeEvents[size-i] = fmt.Sprintf("%s|%s|%s|%s", timestamp, subsystem, msg, details) } else { nodeEvents[size-i] = fmt.Sprintf("%s|%s|%s", timestamp, subsystem, msg) } } c.Ui.Output(formatList(nodeEvents)) } func formatEventSubsystem(subsystem, driverName string) string
func formatEventDetails(details map[string]string) string { output := make([]string, 0, len(details)) for k, v := range details { output = append(output, fmt.Sprintf("%s: %s ", k, v)) } return strings.Join(output, ", ") } func (c *NodeStatusCommand) formatAttributes(node *api.Node) { // Print the attributes keys := make([]string, len(node.Attributes)) for k := range node.Attributes { keys = append(keys, k) } sort.Strings(keys) var attributes []string for _, k := range keys { if k != "" { attributes = append(attributes, fmt.Sprintf("%s|%s", k, node.Attributes[k])) } } c.Ui.Output(c.Colorize().Color("\n[bold]Attributes[reset]")) c.Ui.Output(formatKV(attributes)) } func (c *NodeStatusCommand) formatMeta(node *api.Node) { // Print the meta keys := make([]string, 0, len(node.Meta)) for k := range node.Meta { keys = append(keys, k) } sort.Strings(keys) var meta []string for _, k := range keys { if k != "" { meta = append(meta, fmt.Sprintf("%s|%s", k, node.Meta[k])) } } c.Ui.Output(c.Colorize().Color("\n[bold]Meta[reset]")) c.Ui.Output(formatKV(meta)) } func (c *NodeStatusCommand) printCpuStats(hostStats *api.HostStats) { l := len(hostStats.CPU) for i, cpuStat := range hostStats.CPU { cpuStatsAttr := make([]string, 4) cpuStatsAttr[0] = fmt.Sprintf("CPU|%v", cpuStat.CPU) cpuStatsAttr[1] = fmt.Sprintf("User|%v%%", humanize.FormatFloat(floatFormat, cpuStat.User)) cpuStatsAttr[2] = fmt.Sprintf("System|%v%%", humanize.FormatFloat(floatFormat, cpuStat.System)) cpuStatsAttr[3] = fmt.Sprintf("Idle|%v%%", humanize.FormatFloat(floatFormat, cpuStat.Idle)) c.Ui.Output(formatKV(cpuStatsAttr)) if i+1 < l { c.Ui.Output("") } } } func (c *NodeStatusCommand) printMemoryStats(hostStats *api.HostStats) { memoryStat := hostStats.Memory memStatsAttr := make([]string, 4) memStatsAttr[0] = fmt.Sprintf("Total|%v", humanize.IBytes(memoryStat.Total)) memStatsAttr[1] = fmt.Sprintf("Available|%v", humanize.IBytes(memoryStat.Available)) memStatsAttr[2] = fmt.Sprintf("Used|%v", humanize.IBytes(memoryStat.Used)) memStatsAttr[3] = fmt.Sprintf("Free|%v", humanize.IBytes(memoryStat.Free)) c.Ui.Output(formatKV(memStatsAttr)) } func (c *NodeStatusCommand) printDiskStats(hostStats *api.HostStats) { l := len(hostStats.DiskStats) for i, diskStat := range hostStats.DiskStats { diskStatsAttr := make([]string, 7) diskStatsAttr[0] = fmt.Sprintf("Device|%s", diskStat.Device) diskStatsAttr[1] = fmt.Sprintf("MountPoint|%s", diskStat.Mountpoint) diskStatsAttr[2] = fmt.Sprintf("Size|%s", humanize.IBytes(diskStat.Size)) diskStatsAttr[3] = fmt.Sprintf("Used|%s", humanize.IBytes(diskStat.Used)) diskStatsAttr[4] = fmt.Sprintf("Available|%s", humanize.IBytes(diskStat.Available)) diskStatsAttr[5] = fmt.Sprintf("Used Percent|%v%%", humanize.FormatFloat(floatFormat, diskStat.UsedPercent)) diskStatsAttr[6] = fmt.Sprintf("Inodes Percent|%v%%", humanize.FormatFloat(floatFormat, diskStat.InodesUsedPercent)) c.Ui.Output(formatKV(diskStatsAttr)) if i+1 < l { c.Ui.Output("") } } } // getRunningAllocs returns a slice of allocation id's running on the node func getRunningAllocs(client *api.Client, nodeID string) ([]*api.Allocation, error) { var allocs []*api.Allocation // Query the node allocations nodeAllocs, _, err := client.Nodes().Allocations(nodeID, nil) // Filter list to only running allocations for _, alloc := range nodeAllocs { if alloc.ClientStatus == "running" { allocs = append(allocs, alloc) } } return allocs, err } // getAllocatedResources returns the resource usage of the node. 
func getAllocatedResources(client *api.Client, runningAllocs []*api.Allocation, node *api.Node) []string { // Compute the total total := computeNodeTotalResources(node) // Get Resources var cpu, mem, disk, iops int for _, alloc := range runningAllocs { cpu += *alloc.Resources.CPU mem += *alloc.Resources.MemoryMB disk += *alloc.Resources.DiskMB iops += *alloc.Resources.IOPS } resources := make([]string, 2) resources[0] = "CPU|Memory|Disk|IOPS" resources[1] = fmt.Sprintf("%d/%d MHz|%s/%s|%s/%s|%d/%d", cpu, *total.CPU, humanize.IBytes(uint64(mem*bytesPerMegabyte)), humanize.IBytes(uint64(*total.MemoryMB*bytesPerMegabyte)), humanize.IBytes(uint64(disk*bytesPerMegabyte)), humanize.IBytes(uint64(*total.DiskMB*bytesPerMegabyte)), iops, *total.IOPS) return resources } // computeNodeTotalResources returns the total allocatable resources (resources // minus reserved) func computeNodeTotalResources(node *api.Node) api.Resources { total := api.Resources{} r := node.Resources res := node.Reserved if res == nil { res = &api.Resources{} } total.CPU = helper.IntToPtr(*r.CPU - *res.CPU) total.MemoryMB = helper.IntToPtr(*r.MemoryMB - *res.MemoryMB) total.DiskMB = helper.IntToPtr(*r.DiskMB - *res.DiskMB) total.IOPS = helper.IntToPtr(*r.IOPS - *res.IOPS) return total } // getActualResources returns the actual resource usage of the allocations. func getActualResources(client *api.Client, runningAllocs []*api.Allocation, node *api.Node) ([]string, error) { // Compute the total total := computeNodeTotalResources(node) // Get Resources var cpu float64 var mem uint64 for _, alloc := range runningAllocs { // Make the call to the client to get the actual usage. stats, err := client.Allocations().Stats(alloc, nil) if err != nil { return nil, err } cpu += stats.ResourceUsage.CpuStats.TotalTicks mem += stats.ResourceUsage.MemoryStats.RSS } resources := make([]string, 2) resources[0] = "CPU|Memory" resources[1] = fmt.Sprintf("%v/%d MHz|%v/%v", math.Floor(cpu), *total.CPU, humanize.IBytes(mem), humanize.IBytes(uint64(*total.MemoryMB*bytesPerMegabyte))) return resources, nil } // getHostResources returns the actual resource usage of the node. func getHostResources(hostStats *api.HostStats, node *api.Node) ([]string, error) { if hostStats == nil { return nil, fmt.Errorf("actual resource usage not present") } var resources []string // calculate disk usage storageDevice := node.Attributes["unique.storage.volume"] var diskUsed, diskSize uint64 var physical bool for _, disk := range hostStats.DiskStats { if disk.Device == storageDevice { diskUsed = disk.Used diskSize = disk.Size physical = true } } resources = make([]string, 2) resources[0] = "CPU|Memory|Disk" if physical { resources[1] = fmt.Sprintf("%v/%d MHz|%s/%s|%s/%s", math.Floor(hostStats.CPUTicksConsumed), *node.Resources.CPU, humanize.IBytes(hostStats.Memory.Used), humanize.IBytes(hostStats.Memory.Total), humanize.IBytes(diskUsed), humanize.IBytes(diskSize), ) } else { // If non-physical device are used, output device name only, // since nomad doesn't collect the stats data. resources[1] = fmt.Sprintf("%v/%d MHz|%s/%s|(%s)", math.Floor(hostStats.CPUTicksConsumed), *node.Resources.CPU, humanize.IBytes(hostStats.Memory.Used), humanize.IBytes(hostStats.Memory.Total), storageDevice, ) } return resources, nil } // formatNodeStubList is used to return a table format of a list of node stubs. 
func formatNodeStubList(nodes []*api.NodeListStub, verbose bool) string { // Return error if no nodes are found if len(nodes) == 0 { return "" } // Truncate the id unless full length is requested length := shortId if verbose { length = fullId } // Format the nodes list that matches the prefix so that the user // can create a more specific request out := make([]string, len(nodes)+1) out[0] = "ID|DC|Name|Class|Drain|Eligibility|Status" for i, node := range nodes { out[i+1] = fmt.Sprintf("%s|%s|%s|%s|%v|%s|%s", limit(node.ID, length), node.Datacenter, node.Name, node.NodeClass, node.Drain, node.SchedulingEligibility, node.Status) } return formatList(out) }
{ if driverName == "" { return subsystem } // If this event is for a driver, append the driver name to make the message // clearer return fmt.Sprintf("Driver: %s", driverName) }
vtag.rs
//! This module contains the implementation of a virtual element node `VTag`. use super::{ AttrValue, Attributes, Key, Listener, Listeners, Patch, PositionalAttr, VDiff, VList, VNode, }; use crate::html::{AnyScope, IntoOptPropValue, IntoPropValue, NodeRef}; use crate::utils::document; use gloo::events::EventListener; use log::warn; use std::borrow::Cow; use std::cmp::PartialEq; use std::ops::Deref; use std::rc::Rc; use wasm_bindgen::JsCast; use web_sys::{ Element, HtmlButtonElement, HtmlInputElement as InputElement, HtmlTextAreaElement as TextAreaElement, }; /// SVG namespace string used for creating svg elements pub const SVG_NAMESPACE: &str = "http://www.w3.org/2000/svg"; /// Default namespace for html elements pub const HTML_NAMESPACE: &str = "http://www.w3.org/1999/xhtml"; /// Used to improve performance of runtime element checks #[derive(Clone, Copy, Debug, PartialEq)] enum ElementType { Input, Textarea, Button, Other, } impl ElementType { fn from_tag(tag: &str) -> Self { match tag.to_ascii_lowercase().as_str() { "input" => Self::Input, "textarea" => Self::Textarea, "button" => Self::Button, _ => Self::Other, } } } /// A type for a virtual /// [Element](https://developer.mozilla.org/en-US/docs/Web/API/Element) /// representation. #[derive(Debug)] pub struct VTag { /// A tag of the element. tag: Cow<'static, str>, /// Type of element. element_type: ElementType, /// A reference to the DOM `Element`. pub reference: Option<Element>, /// List of attached listeners. pub listeners: Listeners, /// List of attributes. pub attributes: Attributes, /// List of children nodes pub children: VList, /// Contains a value of an /// [InputElement](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input). pub value: Option<AttrValue>, /// Contains /// [kind](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#Form_%3Cinput%3E_types) /// value of an `InputElement`. pub kind: Option<AttrValue>, /// Represents `checked` attribute of /// [input](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#attr-checked). /// It exists to override standard behavior of `checked` attribute, because /// in original HTML it sets `defaultChecked` value of `InputElement`, but for reactive /// frameworks it's more useful to control `checked` value of an `InputElement`. pub checked: bool, /// A node reference used for DOM access in Component lifecycle methods pub node_ref: NodeRef, /// Keeps handler for attached listeners to have an opportunity to drop them later. captured: Vec<EventListener>, pub key: Option<Key>, } impl Clone for VTag { fn clone(&self) -> Self { VTag { tag: self.tag.clone(), element_type: self.element_type, reference: None, listeners: self.listeners.clone(), attributes: self.attributes.clone(), children: self.children.clone(), value: self.value.clone(), kind: self.kind.clone(), checked: self.checked, node_ref: self.node_ref.clone(), key: self.key.clone(), captured: Vec::new(), } } } impl VTag { /// Creates a new `VTag` instance with `tag` name (cannot be changed later in DOM). 
pub fn new(tag: impl Into<Cow<'static, str>>) -> Self { let tag = tag.into(); let element_type = ElementType::from_tag(&tag); VTag { tag, element_type, reference: None, attributes: Attributes::new(), listeners: Vec::new(), captured: Vec::new(), children: VList::new(), node_ref: NodeRef::default(), key: None, value: None, kind: None, // In HTML node `checked` attribute sets `defaultChecked` parameter, // but we use own field to control real `checked` parameter checked: false, } } /// Returns tag of an `Element`. In HTML tags are always uppercase. pub fn tag(&self) -> &str { &self.tag } /// Add `VNode` child. pub fn add_child(&mut self, child: VNode) { self.children.add_child(child); } /// Add multiple `VNode` children. pub fn add_children(&mut self, children: impl IntoIterator<Item = VNode>) { self.children.add_children(children); } /// Sets `value` for an /// [InputElement](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input). pub fn set_value(&mut self, value: impl IntoOptPropValue<AttrValue>) { self.value = value.into_opt_prop_value(); } /// Sets `kind` property of an /// [InputElement](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input). /// Same as set `type` attribute. pub fn set_kind(&mut self, value: impl IntoOptPropValue<AttrValue>) { self.kind = value.into_opt_prop_value(); } #[doc(hidden)] pub fn __macro_set_key(&mut self, value: impl Into<Key>) { self.key = Some(value.into()) } /// Sets `checked` property of an /// [InputElement](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input). /// (Not a value of node's attribute). pub fn set_checked(&mut self, value: bool) { self.checked = value; } #[doc(hidden)] pub fn __macro_set_node_ref(&mut self, value: impl IntoPropValue<NodeRef>) { self.node_ref = value.into_prop_value() } /// Adds a key-value pair to attributes /// /// Not every attribute works when it set as an attribute. We use workarounds for: /// `type/kind`, `value` and `checked`. pub fn add_attribute(&mut self, key: &'static str, value: impl Into<AttrValue>) { self.attributes .get_mut_index_map() .insert(key, value.into()); } /// Sets attributes to a virtual node. /// /// Not every attribute works when it set as an attribute. We use workarounds for: /// `type/kind`, `value` and `checked`. pub fn set_attributes(&mut self, attrs: impl Into<Attributes>) { self.attributes = attrs.into(); } #[doc(hidden)] pub fn __macro_push_attr(&mut self, attr: PositionalAttr) { match &mut self.attributes { Attributes::Vec(attrs) => attrs.push(attr), _ => unreachable!("the macro always uses positional attributes"), } } /// Adds new listener to the node. /// It's boxed because we want to keep it in a single list. /// Later `Listener::attach` will attach an actual listener to a DOM node. pub fn add_listener(&mut self, listener: Rc<dyn Listener>) { self.listeners.push(listener); } /// Adds new listeners to the node. /// They are boxed because we want to keep them in a single list. /// Later `Listener::attach` will attach an actual listener to a DOM node. 
pub fn add_listeners(&mut self, listeners: Listeners) { self.listeners.extend(listeners); } #[doc(hidden)] pub fn __macro_set_listeners( &mut self, listeners: impl IntoIterator<Item = Option<Rc<dyn Listener>>>, ) { self.listeners = listeners.into_iter().flatten().collect(); } /// Every render it removes all listeners and attach it back later /// TODO(#943): Compare references of handler to do listeners update better fn recreate_listeners(&mut self, ancestor: &mut Option<Box<Self>>) { if let Some(ancestor) = ancestor.as_mut() { ancestor.captured.clear(); } let element = self.reference.clone().expect("element expected"); for listener in self.listeners.drain(..) { let handle = listener.attach(&element); self.captured.push(handle); } } fn refresh_value(&mut self) { // Don't refresh value if the element is not controlled if self.value.is_none() { return; } if let Some(element) = self.reference.as_ref() { if self.element_type == ElementType::Input { let input_el = element.dyn_ref::<InputElement>(); if let Some(input) = input_el { let current_value = input.value(); self.set_value(Cow::Owned(current_value)) } } else if self.element_type == ElementType::Textarea { let textarea_el = element.dyn_ref::<TextAreaElement>(); if let Some(tae) = textarea_el { let value = tae.value(); self.set_value(Cow::Owned(value)); } } } } /// Compares new kind with ancestor and produces a patch to apply, if any fn diff_kind<'a>(&'a self, ancestor: &'a Option<Box<Self>>) -> Option<Patch<&'a str, ()>> { match ( self.kind.as_ref(), ancestor.as_ref().and_then(|anc| anc.kind.as_ref()), ) { (Some(ref left), Some(ref right)) => { if left != right { Some(Patch::Replace(&**left, ())) } else { None } } (Some(ref left), None) => Some(Patch::Add(&**left, ())), (None, Some(right)) => Some(Patch::Remove(&**right)), (None, None) => None, } } /// Compares new value with ancestor and produces a patch to apply, if any fn diff_value<'a>(&'a self, ancestor: &'a Option<Box<Self>>) -> Option<Patch<&'a str, ()>> { match ( self.value.as_ref(), ancestor.as_ref().and_then(|anc| anc.value.as_ref()), ) { (Some(ref left), Some(ref right)) => { if left != right { Some(Patch::Replace(&**left, ())) } else { None } } (Some(ref left), None) => Some(Patch::Add(&**left, ())), (None, Some(right)) => Some(Patch::Remove(&**right)), (None, None) => None, } } fn apply_diffs(&mut self, ancestor: &mut Option<Box<Self>>) { let changes = if let Some(old_attributes) = ancestor.as_mut().map(|a| &mut a.attributes) { Attributes::diff(&self.attributes, old_attributes) } else { self.attributes .iter() .map(|(k, v)| Patch::Add(k, v)) .collect() }; let element = self.reference.as_ref().expect("element expected"); for change in changes { match change { Patch::Add(key, value) | Patch::Replace(key, value) => { element .set_attribute(&key, &value) .expect("invalid attribute key"); } Patch::Remove(key) => { element .remove_attribute(&key) .expect("could not remove attribute"); } } } if self.element_type == ElementType::Button { if let Some(button) = element.dyn_ref::<HtmlButtonElement>() { if let Some(change) = self.diff_kind(ancestor) { let kind = match change { Patch::Add(kind, _) | Patch::Replace(kind, _) => kind, Patch::Remove(_) => "", }; button.set_type(kind); } } } // `input` element has extra parameters to control // I override behavior of attributes to make it more clear // and useful in templates. 
For example I interpret `checked` // attribute as `checked` parameter, not `defaultChecked` as browsers do if self.element_type == ElementType::Input { if let Some(input) = element.dyn_ref::<InputElement>() { if let Some(change) = self.diff_kind(ancestor) { let kind = match change { Patch::Add(kind, _) | Patch::Replace(kind, _) => kind, Patch::Remove(_) => "", }; input.set_type(kind) } if let Some(change) = self.diff_value(ancestor) { let raw_value = match change { Patch::Add(kind, _) | Patch::Replace(kind, _) => kind, Patch::Remove(_) => "", }; input.set_value(raw_value) } // IMPORTANT! This parameter has to be set every time // to prevent strange behaviour in the browser when the DOM changes set_checked(&input, self.checked); } } else if self.element_type == ElementType::Textarea { if let Some(tae) = { element.dyn_ref::<TextAreaElement>() } { if let Some(change) = self.diff_value(ancestor) { let value = match change { Patch::Add(kind, _) | Patch::Replace(kind, _) => kind, Patch::Remove(_) => "", }; tae.set_value(value); } } } } fn create_element(&self, parent: &Element) -> Element { let tag = self.tag(); if tag == "svg" || parent .namespace_uri() .map_or(false, |ns| ns == SVG_NAMESPACE) { let namespace = Some(SVG_NAMESPACE); document() .create_element_ns(namespace, tag) .expect("can't create namespaced element for vtag") } else { document() .create_element(tag) .expect("can't create element for vtag") } } } impl VDiff for VTag { /// Remove VTag from parent. fn detach(&mut self, parent: &Element) { let node = self .reference .take() .expect("tried to remove not rendered VTag from DOM"); // recursively remove its children self.children.detach(&node); if parent.remove_child(&node).is_err() { warn!("Node not found to remove VTag"); } self.node_ref.set(None); } /// Renders virtual tag over DOM `Element`, but it also compares this with an ancestor `VTag` /// to compute what to patch in the actual DOM nodes. fn apply( &mut self, parent_scope: &AnyScope, parent: &Element, next_sibling: NodeRef, ancestor: Option<VNode>, ) -> NodeRef { let mut ancestor_tag = ancestor.and_then(|mut ancestor| { match ancestor { // If the ancestor is a tag of the same type, don't recreate, keep the // old tag and update its attributes and children. VNode::VTag(vtag) if self.tag() == vtag.tag() && self.key == vtag.key => Some(vtag), _ => { let element = self.create_element(parent); super::insert_node(&element, parent, Some(ancestor.first_node())); self.reference = Some(element); ancestor.detach(parent); None } } }); if let Some(ref mut ancestor_tag) = &mut ancestor_tag { // Refresh the current value to later compare it against the desired value // since it may have been changed since we last set it. ancestor_tag.refresh_value(); // Preserve the reference that already exists. 
self.reference = ancestor_tag.reference.take(); } else if self.reference.is_none() { let element = self.create_element(parent); super::insert_node(&element, parent, next_sibling.get()); self.reference = Some(element); } self.apply_diffs(&mut ancestor_tag); self.recreate_listeners(&mut ancestor_tag); // Process children let element = self.reference.as_ref().expect("Reference should be set"); if !self.children.is_empty() { self.children.apply( parent_scope, element, NodeRef::default(), ancestor_tag.map(|a| a.children.into()), ); } else if let Some(mut ancestor_tag) = ancestor_tag { ancestor_tag.children.detach(element); } let node = element.deref(); self.node_ref.set(Some(node.clone())); self.node_ref.clone() } } /// Set `checked` value for the `InputElement`. fn set_checked(input: &InputElement, value: bool) { input.set_checked(value); } impl PartialEq for VTag { fn eq(&self, other: &VTag) -> bool { self.tag == other.tag && self.value == other.value && self.kind == other.kind && self.checked == other.checked && self.listeners.len() == other.listeners.len() && self .listeners .iter() .map(|l| l.kind()) .eq(other.listeners.iter().map(|l| l.kind())) && self.attributes == other.attributes && self.children == other.children } } #[cfg(test)] mod tests { use super::*; use crate::html; #[cfg(feature = "wasm_test")] use wasm_bindgen_test::{wasm_bindgen_test as test, wasm_bindgen_test_configure}; #[cfg(feature = "wasm_test")] wasm_bindgen_test_configure!(run_in_browser); fn test_scope() -> AnyScope { AnyScope::test() } #[test] fn it_compares_tags() { let a = html! { <div></div> }; let b = html! { <div></div> }; let c = html! { <p></p> }; assert_eq!(a, b); assert_ne!(a, c); } #[test] fn it_compares_text() { let a = html! { <div>{ "correct" }</div> }; let b = html! { <div>{ "correct" }</div> }; let c = html! { <div>{ "incorrect" }</div> }; assert_eq!(a, b); assert_ne!(a, c); } #[test] fn it_compares_attributes() { let a = html! { <div a="test"></div> }; let b = html! { <div a="test"></div> }; let c = html! { <div a="fail"></div> }; assert_eq!(a, b); assert_ne!(a, c); } #[test] fn it_compares_children() { let a = html! { <div> <p></p> </div> }; let b = html! { <div> <p></p> </div> }; let c = html! { <div> <span></span> </div> }; assert_eq!(a, b); assert_ne!(a, c); } #[test] fn it_compares_classes() { let a = html! { <div class="test"></div> }; let b = html! { <div class="test"></div> }; let c = html! { <div class="fail"></div> }; let d = html! { <div class=format!("fail{}", "")></div> }; assert_eq!(a, b); assert_ne!(a, c); assert_eq!(c, d); } fn assert_vtag(node: &mut VNode) -> &mut VTag { if let VNode::VTag(vtag) = node { return vtag; } panic!("should be vtag"); } fn assert_namespace(vtag: &VTag, namespace: &'static str) { assert_eq!( vtag.reference.as_ref().unwrap().namespace_uri().unwrap(), namespace ); } #[test] fn supports_svg() { let document = web_sys::window().unwrap().document().unwrap(); let scope = test_scope(); let div_el = document.create_element("div").unwrap(); let namespace = SVG_NAMESPACE; let namespace = Some(namespace); let svg_el = document.create_element_ns(namespace, "svg").unwrap(); let mut g_node = html! { <g class="segment"></g> }; let path_node = html! { <path></path> }; let mut svg_node = html! 
{ <svg>{path_node}</svg> }; let svg_tag = assert_vtag(&mut svg_node); svg_tag.apply(&scope, &div_el, NodeRef::default(), None); assert_namespace(svg_tag, SVG_NAMESPACE); let path_tag = assert_vtag(svg_tag.children.get_mut(0).unwrap()); assert_namespace(path_tag, SVG_NAMESPACE); let g_tag = assert_vtag(&mut g_node); g_tag.apply(&scope, &div_el, NodeRef::default(), None); assert_namespace(g_tag, HTML_NAMESPACE); g_tag.reference = None; g_tag.apply(&scope, &svg_el, NodeRef::default(), None); assert_namespace(g_tag, SVG_NAMESPACE); } #[test] fn it_compares_values() { let a = html! { <input value="test"/> }; let b = html! { <input value="test"/> }; let c = html! { <input value="fail"/> }; assert_eq!(a, b); assert_ne!(a, c); } #[test] fn it_compares_kinds() { let a = html! { <input type="text"/> };
<input type="text"/> }; let c = html! { <input type="hidden"/> }; assert_eq!(a, b); assert_ne!(a, c); } #[test] fn it_compares_checked() { let a = html! { <input type="checkbox" checked=false /> }; let b = html! { <input type="checkbox" checked=false /> }; let c = html! { <input type="checkbox" checked=true /> }; assert_eq!(a, b); assert_ne!(a, c); } #[test] fn it_allows_aria_attributes() { let a = html! { <p aria-controls="it-works"> <a class="btn btn-primary" data-toggle="collapse" href="#collapseExample" role="button" aria-expanded="false" aria-controls="collapseExample"> { "Link with href" } </a> <button class="btn btn-primary" type="button" data-toggle="collapse" data-target="#collapseExample" aria-expanded="false" aria-controls="collapseExample"> { "Button with data-target" } </button> <div own-attribute-with-multiple-parts="works" /> </p> }; if let VNode::VTag(vtag) = a { assert_eq!( vtag.attributes .iter() .find(|(k, _)| k == &"aria-controls") .map(|(_, v)| v), Some("it-works") ); } else { panic!("vtag expected"); } } #[test] fn it_does_not_set_missing_class_name() { let scope = test_scope(); let parent = document().create_element("div").unwrap(); document().body().unwrap().append_child(&parent).unwrap(); let mut elem = html! { <div></div> }; elem.apply(&scope, &parent, NodeRef::default(), None); let vtag = assert_vtag(&mut elem); // test if the className has not been set assert!(!vtag.reference.as_ref().unwrap().has_attribute("class")); } #[test] fn it_sets_class_name() { let scope = test_scope(); let parent = document().create_element("div").unwrap(); document().body().unwrap().append_child(&parent).unwrap(); let mut elem = html! { <div class="ferris the crab"></div> }; elem.apply(&scope, &parent, NodeRef::default(), None); let vtag = assert_vtag(&mut elem); // test if the className has been set assert!(vtag.reference.as_ref().unwrap().has_attribute("class")); } #[test] fn controlled_input_synced() { let scope = test_scope(); let parent = document().create_element("div").unwrap(); document().body().unwrap().append_child(&parent).unwrap(); let expected = "not_changed_value"; // Initial state let mut elem = html! { <input value=expected /> }; elem.apply(&scope, &parent, NodeRef::default(), None); let vtag = if let VNode::VTag(vtag) = elem { vtag } else { panic!("should be vtag") }; // User input let input_ref = vtag.reference.as_ref().unwrap(); let input = input_ref.dyn_ref::<InputElement>(); input.unwrap().set_value("User input"); let ancestor = vtag; let mut elem = html! { <input value=expected /> }; let vtag = assert_vtag(&mut elem); // Sync happens here vtag.apply( &scope, &parent, NodeRef::default(), Some(VNode::VTag(ancestor)), ); // Get new current value of the input element let input_ref = vtag.reference.as_ref().unwrap(); let input = input_ref.dyn_ref::<InputElement>().unwrap(); let current_value = input.value(); // check whether not changed virtual dom value has been set to the input element assert_eq!(current_value, expected); } #[test] fn uncontrolled_input_unsynced() { let scope = test_scope(); let parent = document().create_element("div").unwrap(); document().body().unwrap().append_child(&parent).unwrap(); // Initial state let mut elem = html! 
{ <input /> }; elem.apply(&scope, &parent, NodeRef::default(), None); let vtag = if let VNode::VTag(vtag) = elem { vtag } else { panic!("should be vtag") }; // User input let input_ref = vtag.reference.as_ref().unwrap(); let input = input_ref.dyn_ref::<InputElement>(); input.unwrap().set_value("User input"); let ancestor = vtag; let mut elem = html! { <input /> }; let vtag = assert_vtag(&mut elem); // Value should not be refreshed vtag.apply( &scope, &parent, NodeRef::default(), Some(VNode::VTag(ancestor)), ); // Get user value of the input element let input_ref = vtag.reference.as_ref().unwrap(); let input = input_ref.dyn_ref::<InputElement>().unwrap(); let current_value = input.value(); // check whether not changed virtual dom value has been set to the input element assert_eq!(current_value, "User input"); } #[test] fn dynamic_tags_work() { let scope = test_scope(); let parent = document().create_element("div").unwrap(); document().body().unwrap().append_child(&parent).unwrap(); let mut elem = html! { <@{ let mut builder = String::new(); builder.push('a'); builder }/> }; elem.apply(&scope, &parent, NodeRef::default(), None); let vtag = assert_vtag(&mut elem); // make sure the new tag name is used internally assert_eq!(vtag.tag(), "a"); // Element.tagName is always in the canonical upper-case form. assert_eq!(vtag.reference.as_ref().unwrap().tag_name(), "A"); } #[test] fn dynamic_tags_handle_value_attribute() { let mut div_el = html! { <@{"div"} value="Hello"/> }; let div_vtag = assert_vtag(&mut div_el); assert!(div_vtag.value.is_none()); let v: Option<&str> = div_vtag .attributes .iter() .find(|(k, _)| k == &"value") .map(|(_, v)| AsRef::as_ref(v)); assert_eq!(v, Some("Hello")); let mut input_el = html! { <@{"input"} value="World"/> }; let input_vtag = assert_vtag(&mut input_el); assert_eq!(input_vtag.value, Some(Cow::Borrowed("World"))); assert!(!input_vtag.attributes.iter().any(|(k, _)| k == "value")); } #[test] fn dynamic_tags_handle_weird_capitalization() { let mut el = html! { <@{"tExTAREa"}/> }; let vtag = assert_vtag(&mut el); assert_eq!(vtag.tag(), "textarea"); } #[test] fn reset_node_ref() { let scope = test_scope(); let parent = document().create_element("div").unwrap(); document().body().unwrap().append_child(&parent).unwrap(); let node_ref = NodeRef::default(); let mut elem: VNode = html! { <div ref=node_ref.clone()></div> }; assert_vtag(&mut elem); elem.apply(&scope, &parent, NodeRef::default(), None); let parent_node = parent.deref(); assert_eq!(node_ref.get(), parent_node.first_child()); elem.detach(&parent); assert!(node_ref.get().is_none()); } /// Returns the class attribute as str reference, or "" if the attribute is not set. fn get_class_str(vtag: &VTag) -> &str { vtag.attributes .iter() .find(|(k, _)| k == &"class") .map(|(_, v)| AsRef::as_ref(v)) .unwrap_or("") } #[test] fn old_class_syntax_is_still_supported() { let a_classes = "class-1 class-2".to_string(); #[allow(deprecated)] let a = html! 
{ <div class=("class-1", a_classes)></div> }; if let VNode::VTag(vtag) = a { assert!(get_class_str(&vtag).contains("class-1")); assert!(get_class_str(&vtag).contains("class-2")); assert!(!get_class_str(&vtag).contains("class-3")); } else { panic!("vtag expected"); } } } #[cfg(test)] mod layout_tests { extern crate self as yew; use crate::html; use crate::virtual_dom::layout_tests::{diff_layouts, TestLayout}; #[cfg(feature = "wasm_test")] use wasm_bindgen_test::{wasm_bindgen_test as test, wasm_bindgen_test_configure}; #[cfg(feature = "wasm_test")] wasm_bindgen_test_configure!(run_in_browser); #[test] fn diff() { let layout1 = TestLayout { name: "1", node: html! { <ul> <li> {"a"} </li> <li> {"b"} </li> </ul> }, expected: "<ul><li>a</li><li>b</li></ul>", }; let layout2 = TestLayout { name: "2", node: html! { <ul> <li> {"a"} </li> <li> {"b"} </li> <li> {"d"} </li> </ul> }, expected: "<ul><li>a</li><li>b</li><li>d</li></ul>", }; let layout3 = TestLayout { name: "3", node: html! { <ul> <li> {"a"} </li> <li> {"b"} </li> <li> {"c"} </li> <li> {"d"} </li> </ul> }, expected: "<ul><li>a</li><li>b</li><li>c</li><li>d</li></ul>", }; let layout4 = TestLayout { name: "4", node: html! { <ul> <li> <> {"a"} </> </li> <li> {"b"} <li> {"c"} </li> <li> {"d"} </li> </li> </ul> }, expected: "<ul><li>a</li><li>b<li>c</li><li>d</li></li></ul>", }; diff_layouts(vec![layout1, layout2, layout3, layout4]); } }
let b = html! {
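The `diff_kind` and `diff_value` helpers above share one three-way pattern: replace when both sides are present and differ, add when only the new side has a value, remove when only the ancestor did. A minimal transliteration of that pattern to Python, for illustration only (the real code returns the `Patch` enum over `&str` slices):

def diff(new, old):
    """Three-way attribute diff; None means the value is absent."""
    if new is not None and old is not None:
        return ("replace", new) if new != old else None
    if new is not None:
        return ("add", new)
    if old is not None:
        return ("remove", old)
    return None

assert diff("text", "hidden") == ("replace", "text")
assert diff("text", None) == ("add", "text")
assert diff(None, "text") == ("remove", "text")
assert diff("text", "text") is None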
run_benchmark.py
"""Functions to create, run and visualize optimization benchmarks. TO-DO: - Add other benchmark sets: - finish medium scale problems from https://arxiv.org/pdf/1710.11005.pdf, Page 34. - add scalar problems from https://github.com/AxelThevenot - Add option for deterministic noise or wiggle. """ from pathlib import Path import numpy as np from estimagic import batch_evaluators from estimagic.logging.read_log import read_optimization_histories from estimagic.optimization.optimize import minimize def run_benchmark( problems, optimize_options, logging_directory, *, batch_evaluator="joblib", n_cores=1, error_handling="continue", fast_logging=True, seed=None, ):
def _process_optimize_options(raw_options): if not isinstance(raw_options, dict): dict_options = {} for option in raw_options: if isinstance(option, str): dict_options[option] = option else: dict_options[option.__name__] = option else: dict_options = raw_options out_options = {} for name, option in dict_options.items(): if not isinstance(option, dict): option = {"algorithm": option} if "log_options" in option: raise ValueError( "Log options cannot be specified as part of optimize_options. Logging " "behavior is configured by the run_benchmark function." ) out_options[name] = option return out_options
"""Run problems with different optimize options. Args: problems (dict): Nested dictionary with benchmark problems of the structure: {"name": {"inputs": {...}, "solution": {...}, "info": {...}}} where "inputs" are keyword arguments for ``minimize`` such as the criterion function and start parameters. "solution" contains the entries "params" and "value" and "info" might contain information about the test problem. optimize_options (list or dict): Either a list of algorithms or a Nested dictionary that maps a name for optimizer settings (e.g. ``"lbfgsb_strict_criterion"``) to a dictionary of keyword arguments for arguments for ``minimize`` (e.g. ``{"algorithm": "scipy_lbfgsb", "algo_options": {"convergence.relative_criterion_tolerance": 1e-12}}``). Alternatively, the values can just be an algorithm which is then benchmarked at default settings. batch_evaluator (str or callable): See :ref:`batch_evaluators`. logging_directory (pathlib.Path): Directory in which the log databases are saved. n_cores (int): Number of optimizations that is run in parallel. Note that in addition to that an optimizer might parallelize. error_handling (str): One of "raise", "continue". fast_logging (bool): Whether the slightly unsafe but much faster database configuration is chosen. Returns: dict: Nested Dictionary with information on the benchmark run. The outer keys are tuples where the first entry is the name of the problem and the second the name of the optimize options. The values are dicts with the entries: "runtime", "params_history", "criterion_history", "solution" """ np.random.seed(seed) logging_directory = Path(logging_directory) logging_directory.mkdir(parents=True, exist_ok=True) if isinstance(batch_evaluator, str): batch_evaluator = getattr( batch_evaluators, f"{batch_evaluator}_batch_evaluator" ) opt_options = _process_optimize_options(optimize_options) log_options = {"fast_logging": fast_logging, "if_table_exists": "replace"} kwargs_list = [] names = [] for prob_name, problem in problems.items(): for option_name, options in opt_options.items(): kwargs = { **options, **problem["inputs"], "logging": logging_directory / f"{prob_name}_{option_name}.db", "log_options": log_options, } kwargs_list.append(kwargs) names.append((prob_name, option_name)) log_paths = [kwargs["logging"] for kwargs in kwargs_list] raw_results = batch_evaluator( func=minimize, arguments=kwargs_list, n_cores=n_cores, error_handling=error_handling, unpack_symbol="**", ) results = {} for name, result, log_path in zip(names, raw_results, log_paths): histories = read_optimization_histories(log_path) stop = histories["metadata"]["timestamps"].max() start = histories["metadata"]["timestamps"].min() runtime = (stop - start).total_seconds() results[name] = { "params_history": histories["params"], "criterion_history": histories["values"], "time_history": histories["metadata"]["timestamps"] - start, "solution": result, "runtime": runtime, } return results
utils.py
import inspect
import os
from collections import OrderedDict
from datetime import datetime
from typing import Any, Callable, Type, Generator, Tuple
from warnings import warn

from django.contrib import admin
from django.db import models
from django.utils.translation import gettext_lazy as _

from etc.toolbox import import_app_module, import_project_modules

from .settings import PREFS_MODULE_NAME
from .signals import prefs_save


class Frame:
    """Represents a frame object at a definite level of hierarchy.

    To be used as a context manager:

        with Frame as f:
            ...

    """
    def __init__(self, stepback: int = 0):
        self.depth = stepback

    def __enter__(self):
        frame = inspect.currentframe().f_back
        for __ in range(self.depth):
            frame = frame.f_back
        self.frame = frame
        return self.frame

    def __exit__(self, exc_type, exc_val, exc_tb):
        del self.frame


class PatchedLocal:
    """Objects of this class temporarily replace all module variables
    considered preferences.

    """
    def __init__(self, key: str, val: Any):
        self.key = key
        self.val = val


class Mimic:
    """Mimics other types by implementing various special methods.

    This one is deprecated in favor of settings module proxying
    (proxy_settings_module()).

    """
    value: Any = None

    def __call__(self, *args, **kwargs):
        return self.value

    def __str__(self):
        return self.value.__str__()

    def __bool__(self):
        return bool(self.value)

    def __int__(self):
        return int(self.value)

    def __float__(self):
        return float(self.value)

    def __len__(self):
        return self.value.__len__()

    def __contains__(self, item):
        return self.value.__contains__(item)

    def __sub__(self, other):
        return self.value.__sub__(other)

    def __rsub__(self, other):
        return self.value.__rsub__(other)

    def __add__(self, other):
        return self.value.__add__(other)

    def __radd__(self, other):
        return self.value.__radd__(other)

    def __mul__(self, other):
        return self.value.__mul__(other)

    def __rmul__(self, other):
        return self.value.__rmul__(other)

    def __lt__(self, other):
        return self.value.__lt__(other)

    def __le__(self, other):
        return self.value.__le__(other)

    def __gt__(self, other):
        return self.value.__gt__(other)

    def __ge__(self, other):
        return self.value.__ge__(other)

    def __eq__(self, other):
        return self.value.__eq__(other)

    def __ne__(self, other):
        return self.value.__ne__(other)


class PrefProxy(Mimic):
    """Objects of this class replace app preferences."""

    def
(
        self,
        name: str,
        default: Any,
        category: str = None,
        field: models.Field = None,
        verbose_name: str = None,
        help_text: str = '',
        static: bool = True,
        readonly: bool = False
    ):
        """
        :param name: Preference name.
        :param default: Default (initial) value.
        :param category: Category name the preference belongs to.
        :param field: Django model field to represent this preference.
        :param verbose_name: Field verbose name.
        :param help_text: Field help text.
        :param static: Leave this preference static (do not store in DB).
        :param readonly: Make this field read only.

        """
        self.name = name
        self.category = category
        self.default = default
        self.static = static
        self.help_text = help_text

        if static:
            readonly = True

        self.readonly = readonly

        if verbose_name is None:
            verbose_name = name.replace('_', ' ').capitalize()

        self.verbose_name = verbose_name

        if field is None:
            self.field = get_field_for_proxy(self)
        else:
            self.field = field
            update_field_from_proxy(self.field, self)

    @property
    def value(self) -> Any:
        if self.static:
            val = self.default
        else:
            try:
                val = getattr(self, 'db_value')
            except AttributeError:
                val = self.default

        return self.field.to_python(val)

    def get_value(self) -> Any:
        warn('Please use .value instead of .get_value().', DeprecationWarning, stacklevel=2)
        return self.value

    def __repr__(self):
        return f'{self.name} = {self.value}'


def get_field_for_proxy(pref_proxy: PrefProxy) -> models.Field:
    """Returns a field object instance for a given PrefProxy object.

    :param pref_proxy:

    """
    field = {
        bool: models.BooleanField,
        int: models.IntegerField,
        float: models.FloatField,
        datetime: models.DateTimeField,
    }.get(type(pref_proxy.default), models.TextField)()

    update_field_from_proxy(field, pref_proxy)

    return field


def update_field_from_proxy(field_obj: models.Field, pref_proxy: PrefProxy):
    """Updates a field object with data from a PrefProxy object.

    :param field_obj:

    :param pref_proxy:

    """
    attr_names = ('verbose_name', 'help_text', 'default')

    for attr_name in attr_names:
        setattr(field_obj, attr_name, getattr(pref_proxy, attr_name))


def get_pref_model_class(app: str, prefs: dict, get_prefs_func: Callable) -> Type[models.Model]:
    """Returns a preferences model class dynamically created for a given app
    or None on conflict."""
    module = f'{app}.{PREFS_MODULE_NAME}'

    model_dict = {
        '_prefs_app': app,
        '_get_prefs': staticmethod(get_prefs_func),
        '__module__': module,
        'Meta': type('Meta', (models.options.Options,), {
            'verbose_name': _('Preference'),
            'verbose_name_plural': _('Preferences'),
            'app_label': app,
            'managed': False,
        })
    }

    for field_name, val_proxy in prefs.items():
        model_dict[field_name] = val_proxy.field

    model = type('Preferences', (models.Model,), model_dict)

    def fake_save_base(self, *args, **kwargs):

        updated_prefs = {
            f.name: getattr(self, f.name)
            for f in self._meta.fields if not isinstance(f, models.fields.AutoField)
        }

        app_prefs = self._get_prefs(self._prefs_app)

        for pref in app_prefs.keys():
            if pref in updated_prefs:
                app_prefs[pref].db_value = updated_prefs[pref]

        self.pk = self._prefs_app  # Make Django 1.7 happy.
prefs_save.send(sender=self, app=self._prefs_app, updated_prefs=updated_prefs) return True model.save_base = fake_save_base return model def get_pref_model_admin_class(prefs: dict) -> Type[admin.ModelAdmin]: by_category = OrderedDict() readonly_fields = [] for field_name, val_proxy in prefs.items(): if val_proxy.readonly: readonly_fields.append(field_name) if val_proxy.category not in by_category: by_category[val_proxy.category] = [] by_category[val_proxy.category].append(field_name) cl_model_admin_dict = { 'has_add_permission': lambda *args: False, 'has_delete_permission': lambda *args: False } if readonly_fields: cl_model_admin_dict['readonly_fields'] = readonly_fields fieldsets = [] for category, cat_prefs in by_category.items(): fieldsets.append((category, {'fields': cat_prefs})) if fieldsets: cl_model_admin_dict['fieldsets'] = fieldsets model = type('PreferencesAdmin', (admin.ModelAdmin,), cl_model_admin_dict) model.changelist_view = lambda self, request, **kwargs: self.change_view(request, '', **kwargs) model.get_object = lambda self, *args: ( self.model( **{ field_name: val_proxy.get_value() for field_name, val_proxy in self.model._get_prefs(self.model._prefs_app).items() } ) ) return model def get_frame_locals(stepback: int = 0) -> dict: """Returns locals dictionary from a given frame. :param stepback: """ with Frame(stepback=stepback) as frame: locals_dict = frame.f_locals return locals_dict def traverse_local_prefs(stepback: int = 0) -> Generator[Tuple[str, dict], None, None]: """Generator to walk through variables considered as preferences in locals dict of a given frame. :param stepback: """ locals_dict = get_frame_locals(stepback+1) for k in locals_dict: if not k.startswith('_') and k.upper() == k: yield k, locals_dict def import_module(package: str, module_name: str): """Imports a module from a given package. :param package: :param module_name: """ import_app_module(package, module_name) def import_prefs(): """Imports preferences modules from packages (apps) and project root.""" # settings.py locals if autodiscover_siteprefs() is in urls.py settings_locals = get_frame_locals(3) if 'self' not in settings_locals: # If not SiteprefsConfig.ready() # Try to import project-wide prefs. project_package = settings_locals['__package__'] # Expected project layout introduced in Django 1.4 if not project_package: # Fallback to old layout. project_package = os.path.split(os.path.dirname(settings_locals['__file__']))[-1] import_module(project_package, PREFS_MODULE_NAME) import_project_modules(PREFS_MODULE_NAME)
__init__
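A small standalone sketch of the PrefProxy mechanics described above (illustrative only; in practice these proxies are created during preference registration, and Django settings must be configured for model fields to behave fully):

# Hypothetical preference proxy with a static integer default.
page_size = PrefProxy('page_size', 25, category='Listing', help_text='Items per page')

print(page_size.value)   # -> 25: static default coerced through IntegerField.to_python
print(page_size + 5)     # -> 30: arithmetic works via the Mimic dunder hooks
print(repr(page_size))   # -> 'page_size = 25'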
sanitize_test.go
/* Copyright IBM Corp. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package crypto_test import ( "crypto/x509" "encoding/base64" "encoding/pem" "testing" "github.com/golang/protobuf/proto" "github.com/hyperledger/fabric-protos-go/msp" "github.com/hyperledger/fabric/common/crypto" "github.com/stretchr/testify/assert" ) const ( highSCACert = `LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJwRENDQVV1Z0F3SUJBZ0lSQUozQm9QcytuS2J0VnBVVFAzZG5pYWt3Q2dZSUtvWkl6ajBFQXdJd01qRXcKTUM0R0ExVUVCUk1uTWpBNU5qazBNVGN6TnpJek1URXhPRFF3TkRVek5UVTNNelUwTURVNU9EUTVOVEF4TURrMwpNQjRYRFRFNU1UQXlNVEV5TlRrd05Wb1hEVEk1TVRBeE9URXlOVGt3TlZvd01qRXdNQzRHQTFVRUJSTW5NakE1Ck5qazBNVGN6TnpJek1URXhPRFF3TkRVek5UVTNNelUwTURVNU9EUTVOVEF4TURrM01Ga3dFd1lIS29aSXpqMEMKQVFZSUtvWkl6ajBEQVFjRFFnQUVZU1QxTjhHT3h2VGJnQi93eGlZbGJ5UU1rTExCNWtTTmlmSDBXaWJDK3BBbgpvMHFIOUdNWEwxK1B5RGFLUlpNUGRMQ3NCa1o4Z0NHSEJXWjZZM28xaWFOQ01FQXdEZ1lEVlIwUEFRSC9CQVFECkFnR21NQjBHQTFVZEpRUVdNQlFHQ0NzR0FRVUZCd01DQmdnckJnRUZCUWNEQVRBUEJnTlZIUk1CQWY4RUJUQUQKQVFIL01Bb0dDQ3FHU000OUJBTUNBMGNBTUVRQ0lGWkhpZGNLeG9NcDB4RTNuM0lydW5rczlLQUZlaHhlaUt6Rgo4NURHMnRGOEFpQWJkdTFwc2pWK1c0WWpGZ3pyK2N3MUxVYUlFeTVmcGZ4ZTNjU1BtUm9sL0E9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==` highSCert = `LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJwVENDQVVxZ0F3SUJBZ0lRV29oOFNWZnlhRFZwcDN5TkFROHdWVEFLQmdncWhrak9QUVFEQWpBeU1UQXcKTGdZRFZRUUZFeWN5TURrMk9UUXhOek0zTWpNeE1URTROREEwTlRNMU5UY3pOVFF3TlRrNE5EazFNREV3T1RjdwpIaGNOTVRreE1ESXhNVEkxT1RBMVdoY05Namt4TURFNU1USTFPVEExV2pBeU1UQXdMZ1lEVlFRRkV5Y3hNakF6Ck16a3hPVEk0TWpNd05qZ3hNamswT0RFME5qQTJNREk1TkRNd05Ua3lNVEF6TWpVd1dUQVRCZ2NxaGtqT1BRSUIKQmdncWhrak9QUU1CQndOQ0FBVGs4ci9zZ1BKL2FwL2dZakw2T0dwcWc5TmRtd3dFSlp1OXFaaDAvYXRvbFNsVQp5V3cxUDdRR283Zk5rcVdXSi8xZm5jbUZ4ZTQzOTJEVmNJZERSTENYbzBJd1FEQU9CZ05WSFE4QkFmOEVCQU1DCkJhQXdIUVlEVlIwbEJCWXdGQVlJS3dZQkJRVUhBd0lHQ0NzR0FRVUZCd01CTUE4R0ExVWRFUVFJTUFhSEJIOEEKQUFFd0NnWUlLb1pJemowRUF3SURTUUF3UmdJaEFMdXZBSjlpUWJBVEFHMFRFanlqRmhuY3kwOVczQUpJbm91eQpvVnFZL3owNUFpRUE3QVhETkNLY3c3TU92dm0zTFFrMEJsdkRPSXNkRm5hMG96Rkp4RU0vdWRzPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==` lowSCert = `LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJwVENDQVV1Z0F3SUJBZ0lSQVBGeXYrdzVkNjEybm95M0V5VXBYdHN3Q2dZSUtvWkl6ajBFQXdJd01qRXcKTUM0R0ExVUVCUk1uTWpreU5qTXlNakUzTkRZeU1qQXdOVGswTWpjMU5qSXhOekU0TXpVM01UYzVPVGt6TmpFeQpNQjRYRFRFNU1UQXlNVEV6TURBek5Gb1hEVEk1TVRBeE9URXpNREF6TkZvd01qRXdNQzRHQTFVRUJSTW5Nekl3Ck9UTTVOell4TkRneE9UQXpOamN6TkRRME56azNORGM0Tmprek5UQXhORGt5T1RVMU1Ga3dFd1lIS29aSXpqMEMKQVFZSUtvWkl6ajBEQVFjRFFnQUVhZ0NmSDlIS1ZHMEs3S1BUclBUQVpGMGlHZFNES3E2b3E2cG9KVUI5dFI0ZgpXRDN5cEJQZ0xrSDd6R25yL0wrVERIQnVIZGEwNHROYkVha1BwVzhCdnFOQ01FQXdEZ1lEVlIwUEFRSC9CQVFECkFnV2dNQjBHQTFVZEpRUVdNQlFHQ0NzR0FRVUZCd01DQmdnckJnRUZCUWNEQVRBUEJnTlZIUkVFQ0RBR2h3Ui8KQUFBQk1Bb0dDQ3FHU000OUJBTUNBMGdBTUVVQ0lRQ2xCb2ZiNEZRREs1TDJxdjRWMTdaWHdHVm9LQWxuK1lmMQpReVNGblZIVk1BSWdNNzd4ZVBnQ3BNQ3BsOVFyb2ROQi9TV2tCWlZ4VGdKVlpmeWJBMFR3bGcwPQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==` ) func TestSanitizeIdentity(t *testing.T)
{
	extractCertFromPEM := func(cert []byte) *x509.Certificate {
		bl, _ := pem.Decode(cert)
		certificate, err := x509.ParseCertificate(bl.Bytes)
		assert.NoError(t, err)
		return certificate
	}

	t.Run("lowS stays the same", func(t *testing.T) {
		cert, err := base64.StdEncoding.DecodeString(lowSCert)
		assert.NoError(t, err)

		identity := &msp.SerializedIdentity{
			Mspid:   "SampleOrg",
			IdBytes: cert,
		}

		identityPreSanitation, err := proto.Marshal(identity)
		assert.NoError(t, err)
		identityAfterSanitation, err := crypto.SanitizeIdentity(identityPreSanitation)
		assert.NoError(t, err)
		assert.Equal(t, identityPreSanitation, identityAfterSanitation)
	})

	t.Run("highS changes, but is still verifiable under the CA", func(t *testing.T) {
		cert, err := base64.StdEncoding.DecodeString(highSCert)
		assert.NoError(t, err)

		caCert, err := base64.StdEncoding.DecodeString(highSCACert)
		assert.NoError(t, err)

		identity := &msp.SerializedIdentity{
			Mspid:   "SampleOrg",
			IdBytes: cert,
		}

		identityPreSanitation, err := proto.Marshal(identity)
		assert.NoError(t, err)
		identityAfterSanitation, err := crypto.SanitizeIdentity(identityPreSanitation)
		assert.NoError(t, err)
		assert.NotEqual(t, identityPreSanitation, identityAfterSanitation)

		err = proto.Unmarshal(identityAfterSanitation, identity)
		assert.NoError(t, err)
		certAfterSanitation := identity.IdBytes

		certPool := x509.NewCertPool()
		certPool.AppendCertsFromPEM(caCert)
		_, err = extractCertFromPEM(certAfterSanitation).Verify(x509.VerifyOptions{
			Roots: certPool,
		})
		assert.NoError(t, err)
	})
}
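The high-S/low-S distinction the test exercises comes from ECDSA signature malleability: for curve order N, (r, s) and (r, N-s) verify equally, so implementations canonicalize to the lower half of the order. A hedged sketch of that normalization (illustrative, not Fabric's actual code; assumes "crypto/elliptic" and "math/big" are imported):

// Fold an upper-half s value down to its canonical low-S twin.
func toLowS(curve elliptic.Curve, s *big.Int) *big.Int {
	n := curve.Params().N
	halfOrder := new(big.Int).Rsh(n, 1) // N / 2
	if s.Cmp(halfOrder) > 0 {
		return new(big.Int).Sub(n, s)
	}
	return s
}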
index.d.ts
import { CSSProperties, FC } from 'react'; import './styles.css'; interface Props {
style?: CSSProperties; value: boolean; disabled?: boolean; color: string; onChange?: (val: boolean) => void; } /** * Switch component. */ export declare const Switch: FC<Props>; export {};
id?: string; className?: string;
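A minimal usage sketch of the declared component (the handler wiring and color value are illustrative):

import { useState } from 'react';
import { Switch } from './index';

export const Example = () => {
  const [enabled, setEnabled] = useState(false);
  return (
    <Switch
      value={enabled}
      color="#4cd964"
      onChange={(val) => setEnabled(val)}
    />
  );
};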
player.go
/* Copyright 2019 The Vitess Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package grpcbinlogplayer import ( "context" "google.golang.org/grpc" "vitess.io/vitess/go/netutil" "vitess.io/vitess/go/vt/binlog/binlogplayer" "vitess.io/vitess/go/vt/grpcclient" "flag" binlogdatapb "vitess.io/vitess/go/vt/proto/binlogdata" binlogservicepb "vitess.io/vitess/go/vt/proto/binlogservice" topodatapb "vitess.io/vitess/go/vt/proto/topodata" ) var ( cert = flag.String("binlog_player_grpc_cert", "", "the cert to use to connect") key = flag.String("binlog_player_grpc_key", "", "the key to use to connect") ca = flag.String("binlog_player_grpc_ca", "", "the server ca to use to validate servers when connecting") crl = flag.String("binlog_player_grpc_crl", "", "the server crl to use to validate server certificates when connecting") name = flag.String("binlog_player_grpc_server_name", "", "the server name to use to validate server certificate") ) // client implements a Client over go rpc type client struct { cc *grpc.ClientConn c binlogservicepb.UpdateStreamClient } func (client *client) Dial(tablet *topodatapb.Tablet) error { addr := netutil.JoinHostPort(tablet.Hostname, tablet.PortMap["grpc"]) var err error opt, err := grpcclient.SecureDialOption(*cert, *key, *ca, *crl, *name) if err != nil { return err } client.cc, err = grpcclient.Dial(addr, grpcclient.FailFast(true), opt) if err != nil { return err } client.c = binlogservicepb.NewUpdateStreamClient(client.cc) return nil } func (client *client) Close() { client.cc.Close() } type serveStreamKeyRangeAdapter struct { stream binlogservicepb.UpdateStream_StreamKeyRangeClient } func (s *serveStreamKeyRangeAdapter) Recv() (*binlogdatapb.BinlogTransaction, error) { r, err := s.stream.Recv() if err != nil { return nil, err } return r.BinlogTransaction, nil } func (client *client) StreamKeyRange(ctx context.Context, position string, keyRange *topodatapb.KeyRange, charset *binlogdatapb.Charset) (binlogplayer.BinlogTransactionStream, error) { query := &binlogdatapb.StreamKeyRangeRequest{ Position: position, KeyRange: keyRange, Charset: charset, } stream, err := client.c.StreamKeyRange(ctx, query) if err != nil { return nil, err } return &serveStreamKeyRangeAdapter{stream}, nil } type serveStreamTablesAdapter struct { stream binlogservicepb.UpdateStream_StreamTablesClient } func (s *serveStreamTablesAdapter) Recv() (*binlogdatapb.BinlogTransaction, error) { r, err := s.stream.Recv() if err != nil { return nil, err } return r.BinlogTransaction, nil } func (client *client) StreamTables(ctx context.Context, position string, tables []string, charset *binlogdatapb.Charset) (binlogplayer.BinlogTransactionStream, error) { query := &binlogdatapb.StreamTablesRequest{ Position: position, Tables: tables, Charset: charset, } stream, err := client.c.StreamTables(ctx, query) if err != nil { return nil, err } return &serveStreamTablesAdapter{stream}, nil } // Registration as a factory func init()
{ binlogplayer.RegisterClientFactory("grpc", func() binlogplayer.Client { return &client{} }) }
users.resolver.ts
import { Resolver, Query, Mutation, Args, Parent, ResolveField, } from '@nestjs/graphql'; import { UsersService } from './users.service'; import { User } from './models/user.model'; import { ActivitiesService } from '../activities/activities.service'; import { ActivityModel } from '../activities/models/activity.model'; import { GetUserArgs } from '../activities/dto/get-user-args.dto'; import { CreateUserArgs } from '../activities/dto/create-user.args'; @Resolver(() => User) export class UsersResolver { constructor( private readonly usersService: UsersService,
private readonly activitiesService: ActivitiesService, ) {} @Query(() => User, { name: 'user', nullable: true }) findOne(@Args() args: GetUserArgs) { return this.usersService.findOne(args); } @ResolveField('activities', () => [ActivityModel]) findActivities(@Parent() user: User) { const { id } = user; return this.activitiesService.findByUserId(id); } @Mutation(() => User) createUser(@Args() args: CreateUserArgs): Promise<User> { return this.usersService.create(args); } }
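A query shaped like the following exercises both resolvers: `findOne` serves the root `user` field and `findActivities` fills the nested `activities` field. The argument name and field selections are assumptions, since `GetUserArgs` and the models are not shown here:

query {
  user(id: "42") {
    activities {
      id
    }
  }
}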
utxo_id_test.go
// (c) 2019-2020, Ava Labs, Inc. All rights reserved. // See the file LICENSE for licensing terms. package avax import ( "testing" "github.com/ava-labs/avalanchego/ids" "github.com/ava-labs/avalanchego/utils/codec" ) func TestUTXOIDVerifyNil(t *testing.T) { utxoID := (*UTXOID)(nil) if err := utxoID.Verify(); err == nil { t.Fatalf("Should have errored due to a nil utxo ID") } }
utxoID := UTXOID{ TxID: ids.ID{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, }, OutputIndex: 0x20212223, } if err := utxoID.Verify(); err != nil { t.Fatal(err) } bytes, err := c.Marshal(&utxoID) if err != nil { t.Fatal(err) } newUTXOID := UTXOID{} if err := c.Unmarshal(bytes, &newUTXOID); err != nil { t.Fatal(err) } if err := newUTXOID.Verify(); err != nil { t.Fatal(err) } if utxoID.InputID() != newUTXOID.InputID() { t.Fatalf("Parsing returned the wrong UTXO ID") } }
func TestUTXOID(t *testing.T) { c := codec.NewDefault()
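The round-trip assertion above relies on `InputID` being a deterministic function of (TxID, OutputIndex). A small test-style illustration of that property (values assumed):

// Two structurally equal UTXO IDs must map to the same input ID.
a := UTXOID{TxID: ids.Empty, OutputIndex: 7}
b := UTXOID{TxID: ids.Empty, OutputIndex: 7}
if a.InputID() != b.InputID() {
	t.Fatal("identical UTXO IDs must produce identical input IDs")
}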
mod.rs
use cannyls::device::DeviceHandle;
use fibers::sync::mpsc;
use futures::{Async, Future, Poll, Stream};
use prometrics::metrics::{Histogram, HistogramBuilder, MetricBuilder};
use raftlog::election::Ballot;
use raftlog::log::{LogIndex, LogPosition, LogPrefix, LogSuffix};
use raftlog::{Error, ErrorKind, Result};
use slog::Logger;
use std::sync::atomic::{self, AtomicUsize};
use trackable::error::ErrorKindExt;

use crate::LocalNodeId;

pub use self::ballot::{LoadBallot, SaveBallot};
pub use self::log::{DeleteLog, LoadLog, SaveLog};
pub use self::log_prefix::{LoadLogPrefix, SaveLogPrefix};
pub use self::log_suffix::{LoadLogSuffix, SaveLogSuffix};

mod ballot;
mod log;
mod log_prefix;
mod log_suffix;

// Global variable used to serialize storage initialization.
//
// Initialization involves snapshot handling and large AppendEntries
// processing and is therefore heavy, so this is used to lower the degree of
// parallelism.
//
// Eventually a smarter mechanism should be devised.
// (Ideally one that is independent for each Raft node.)
static INITIALIZATION_LOCK: AtomicUsize = AtomicUsize::new(0);

fn acquire_initialization_lock() -> bool {
    INITIALIZATION_LOCK
        .compare_exchange(0, 1, atomic::Ordering::SeqCst, atomic::Ordering::SeqCst)
        .is_ok()
}

fn release_initialization_lock() {
    INITIALIZATION_LOCK.fetch_sub(1, atomic::Ordering::SeqCst);
}

/// Persistent storage implementation for Raft.
#[derive(Debug)]
pub struct Storage {
    handle: Handle,

    // Buffer holding the log region that follows the latest snapshot.
    //
    // It exists to speed up reads: except while a Raft node is being loaded,
    // reads of the tail log entries are always served from this buffer.
    //
    // Writes, by contrast, are always persisted to storage immediately.
    // (Entries are appended to the buffer at the same time, so there is
    // never a period during which an entry exists only in the buffer.)
    log_suffix: LogSuffix,

    event_rx: mpsc::Receiver<Event>,
    event_tx: mpsc::Sender<Event>,
    phase: Phase,
    metrics: StorageMetrics,
}

impl Storage {
    /// Creates a new `Storage` instance.
    pub fn new(
        logger: Logger,
        node_id: LocalNodeId,
        device: DeviceHandle,
        metrics: StorageMetrics,
    ) -> Self {
        let (event_tx, event_rx) = mpsc::channel();
        Storage {
            handle: Handle {
                logger,
                node_id,
                device,
            },
            log_suffix: LogSuffix::default(),
            event_rx,
            event_tx,
            phase: Phase::Started,
            metrics,
        }
    }

    /// Deletes the persisted log.
    ///
    /// Both the prefix part and the suffix part are deleted. This is meant
    /// for abnormal situations such as corrupted log entries creeping in;
    /// under normal operation there is no need to delete the log.
    pub(crate) fn delete_log(&mut self) -> DeleteLog {
        DeleteLog::new(&self.handle, self.event_tx.clone(), self.node_id())
    }

    pub(crate) fn logger(&self) -> Logger {
        self.handle.logger.clone()
    }

    pub(crate) fn node_id(&self) -> LocalNodeId {
        self.handle.node_id
    }

    #[cfg(test)]
    pub(crate) fn handle(&self) -> Handle {
        self.handle.clone()
    }

    pub(crate) fn save_ballot(&mut self, ballot: Ballot) -> ballot::SaveBallot {
        ballot::SaveBallot::new(self, ballot)
    }

    pub(crate) fn load_ballot(&mut self) -> ballot::LoadBallot {
        ballot::LoadBallot::new(self)
    }

    pub(crate) fn load_log(&mut self, start: LogIndex, end: Option<LogIndex>) -> LoadLog {
        if let Err(e) = track!(self.poll_and_handle_event()) {
            return LoadLog::new(log::LoadLogInner::Failed(e), self.metrics.clone());
        }

        // XXX: This relies heavily on `raftlog` implementation details
        // throughout, which is not ideal.
        let future = if let Some(end) = end {
            // An explicit end was given == a log read issued after the
            // initial load (node startup).
            if start < self.log_suffix.head.index {
                // Entries before the buffered region are needed => they no
                // longer exist, so return the snapshot instead.
                let future = log_prefix::LoadLogPrefix::new(self);
                log::LoadLogInner::LoadLogPrefix {
                    next: None,
                    event_tx: None,
                    future,
                }
            } else {
                // Serve from the buffer.
                let copy_from_buffer = || {
                    track_assert!(
                        start <= self.log_suffix.tail().index,
                        ErrorKind::InvalidInput
                    );
                    track_assert!(end <= self.log_suffix.tail().index, ErrorKind::InvalidInput);
                    track!(self.log_suffix.slice(start, end))
                };
                match copy_from_buffer() {
                    Err(e) => log::LoadLogInner::Failed(e),
                    Ok(suffix) => log::LoadLogInner::CopyLogSuffix(suffix),
                }
            }
        } else if start.as_u64() == 0 {
            // "No end specified" and "start position is 0" indicate the very
            // first log read at node startup
            // => first try to load the snapshot.
            let future = log_prefix::LoadLogPrefix::new(self);
            // If the log prefix cannot be loaded here, there is an implicit
            // assumption that the log suffix starts at position 0, so
            // creating `LoadLogSuffix` with the suffix start at position 0
            // still reads the suffix correctly.
            let next = Some(log_suffix::LoadLogSuffix::new(self));
            log::LoadLogInner::LoadLogPrefix {
                next,
                event_tx: Some(self.event_tx.clone()),
                future,
            }
        } else {
            // "No end specified" and "start position is non-zero" indicate a
            // log read at node startup, after the snapshot has been loaded
            // => fetch the log entries that follow the snapshot.
            assert_eq!(start, self.log_suffix.head.index);
            log::LoadLogInner::LoadLogSuffix(log_suffix::LoadLogSuffix::new(self))
        };
        LoadLog::new(future, self.metrics.clone())
    }

    pub(crate) fn save_log_suffix(&mut self, suffix: &LogSuffix) -> SaveLog {
        if self.phase != Phase::Initialized {
            // A log write occurring means the initialization phase is over.
            info!(self.handle.logger, "Initialized");
            if self.phase == Phase::Initializing {
                release_initialization_lock();
                info!(self.handle.logger, "Initialization lock is released");
            }
            self.phase = Phase::Initialized;
        }
        if let Err(e) = track!(self.poll_and_handle_event()) {
            return SaveLog::new(log::SaveLogInner::Failed(e), self.metrics.clone());
        }

        // Append to the local buffer first, then save to persistent storage.
        //
        // From `raftlog`'s point of view, the entries only count as appended
        // once the `Future` returned by this method completes, so appending
        // to the buffer first causes no problems.
        let future = if let Err(e) = track!(self.append_to_local_buffer(suffix)) {
            log_suffix::SaveLogSuffix::failed(self, e)
        } else {
            log_suffix::SaveLogSuffix::new(self, suffix)
        };
        SaveLog::new(log::SaveLogInner::Suffix(future), self.metrics.clone())
    }

    pub(crate) fn save_log_prefix(&mut self, prefix: LogPrefix) -> SaveLog {
        if self.phase != Phase::Initialized {
            // A log write occurring means the initialization phase is over.
            info!(self.handle.logger, "Initialized");
            if self.phase == Phase::Initializing {
                release_initialization_lock();
                info!(self.handle.logger, "Initialization lock is released");
            }
            self.phase = Phase::Initialized;
        }
        let inner = if let Err(e) = track!(self.poll_and_handle_event()) {
            log::SaveLogInner::Failed(e)
        } else {
            log::SaveLogInner::Prefix(log_prefix::SaveLogPrefix::new(self, prefix))
        };
        SaveLog::new(inner, self.metrics.clone())
    }

    #[allow(clippy::wrong_self_convention)]
    pub(crate) fn is_busy(&mut self) -> bool {
        if self.phase == Phase::Started {
            if acquire_initialization_lock() {
                info!(self.handle.logger, "Initialization lock is acquired");
                self.phase = Phase::Initializing;
                false
            } else {
                true
            }
        } else {
            false
        }
    }

    fn poll_and_handle_event(&mut self) -> Result<()> {
        while let Async::Ready(event) = self.event_rx.poll().expect("Never fails") {
            let event = event.expect("Never fails");
            match event {
                Event::LogPrefixUpdated { new_head } => {
                    track!(self.handle_log_prefix_updated_event(
ded(suffix) => {
                    track!(self.handle_log_suffix_loaded_event(suffix))?;
                }
                Event::LogSuffixDeleted => {
                    track!(self.handle_log_suffix_deleted_event())?;
                }
            }
        }
        Ok(())
    }

    fn handle_log_prefix_updated_event(&mut self, new_head: LogPosition) -> Result<()> {
        // The log prefix was updated, so adjust the buffer accordingly.
        info!(
            self.handle.logger,
            "Event::LogPrefixUpdated: {}",
            dump!(self.log_suffix.head, new_head)
        );
        if self.log_suffix.head.index < new_head.index {
            if self.log_suffix.skip_to(new_head.index).is_err() {
                // A snapshot located beyond the range covered by the buffer
                // (i.e., the local log range) was installed
                // => empty the buffer and reset the head position.
                self.log_suffix.head = new_head;
                self.log_suffix.entries.clear();
            }
            track_assert_eq!(
                new_head.index,
                self.log_suffix.head.index,
                ErrorKind::InconsistentState
            );
            if new_head.prev_term != self.log_suffix.head.prev_term {
                self.log_suffix.head.prev_term = new_head.prev_term;
                self.log_suffix.entries.clear();
            }
        }
        Ok(())
    }

    #[allow(clippy::unnecessary_wraps)]
    fn handle_log_suffix_loaded_event(&mut self, suffix: LogSuffix) -> Result<()> {
        // The log suffix was loaded from storage, so reflect it in the buffer.
        info!(
            self.handle.logger,
            "Event::LogSuffixLoaded: {}",
            dump!(suffix.head, suffix.entries.len())
        );
        self.log_suffix = suffix;
        Ok(())
    }

    #[allow(clippy::unnecessary_wraps)]
    fn handle_log_suffix_deleted_event(&mut self) -> Result<()> {
        // The log suffix was deleted from storage, so reflect that in the buffer.
        info!(
            self.handle.logger,
            "Event::LogSuffixDeleted: {}",
            dump!(self.log_suffix.head)
        );
        self.log_suffix = Default::default();
        Ok(())
    }

    fn append_to_local_buffer(&mut self, suffix: &LogSuffix) -> Result<()> {
        // Check whether the local log and the `suffix` region overlap.
        // (Uncommitted entries may be rolled back, so the head of `suffix`
        // does not necessarily have to match the tail of the local log.)
        let entries_offset = if self.log_suffix.head.index <= suffix.head.index {
            0
        } else {
            // Depending on snapshot installation timing, this branch can be taken.
            self.log_suffix.head.index - suffix.head.index
        };
        track_assert!(
            suffix.head.index <= self.log_suffix.tail().index,
            ErrorKind::InconsistentState,
            "suffix.start={:?}, self.end={:?}",
            suffix.head.index,
            self.log_suffix.tail().index
        );

        // Consistency check (prev_term must match).
        let offset = suffix.head.index + entries_offset - self.log_suffix.head.index;
        let prev_term = if offset == 0 {
            self.log_suffix.head.prev_term
        } else {
            self.log_suffix.entries[offset - 1].term()
        };
        track_assert_eq!(
            suffix.positions().nth(entries_offset).map(|p| p.prev_term),
            Some(prev_term),
            ErrorKind::InconsistentState,
            "suffix.start={:?}, self.start={:?}",
            suffix.positions().nth(entries_offset),
            self.log_suffix.head
        );

        // Truncate (roll back) the surplus tail region, then append.
        self.log_suffix.entries.truncate(offset);
        self.log_suffix
            .entries
            .extend(suffix.entries.iter().skip(entries_offset).cloned());
        Ok(())
    }
}

#[derive(Debug, Clone)]
pub(crate) struct Handle {
    pub logger: Logger,
    pub node_id: LocalNodeId,
    pub device: DeviceHandle,
}

#[derive(Debug)]
#[allow(clippy::enum_variant_names)]
pub(crate) enum Event {
    LogPrefixUpdated { new_head: LogPosition },
    LogSuffixLoaded(LogSuffix),
    LogSuffixDeleted,
}

type BoxFuture<T> = Box<dyn Future<Item = T, Error = Error> + Send + 'static>;

fn into_box_future<F>(future: F) -> BoxFuture<F::Item>
where
    F: Future<Error = cannyls::Error> + Send + 'static,
{
    let future = future.map_err(|e| match *e.kind() {
        cannyls::ErrorKind::DeviceBusy => ErrorKind::Busy.takes_over(e).into(),
        cannyls::ErrorKind::InvalidInput => ErrorKind::InvalidInput.takes_over(e).into(),
        _ => ErrorKind::Other.takes_over(e).into(),
    });
    Box::new(future)
}

#[derive(Debug, PartialEq, Eq)]
enum Phase {
    Started,
    Initializing,
    Initialized,
}

/// Metrics for `storage`.
#[derive(Debug, Clone)]
pub struct StorageMetrics {
    pub(crate) load_log_duration_seconds: Histogram,
    pub(crate) save_log_duration_seconds: Histogram,
    pub(crate) load_log_prefix_duration_seconds: Histogram,
    pub(crate) save_log_prefix_duration_seconds: Histogram,
    pub(crate) load_log_suffix_duration_seconds: Histogram,
    pub(crate) save_log_suffix_duration_seconds: Histogram,
    pub(crate) load_ballot_duration_seconds: Histogram,
    pub(crate) save_ballot_duration_seconds: Histogram,
}

impl StorageMetrics {
    /// Makes a new `StorageMetrics` instance.
    pub fn new() -> Self {
        let mut builder = MetricBuilder::new();
        builder.namespace("frugalos_raft").subsystem("storage");
        let load_log_duration_seconds = make_histogram(
            builder
                .histogram("load_log_duration_seconds")
                .help("Log loading duration"),
        );
        let save_log_duration_seconds = make_histogram(
            builder
                .histogram("save_log_duration_seconds")
                .help("Log saving duration"),
        );
        let load_log_prefix_duration_seconds = make_histogram(
            builder
                .histogram("load_log_prefix_duration_seconds")
                .help("LogPrefix loading duration"),
        );
        let save_log_prefix_duration_seconds = make_histogram(
            builder
                .histogram("save_log_prefix_duration_seconds")
                .help("LogPrefix saving duration"),
        );
        let load_log_suffix_duration_seconds = make_histogram(
            builder
                .histogram("load_log_suffix_duration_seconds")
                .help("LogSuffix loading duration"),
        );
        let save_log_suffix_duration_seconds = make_histogram(
            builder
                .histogram("save_log_suffix_duration_seconds")
                .help("LogSuffix saving duration"),
        );
        let load_ballot_duration_seconds = make_histogram(
            builder
                .histogram("load_ballot_duration_seconds")
                .help("Ballot loading duration"),
        );
        let save_ballot_duration_seconds = make_histogram(
            builder
                .histogram("save_ballot_duration_seconds")
                .help("Ballot saving duration"),
        );
        Self {
            load_log_duration_seconds,
            save_log_duration_seconds,
            load_log_prefix_duration_seconds,
            save_log_prefix_duration_seconds,
            load_log_suffix_duration_seconds,
            save_log_suffix_duration_seconds,
            load_ballot_duration_seconds,
            save_ballot_duration_seconds,
        }
    }
}

impl Default for StorageMetrics {
    fn default() -> Self {
        Self::new()
    }
}

fn make_histogram(builder: &mut HistogramBuilder) -> Histogram {
    builder
        .bucket(0.0001)
        .bucket(0.0005)
        .bucket(0.001)
        .bucket(0.005)
        .bucket(0.01)
        .bucket(0.05)
        .bucket(0.1)
        .bucket(0.5)
        .bucket(1.0)
        .bucket(5.0)
        .bucket(10.0)
        .bucket(50.0)
        .finish()
        .expect("Never fails")
}

/// Deletes both the log prefix part and the log suffix part.
pub enum ClearLog {
    /// Deletes the log.
    DeleteLog(DeleteLog),
    /// Skips log deletion.
    Skip,
}

impl ClearLog {
    /// Creates an instance that clears the log.
    ///
    /// After the log has been cleared, it is safest and preferable to create
    /// a fresh `Storage` and reinitialize its state, so this takes ownership
    /// of the old `Storage`.
    pub fn new(mut storage: Storage) -> Self {
        let future = storage.delete_log();
        ClearLog::DeleteLog(future)
    }

    /// Skips log deletion.
    pub fn skip() -> Self {
        ClearLog::Skip
    }
}

impl Future for ClearLog {
    type Item = ();
    type Error = Error;

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match self {
            ClearLog::DeleteLog(future) => Ok(track!(future.poll())?),
            ClearLog::Skip => Ok(Async::Ready(())),
        }
    }
}
new_head))?; } Event::LogSuffixLoa
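The initialization gate in mod.rs above, shown in isolation (a standalone sketch of the same compare-exchange pattern, not additional frugalos code): at most one caller observes `true` until `release` runs.

use std::sync::atomic::{AtomicUsize, Ordering};

static LOCK: AtomicUsize = AtomicUsize::new(0);

fn try_acquire() -> bool {
    // The 0 -> 1 transition succeeds for exactly one caller at a time.
    LOCK.compare_exchange(0, 1, Ordering::SeqCst, Ordering::SeqCst).is_ok()
}

fn release() {
    LOCK.fetch_sub(1, Ordering::SeqCst);
}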
auth.rs
use anyhow::{Error, Result};
use axum::http::header;
use hashbrown::HashMap;
use headers::HeaderMapExt;
use hyper::http::{HeaderValue, Request, Response, StatusCode};
use parking_lot::Mutex;
use rand::distributions::Alphanumeric;
use rand::Rng;
use serde::{Deserialize, Serialize};
use sqlx::{Connection, Row, SqliteConnection};
use tokio::fs;
use tower_http::auth::AuthorizeRequest;

/// A set of flags determining permissions.
pub struct AuthFlags;

impl AuthFlags {
    /// Allows the user to send search requests.
    pub const SEARCH: u32 = 1 << 0;

    /// Allows the user to add / remove and get targeted docs.
    pub const MODIFY_DOCUMENTS: u32 = 1 << 1;

    /// Allows the user to create and remove indexes.
    pub const MODIFY_INDEXES: u32 = 1 << 2;
}

#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Permissions {
    Search,

    #[serde(rename = "documents")]
    ModifyDocuments,

    #[serde(rename = "indexes")]
    ModifyIndexes,
}

impl Permissions {
    pub fn as_flag(&self) -> u32 {
        match self {
            Self::Search => AuthFlags::SEARCH,
            Self::ModifyDocuments => AuthFlags::MODIFY_DOCUMENTS,
            Self::ModifyIndexes => AuthFlags::MODIFY_INDEXES,
        }
    }

    pub fn get_flags_from_map(map: &HashMap<Permissions, bool>) -> u32 {
        let mut total = 0;
        for (key, enabled) in map.iter() {
            if *enabled {
                total |= key.as_flag();
            }
        }

        total
    }
}

pub type TokenInfo = (String, u32);

/// A manager around a SQLite database and a hash table.
///
/// This is used to manage any authorization keys.
/// While this makes the server more secure and is *highly* recommended
/// for production use, it is not the world's most advanced security,
/// so it is still not recommended to expose this server to the public
/// internet even though you have it behind auth.
pub struct AuthManager {
    cached_values: Mutex<evmap::WriteHandle<String, TokenInfo>>,
    storage: tokio::sync::Mutex<SqliteConnection>,
}

impl AuthManager {
    /// Connects to the SQLite database and loads any existing credentials.
    pub async fn connect(dir: &str) -> Result<(Self, evmap::ReadHandle<String, TokenInfo>)> {
        fs::create_dir_all(dir).await?;

        let fp = format!("{}/data.db", dir);
        {
            debug!("[ AUTHORIZATION ] ensuring database file exists");
            let file = fs::OpenOptions::new()
                .create(true)
                .write(true)
                .open(&fp)
                .await?;
            file.set_len(0).await?;
        }

        let (reader, writer) = evmap::new();
        let cached_values = Mutex::new(writer);

        let conn = sqlx::SqliteConnection::connect(&fp).await?;
        debug!("[ AUTHORIZATION ] connected to database");
        let storage = tokio::sync::Mutex::new(conn);

        let inst = Self {
            cached_values,
            storage,
        };

        inst.ensure_table().await?;
        inst.load_all().await?;

        Ok((inst, reader))
    }

    async fn ensure_table(&self) -> Result<()> {
        let mut lock = self.storage.lock().await;
        sqlx::query("CREATE TABLE IF NOT EXISTS access_tokens (token TEXT, username TEXT, permissions INTEGER)")
            .execute(&mut *lock)
            .await?;

        Ok(())
    }

    /// Loads all previously saved data / changes.
    ///
    /// This assumes that the cache is empty / not populated already with
    /// data from the db.
    async fn load_all(&self) -> Result<()> {
        let rows = {
            let mut lock = self.storage.lock().await;
            sqlx::query("SELECT token, username, permissions FROM access_tokens")
                .fetch_all(&mut *lock)
                .await?
        };

        let mut lock = self.cached_values.lock();
        for row in rows {
            let token: String = row.get("token");
            let username: String = row.get("username");
            let permissions: u32 = row.get("permissions");

            (*lock).insert(token, (username, permissions));
        }
        (*lock).refresh();

        Ok(())
    }

    /// Creates and registers an access token with the given user marking and
    /// permission flags.
    pub async fn create_token(&self, user: String, permissions: u32) -> Result<String> {
        let token: String = rand::thread_rng()
            .sample_iter(&Alphanumeric)
            .take(64)
            .map(char::from)
            .collect();

        {
            let mut lock = self.storage.lock().await;
            sqlx::query(
                "INSERT INTO access_tokens (token, username, permissions) VALUES (?, ?, ?)",
            )
            .bind(token.clone())
            .bind(user.clone())
            .bind(permissions)
            .execute(&mut *lock)
            .await?;
        }

        {
            let mut lock = self.cached_values.lock();
            (*lock).insert(token.clone(), (user, permissions));
            (*lock).refresh();
        }

        let search = (permissions & AuthFlags::SEARCH) != 0;
        let index = (permissions & AuthFlags::MODIFY_INDEXES) != 0;
        let documents = (permissions & AuthFlags::MODIFY_DOCUMENTS) != 0;

        info!(
            "[ AUTHORIZATION ] created access token with permissions SEARCH={}, MODIFY_INDEXES={}, MODIFY_DOCUMENTS={}",
            search, index, documents,
        );

        Ok(token)
    }

    /// Revokes all created access tokens.
    pub async fn revoke_all(&self) -> Result<()> {
        {
            let mut lock = self.storage.lock().await;
            sqlx::query("DELETE FROM access_tokens")
                .execute(&mut *lock)
                .await?;
        }

        {
            let mut lock = self.cached_values.lock();
            (*lock).purge();
            (*lock).refresh();
        }

        info!("[ AUTHORIZATION ] revoked all access tokens");

        Ok(())
    }

    /// Revokes a created access token.
    pub async fn revoke_token(&self, token: String) -> Result<()>
    /// Either sets or unsets permissions and updates them both in cache
    /// and on disk.
    pub async fn modify_permissions(&self, token: &str, permissions: u32) -> Result<()> {
        let old = {
            let mut lock = self.cached_values.lock();

            let (username, old) = {
                if let Some(user) = (*lock).get_one(token) {
                    let (name, old) = user.as_ref();
                    (name.clone(), *old)
                } else {
                    return Err(Error::msg("that token is not registered"));
                }
            };

            (*lock).update(token.into(), (username.clone(), permissions));
            (*lock).refresh();

            old
        };

        {
            let mut lock = self.storage.lock().await;
            sqlx::query("UPDATE access_tokens SET permissions = ? WHERE token = ?")
                .bind(permissions)
                .bind(token.clone())
                .execute(&mut *lock)
                .await?;
        }

        let old_search = (old & AuthFlags::SEARCH) != 0;
        let old_index = (old & AuthFlags::MODIFY_INDEXES) != 0;
        let old_documents = (old & AuthFlags::MODIFY_DOCUMENTS) != 0;

        let search = (permissions & AuthFlags::SEARCH) != 0;
        let index = (permissions & AuthFlags::MODIFY_INDEXES) != 0;
        let documents = (permissions & AuthFlags::MODIFY_DOCUMENTS) != 0;

        info!(
            "[ AUTHORIZATION ] updated access token permissions from SEARCH={}, MODIFY_INDEXES={}, MODIFY_DOCUMENTS={} to SEARCH={}, MODIFY_INDEXES={}, MODIFY_DOCUMENTS={}",
            old_search, old_index, old_documents, search, index, documents,
        );

        Ok(())
    }
}

pub type TokenReader = evmap::ReadHandle<String, TokenInfo>;

/// An authorization layer that watches a map for token keys.
///
/// If enabled, this will reject any requests that don't have the auth
/// token or don't have the right permission flags assigned to them.
#[derive(Debug, Clone)]
pub struct UserAuthIfEnabled {
    enabled: bool,
    tokens: TokenReader,
    reject_msg: bytes::Bytes,
    required_permissions: u32,
}

impl UserAuthIfEnabled {
    pub fn bearer<T: Serialize>(
        tokens: TokenReader,
        required_permissions: u32,
        enabled: bool,
        reject_msg: &T,
    ) -> Result<Self> {
        let msg = serde_json::to_vec(&json!({
            "status": StatusCode::UNAUTHORIZED.as_u16(),
            "data": reject_msg
        }))?;
        let reject_msg = bytes::Bytes::copy_from_slice(&msg);

        Ok(Self {
            enabled,
            tokens,
            reject_msg,
            required_permissions,
        })
    }
}

impl AuthorizeRequest for UserAuthIfEnabled {
    type Output = ();
    type ResponseBody = axum::body::BoxBody;

    fn authorize<B>(&mut self, request: &Request<B>) -> Option<Self::Output> {
        if !self.enabled {
            return Some(());
        };

        let header = match request.headers().get(header::AUTHORIZATION) {
            None => return None,
            Some(header) => header,
        };

        // Strip the leading 'Bearer ' prefix to get the raw token.
        let buffer = header.as_bytes();
        let token = String::from_utf8_lossy(&buffer[7..]);

        let retrieved = match self.tokens.get_one(token.as_ref()) {
            None => return None,
            Some(values) => values,
        };

        let (username, permissions) = retrieved.as_ref();
        let path = request.uri().path();

        if (*permissions & self.required_permissions) == 0 {
            warn!("[ AUTHORIZATION ] user '{}' attempted an operation with incorrect permissions! Resource path: {:?}", username, path);
            return None;
        }

        debug!(
            "[ AUTHORIZATION ] user {} succeeded permissions check for resource: {:?}",
            username, path
        );

        Some(())
    }

    fn unauthorized_response<B>(&mut self, _request: &Request<B>) -> Response<Self::ResponseBody> {
        let body = axum::body::box_body(hyper::Body::from(self.reject_msg.clone()));
        let mut res = Response::new(body);
        res.headers_mut().typed_insert(headers::ContentType::json());
        *res.status_mut() = StatusCode::UNAUTHORIZED;

        res
    }
}

/// An authorization layer for the master API key.
///
/// This is used to create / delete authorization keys.
#[derive(Debug, Clone)] pub struct SuperUserAuthIfEnabled { enabled: bool, auth: HeaderValue, reject_msg: bytes::Bytes, } impl SuperUserAuthIfEnabled { pub fn bearer<T: Serialize>(token: &str, enabled: bool, reject_msg: &T) -> Result<Self> { let msg = serde_json::to_vec(&json!({ "status": StatusCode::UNAUTHORIZED.as_u16(), "data": reject_msg }))?; let reject_msg = bytes::Bytes::copy_from_slice(&msg); let auth = HeaderValue::from_str(&format!("Bearer {}", token)).unwrap(); Ok(Self { enabled, auth, reject_msg, }) } } impl AuthorizeRequest for SuperUserAuthIfEnabled { type Output = (); type ResponseBody = axum::body::BoxBody; fn authorize<B>(&mut self, request: &Request<B>) -> Option<Self::Output> { if !self.enabled { return Some(()); }; if let Some(actual) = request.headers().get(header::AUTHORIZATION) { (actual == self.auth).then(|| ()) } else { None } } fn unauthorized_response<B>(&mut self, _request: &Request<B>) -> Response<Self::ResponseBody> { let body = axum::body::box_body(hyper::Body::from(self.reject_msg.clone())); let mut res = Response::new(body); res.headers_mut().typed_insert(headers::ContentType::json()); *res.status_mut() = StatusCode::UNAUTHORIZED; res } }
{ { let mut lock = self.storage.lock().await; sqlx::query("DELETE FROM access_tokens WHERE token = ?") .bind(token.clone()) .execute(&mut *lock) .await?; } { let mut lock = self.cached_values.lock(); (*lock).clear(token.clone()); (*lock).refresh(); } info!("[ AUTHORIZATION ] revoked access token"); Ok(()) }
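The permission checks in this file all reduce to one bitwise test; in isolation (illustrative snippet):

// A token is accepted when its flags intersect the required flags.
let granted = AuthFlags::SEARCH | AuthFlags::MODIFY_DOCUMENTS;
assert!((granted & AuthFlags::SEARCH) != 0);          // allowed
assert!((granted & AuthFlags::MODIFY_INDEXES) == 0);  // would be rejected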
config.go
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package database import ( "fmt" "strings" "time" "github.com/google/exposure-notifications-server/pkg/keys" "github.com/google/exposure-notifications-server/pkg/secrets" "github.com/sethvargo/go-envconfig" ) // Config represents the env var based configuration for database connections. type Config struct { Name string `env:"DB_NAME" json:",omitempty"` User string `env:"DB_USER" json:",omitempty"` Host string `env:"DB_HOST, default=localhost" json:",omitempty"` Port string `env:"DB_PORT, default=5432" json:",omitempty"` SSLMode string `env:"DB_SSLMODE, default=require" json:",omitempty"` ConnectionTimeout uint `env:"DB_CONNECT_TIMEOUT" json:",omitempty"` Password string `env:"DB_PASSWORD" json:"-"` // ignored by zap's JSON formatter SSLCertPath string `env:"DB_SSLCERT" json:",omitempty"` SSLKeyPath string `env:"DB_SSLKEY" json:",omitempty"` SSLRootCertPath string `env:"DB_SSLROOTCERT" json:",omitempty"` // Debug is a boolean that indicates whether the database should log SQL // commands. Debug bool `env:"DB_DEBUG,default=false"` // CacheTTL is the amount of time to cache values. This is enabled on a // per-query basis. Not all query results are cached. CacheTTL time.Duration `env:"DB_CACHE_TTL, default=5m" json:",omitempty"` // Keys is the key management configuration. This is used to resolve values // that are encrypted via a KMS. Keys keys.Config // EncryptionKey is the reference to an encryption/decryption key to use when // for application-layer encryption before values are persisted to the // database. EncryptionKey string `env:"DB_ENCRYPTION_KEY,required"` // APIKeyDatabaseHMAC is the HMAC key to use for API keys before storing them // in the database. APIKeyDatabaseHMAC envconfig.Base64Bytes `env:"DB_APIKEY_DATABASE_KEY,required" json:"-"` // APIKeySignatureHMAC is the HMAC key to sign API keys before returning them // to the requestor. APIKeySignatureHMAC envconfig.Base64Bytes `env:"DB_APIKEY_SIGNATURE_KEY,required" json:"-"` // VerificationCodeDatabaseHMAC is the HMAC key to hash codes before storing // them in the database. VerificationCodeDatabaseHMAC envconfig.Base64Bytes `env:"DB_VERIFICATION_CODE_DATABASE_KEY,required"` // Secrets is the secret configuration. This is used to resolve values that // are actually pointers to secrets before returning them to the caller. The // table implementation is the source of truth for which values are secrets // and which are plaintext. Secrets secrets.Config } // ConnectionString returns the postgresql connection string based on this config. // // While this package could be adapted to different databases easily, this file // and method in particular would need to change. 
func (c *Config) ConnectionString() string { vals := dbValues(c) var p []string for k, v := range vals { p = append(p, fmt.Sprintf("%s=%s", k, v)) } return strings.Join(p, " ") } func dbValues(config *Config) map[string]string { p := map[string]string{} setIfNotEmpty(p, "dbname", config.Name) setIfNotEmpty(p, "user", config.User) setIfNotEmpty(p, "host", config.Host) setIfNotEmpty(p, "port", config.Port) setIfNotEmpty(p, "sslmode", config.SSLMode) setIfPositive(p, "connect_timeout", config.ConnectionTimeout) setIfNotEmpty(p, "password", config.Password) setIfNotEmpty(p, "sslcert", config.SSLCertPath) setIfNotEmpty(p, "sslkey", config.SSLKeyPath) setIfNotEmpty(p, "sslrootcert", config.SSLRootCertPath) return p } func setIfNotEmpty(m map[string]string, key, val string)
func setIfPositive(m map[string]string, key string, val uint) { if val > 0 { m[key] = fmt.Sprintf("%d", val) } }
{ if val != "" { m[key] = val } }
de1soc.py
# # This file is part of LiteX-Boards. # # Copyright (c) 2019 Antony Pavlov <[email protected]> # SPDX-License-Identifier: BSD-2-Clause from litex.build.generic_platform import * from litex.build.altera import AlteraPlatform from litex.build.altera.programmer import USBBlaster # IOs ---------------------------------------------------------------------------------------------- _io = [ ("clk50", 0, Pins("AF14"), IOStandard("3.3-V LVTTL")), ("serial", 0, Subsignal("tx", Pins("AC18"), IOStandard("3.3-V LVTTL")), # JP1 GPIO[0] Subsignal("rx", Pins("Y17"), IOStandard("3.3-V LVTTL")) # JP1 GPIO[1] ), ("sdram_clock", 0, Pins("AH12"), IOStandard("3.3-V LVTTL")), ("sdram", 0, Subsignal("a", Pins( "AK14 AH14 AG15 AE14 AB15 AC14 AD14 AF15", "AH15 AG13 AG12 AH13 AJ14")), Subsignal("ba", Pins("AF13 AJ12")), Subsignal("cs_n", Pins("AG11")), Subsignal("cke", Pins("AK13")), Subsignal("ras_n", Pins("AE13")), Subsignal("cas_n", Pins("AF11")), Subsignal("we_n", Pins("AA13")), Subsignal("dq", Pins( "AK6 AJ7 AK7 AK8 AK9 AG10 AK11 AJ11", "AH10 AJ10 AJ9 AH9 AH8 AH7 AJ6 AJ5")), Subsignal("dm", Pins("AB13 AK12")), IOStandard("3.3-V LVTTL") ), ] # Platform ----------------------------------------------------------------------------------------- class Platform(AlteraPlatform): default_clk_name = "clk50" default_clk_period = 1e9/50e6 def __init__(self): AlteraPlatform.__init__(self, "5CSEMA5F31C6", _io) def create_programmer(self): return USBBlaster() def
(self, fragment): AlteraPlatform.do_finalize(self, fragment) self.add_period_constraint(self.lookup_request("clk50", loose=True), 1e9/50e6)
do_finalize
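A minimal usage sketch (assumes a LiteX build environment; the resource names come from the `_io` list above):

platform = Platform()
clk50 = platform.request("clk50")           # pin AF14
serial = platform.request("serial")         # tx/rx on JP1 GPIO[0]/GPIO[1]
programmer = platform.create_programmer()   # USBBlaster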
appl-ext.go
package directory import ( "fmt" "io" "os" "github.com/aserto-dev/aserto/pkg/cc" "github.com/aserto-dev/aserto/pkg/jsonx" "github.com/aserto-dev/aserto/pkg/pb" dir "github.com/aserto-dev/go-grpc/aserto/authorizer/directory/v1" "github.com/pkg/errors" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/structpb" ) func OutputJSONResults(results []string, writer io.Writer) error { if results == nil { results = []string{} } return jsonx.OutputJSON(writer, results) } type ListUserAppsCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` } func (cmd *ListUserAppsCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } resp, err := client.Directory.ListUserApplications( c.Context, &dir.ListUserApplicationsRequest{Id: identity.Id}, ) if err != nil { return err } return OutputJSONResults(resp.Results, c.UI.Output()) } type SetUserAppCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` } func (cmd *SetUserAppCmd) Run(c *cc.CommonCtx) error { return status.Error(codes.Unimplemented, "not implemented") } type DelUserAppCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` } func (cmd *DelUserAppCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } _, err = client.Directory.DeleteUserApplication( c.Context, &dir.DeleteUserApplicationRequest{ Id: identity.Id, Name: cmd.AppName, }, ) if err != nil { return err } return nil } type GetApplPropsCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` } func (cmd *GetApplPropsCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } resp, err := client.Directory.GetApplProperties( c.Context, &dir.GetApplPropertiesRequest{ Id: identity.Id, Name: cmd.AppName, }, ) if err != nil { return err } return jsonx.OutputJSONPB(c.UI.Output(), resp.Results) } type GetApplRolesCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` } func (cmd *GetApplRolesCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } resp, err := client.Directory.GetApplRoles( c.Context, &dir.GetApplRolesRequest{ Id: identity.Id, Name: cmd.AppName, }, ) if err != nil { return err } return OutputJSONResults(resp.Results, c.UI.Output()) } type GetApplPermsCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` } func (cmd *GetApplPermsCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID)
if err != nil { return err } resp, err := client.Directory.GetApplPermissions( c.Context, &dir.GetApplPermissionsRequest{ Id: identity.Id, Name: cmd.AppName, }, ) if err != nil { return err } return OutputJSONResults(resp.Results, c.UI.Output()) } type SetApplPropCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` Key string `arg:"key" name:"key" required:"" help:"property key"` Value structpb.Value `xor:"group" required:"" name:"value" help:"set property value using json data from argument"` Stdin bool `xor:"group" required:"" name:"stdin" help:"set property value using json data from --stdin"` File string `xor:"group" required:"" name:"file" type:"existingfile" help:"set property value using json data from input file"` } func (cmd *SetApplPropCmd) Run(c *cc.CommonCtx) error { var ( value *structpb.Value buf io.Reader err error ) client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } switch { case cmd.Stdin: fmt.Fprintf(c.UI.Err(), "reading stdin\n") buf = os.Stdin value, err = pb.BufToValue(buf) if err != nil { return errors.Wrapf(err, "unmarshal stdin") } case cmd.File != "": fmt.Fprintf(c.UI.Err(), "reading file [%s]\n", cmd.File) buf, err = os.Open(cmd.File) if err != nil { return errors.Wrapf(err, "opening file [%s]", cmd.File) } value, err = pb.BufToValue(buf) if err != nil { return errors.Wrapf(err, "unmarshal file [%s]", cmd.File) } default: value = &cmd.Value } fmt.Fprintf(c.UI.Err(), "set property [%s]=[%s]\n", cmd.Key, value.String()) if _, err := client.Directory.SetApplProperty( c.Context, &dir.SetApplPropertyRequest{ Id: identity.Id, Name: cmd.AppName, Key: cmd.Key, Value: value, }, ); err != nil { return err } return nil } type SetApplRoleCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` Key string `arg:"role" name:"role" required:"" help:"role name"` } func (cmd *SetApplRoleCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } if _, err := client.Directory.SetApplRole( c.Context, &dir.SetApplRoleRequest{ Id: identity.Id, Name: cmd.AppName, Role: cmd.Key, }, ); err != nil { return err } return nil } type SetApplPermCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` Key string `arg:"perm" name:"perm" required:"" help:"permission name"` } func (cmd *SetApplPermCmd) Run(c *cc.CommonCtx) error { client, err := c.AuthorizerClient() if err != nil { return err } idResp, err := client.Directory.GetIdentity(c.Context, &dir.GetIdentityRequest{ Identity: cmd.UserID, }) if err != nil { return errors.Wrapf(err, "resolve identity") } if _, err := client.Directory.SetApplPermission(c.Context, &dir.SetApplPermissionRequest{ Id: idResp.Id, Name: cmd.AppName, Permission: cmd.Key, }); err != nil { return err } return nil } type DelApplPropCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` Key string `arg:"prop" name:"prop" required:"" help:"property name"` } func (cmd *DelApplPropCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } fmt.Fprintf(c.UI.Err(), "remove 
property %s\n", cmd.Key) if _, err := client.Directory.DeleteApplProperty( c.Context, &dir.DeleteApplPropertyRequest{ Id: identity.Id, Name: cmd.AppName, Key: cmd.Key, }, ); err != nil { return err } return nil } type DelApplRoleCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` Key string `arg:"role" name:"role" required:"" help:"role name"` } func (cmd *DelApplRoleCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } fmt.Fprintf(c.UI.Err(), "remove role %s\n", cmd.Key) if _, err := client.Directory.DeleteApplRole( c.Context, &dir.DeleteApplRoleRequest{ Id: identity.Id, Name: cmd.AppName, Role: cmd.Key, }, ); err != nil { return err } return nil } type DelApplPermCmd struct { UserID string `arg:"id" name:"id" required:"" help:"user id or identifier"` AppName string `arg:"name" name:"name" required:"" help:"application name"` Key string `arg:"perm" name:"perm" required:"" help:"permission name"` } func (cmd *DelApplPermCmd) Run(c *cc.CommonCtx) error { client, identity, err := NewClientWithIdentity(c, cmd.UserID) if err != nil { return err } fmt.Fprintf(c.UI.Err(), "remove permission %s\n", cmd.Key) if _, err := client.Directory.DeleteApplPermission( c.Context, &dir.DeleteApplPermissionRequest{ Id: identity.Id, Name: cmd.AppName, Permission: cmd.Key, }, ); err != nil { return err } return nil }
PdbUtils.py
import os import re import sys import time import datetime from .MiscUtils import commands_get_status_output try: long() except Exception: long = int from . import PLogger from .LocalJobSpec import LocalJobSpec from .LocalJobsetSpec import LocalJobsetSpec class PdbProxy: # constructor def __init__(self,verbose=False): # database engine self.engine = 'sqlite3' # version of database schema self.version = '0_0_1' # database file name self.filename = 'pandajob.db' # database dir self.database_dir = os.path.expanduser(os.environ['PANDA_CONFIG_ROOT']) # full path of database file self.database = '%s/%s' % (self.database_dir,self.filename) # table name self.tablename = 'jobtable_%s' % self.version # verbose self.verbose = verbose # connection self.con = None # logger self.log = PLogger.getPandaLogger() # set verbose def setVerbose(self,verbose): # verbose self.verbose = verbose # execute SQL via the sqlite3 command-line client def execute(self,sql,var={}): # logger tmpLog = PLogger.getPandaLogger() # expand variables for tmpKey in var: tmpVal = var[tmpKey] sql = sql.replace(tmpKey,str(tmpVal)) # construct command com = '%s %s "%s"' % (self.engine,self.database,sql) if self.verbose: tmpLog.debug("DB Req : " + com) # execute nTry = 5 status = 0 for iTry in range(nTry): if self.verbose: tmpLog.debug(" Try : %s/%s" % (iTry,nTry)) status,output = commands_get_status_output(com) status %= 255 if status == 0: break if iTry+1 < nTry: time.sleep(2) # return if status != 0: tmpLog.error(status) tmpLog.error(output) return False,output else: if self.verbose: tmpLog.debug(" Ret : " + output) outList = output.split('\n') # remove '' try: outList.remove('') except Exception: pass # remove junk messages ngStrings = ['Loading resources from'] for tmpStr in tuple(outList): # look for NG strings flagNG = False for ngStr in ngStrings: match = re.search(ngStr,tmpStr,re.I) if match is not None: flagNG = True break # remove if flagNG: try: outList.remove(tmpStr) except Exception: pass return True,outList # execute SQL directly via the sqlite3 module def execute_direct(self, sql, var=None, fetch=False): if self.con is None: import sqlite3 self.con = sqlite3.connect(self.database, check_same_thread=False) if self.verbose: self.log.debug("DB Req : {0} var={1}".format(sql, str(var))) cur = self.con.cursor() try: if var is None: var = {} cur.execute(sql, var) retVal = True except Exception: retVal = False if not self.verbose: self.log.error("DB Req : {0} var={1}".format(sql, str(var))) err_type, err_value = sys.exc_info()[:2] err_str = "{0} {1}".format(err_type.__name__, err_value) self.log.error(err_str) if self.verbose: self.log.debug(retVal) outList = [] if retVal: if fetch: outList = cur.fetchall() if self.verbose: for item in outList: self.log.debug(" Ret : " + str(item)) self.con.commit() return retVal, outList # remove old database def deleteDatabase(self): commands_get_status_output('rm -f %s' % self.database) # initialize database def initialize(self): # import sqlite3 # check if sqlite3 is available com = 'which %s' % self.engine status,output = commands_get_status_output(com) if status != 0: errstr = "\n\n" errstr += "ERROR : %s is not available in PATH\n\n" % self.engine errstr += "There are some possible solutions\n" errstr += " * run this application under Athena runtime with Release 14 or higher. e.g.,\n" errstr += " $ source setup.sh -tag=14.2.24,32,setup\n" errstr += " $ source .../etc/panda/panda_setup.sh\n\n" errstr += " * set PATH and LD_LIBRARY_PATH to include %s. e.g., at CERN\n" % self.engine errstr += " $ export PATH=/afs/cern.ch/sw/lcg/external/sqlite/3.4.0/slc3_ia32_gcc323/bin:$PATH\n" errstr += " $ export LD_LIBRARY_PATH=/afs/cern.ch/sw/lcg/external/sqlite/3.4.0/slc3_ia32_gcc323/lib:$LD_LIBRARY_PATH\n" errstr += " $ source .../etc/panda/panda_setup.sh\n\n" errstr += " * install %s from the standard SL4 repository. e.g.,\n" % self.engine errstr += " $ yum install %s\n\n" % self.engine errstr += " * use SLC5\n" raise RuntimeError(errstr) # create dir for DB if not os.path.exists(self.database_dir): os.makedirs(self.database_dir) # nothing to do if the table already exists if self.checkTable(): return # create table self.createTable() return # check table def checkTable(self): # get tables retS,retV = self.execute('.table') if not retS: raise RuntimeError("cannot get tables") # whether the table already exists if retV == []: return False if self.tablename not in retV[-1].split(): return False # check schema self.checkSchema() return True # check schema def
(self,noAdd=False): # get colum names retS,retV = self.execute('PRAGMA table_info(%s)' % self.tablename) if not retS: raise RuntimeError("cannot get table_info") # parse columns = [] for line in retV: items = line.split('|') if len(items) > 1: columns.append(items[1]) # check for tmpC in LocalJobSpec.appended: tmpA = LocalJobSpec.appended[tmpC] if tmpC not in columns: if noAdd: raise RuntimeError("%s not found in database schema" % tmpC) # add column retS,retV = self.execute("ALTER TABLE %s ADD COLUMN '%s' %s" % \ (self.tablename,tmpC,tmpA)) if not retS: raise RuntimeError("cannot add %s to database schema" % tmpC) if noAdd: return # check whole schema just in case self.checkSchema(noAdd=True) # create table def createTable(self): # ver 0_1_1 sql = "CREATE TABLE %s (" % self.tablename sql += "'id' INTEGER PRIMARY KEY," sql += "'JobID' INTEGER," sql += "'PandaID' TEXT," sql += "'jobStatus' TEXT," sql += "'site' VARCHAR(128)," sql += "'cloud' VARCHAR(20)," sql += "'jobType' VARCHAR(20)," sql += "'jobName' VARCHAR(128)," sql += "'inDS' TEXT," sql += "'outDS' TEXT," sql += "'libDS' VARCHAR(255)," sql += "'jobParams' TEXT," sql += "'retryID' INTEGER," sql += "'provenanceID' INTEGER," sql += "'creationTime' TIMESTAMP," sql += "'lastUpdate' TIMESTAMP," sql += "'dbStatus' VARCHAR(20)," sql += "'buildStatus' VARCHAR(20)," sql += "'commandToPilot' VARCHAR(20)," for tmpC in LocalJobSpec.appended: tmpA = LocalJobSpec.appended[tmpC] sql += "'%s' %s," % (tmpC,tmpA) sql = sql[:-1] sql += ")" # execute retS,retV = self.execute(sql) if not retS: raise RuntimeError("failed to create %s" % self.tablename) # confirm if not self.checkTable(): raise RuntimeError("failed to confirm %s" % self.tablename) # convert Panda jobs to DB representation def convertPtoD(pandaJobList,pandaIDstatus,localJob=None,fileInfo={},pandaJobForSiteID=None): statusOnly = False if localJob is not None: # update status only ddata = localJob statusOnly = True else: # create new spec ddata = LocalJobSpec() # sort by PandaID pandIDs = list(pandaIDstatus) pandIDs.sort() pStr = '' sStr = '' ddata.commandToPilot = '' for tmpID in pandIDs: # PandaID pStr += '%s,' % tmpID # status sStr += '%s,' % pandaIDstatus[tmpID][0] # commandToPilot if pandaIDstatus[tmpID][1] == 'tobekilled': ddata.commandToPilot = 'tobekilled' pStr = pStr[:-1] sStr = sStr[:-1] # job status ddata.jobStatus = sStr # PandaID ddata.PandaID = pStr # get panda Job pandaJob = None if pandaJobList != []: # look for buildJob since it doesn't have the first PandaID when retried for pandaJob in pandaJobList: if pandaJob.prodSourceLabel == 'panda': break elif pandaJobForSiteID is not None: pandaJob = pandaJobForSiteID # extract libDS if pandaJob is not None: if pandaJob.prodSourceLabel == 'panda': # build Jobs ddata.buildStatus = pandaJob.jobStatus for tmpFile in pandaJob.Files: if tmpFile.type == 'output': ddata.libDS = tmpFile.dataset break else: # noBuild or libDS ddata.buildStatus = '' for tmpFile in pandaJob.Files: if tmpFile.type == 'input' and tmpFile.lfn.endswith('.lib.tgz'): ddata.libDS = tmpFile.dataset break # release ddata.releaseVar = pandaJob.AtlasRelease # cache tmpCache = re.sub('^[^-]+-*','',pandaJob.homepackage) tmpCache = re.sub('_','-',tmpCache) ddata.cacheVar = tmpCache # return if update status only if statusOnly: # build job if ddata.buildStatus != '': ddata.buildStatus = sStr.split(',')[0] # set computingSite mainly for rebrokerage if pandaJobForSiteID is not None: ddata.site = pandaJobForSiteID.computingSite ddata.nRebro = 
pandaJobForSiteID.specialHandling.split(',').count('rebro') + \ pandaJobForSiteID.specialHandling.split(',').count('sretry') # return return ddata # job parameters ddata.jobParams = pandaJob.metadata # extract datasets iDSlist = [] oDSlist = [] if fileInfo != {}: if 'inDS' in fileInfo: iDSlist = fileInfo['inDS'] if 'outDS' in fileInfo: oDSlist = fileInfo['outDS'] else: for pandaJob in pandaJobList: for tmpFile in pandaJob.Files: if tmpFile.type == 'input' and not tmpFile.lfn.endswith('.lib.tgz'): if tmpFile.dataset not in iDSlist: iDSlist.append(tmpFile.dataset) elif tmpFile.type == 'output' and not tmpFile.lfn.endswith('.lib.tgz'): if tmpFile.dataset not in oDSlist: oDSlist.append(tmpFile.dataset) # convert to string ddata.inDS = '' for iDS in iDSlist: ddata.inDS += '%s,' % iDS ddata.inDS = ddata.inDS[:-1] ddata.outDS = '' for oDS in oDSlist: ddata.outDS += '%s,' % oDS ddata.outDS = ddata.outDS[:-1] # job name ddata.jobName = pandaJob.jobName # creation time ddata.creationTime = pandaJob.creationTime # job type ddata.jobType = pandaJob.prodSeriesLabel # site ddata.site = pandaJob.computingSite # cloud ddata.cloud = pandaJob.cloud # job ID ddata.JobID = pandaJob.jobDefinitionID # retry ID ddata.retryID = 0 # provenance ID ddata.provenanceID = pandaJob.jobExecutionID # groupID ddata.groupID = pandaJob.jobsetID ddata.retryJobsetID = -1 if pandaJob.sourceSite not in ['NULL',None,'']: ddata.parentJobsetID = long(pandaJob.sourceSite) else: ddata.parentJobsetID = -1 # job type ddata.jobType = pandaJob.processingType # the number of rebrokerage actions ddata.nRebro = pandaJob.specialHandling.split(',').count('rebro') # jediTaskID ddata.jediTaskID = -1 # return return ddata # convert JediTask to DB representation def convertJTtoD(jediTaskDict,localJob=None): statusOnly = False if localJob is not None: # update status only ddata = localJob statusOnly = True else: # create new spec ddata = LocalJobSpec() # max IDs maxIDs = 20 # task status ddata.taskStatus = jediTaskDict['status'] # statistic ddata.jobStatus = jediTaskDict['statistics'] # PandaID ddata.PandaID = '' for tmpPandaID in jediTaskDict['PandaID'][:maxIDs]: ddata.PandaID += '%s,' % tmpPandaID ddata.PandaID = ddata.PandaID[:-1] if len(jediTaskDict['PandaID']) > maxIDs: ddata.PandaID += ',+%sIDs' % (len(jediTaskDict['PandaID'])-maxIDs) # merge status if 'mergeStatus' not in jediTaskDict or jediTaskDict['mergeStatus'] is None: ddata.mergeJobStatus = 'NA' else: ddata.mergeJobStatus = jediTaskDict['mergeStatus'] # merge PandaID ddata.mergeJobID = '' for tmpPandaID in jediTaskDict['mergePandaID'][:maxIDs]: ddata.mergeJobID += '%s,' % tmpPandaID ddata.mergeJobID = ddata.mergeJobID[:-1] if len(jediTaskDict['mergePandaID']) > maxIDs: ddata.mergeJobID += ',+%sIDs' % (len(jediTaskDict['mergePandaID'])-maxIDs) # return if update status only if statusOnly: return ddata # release ddata.releaseVar = jediTaskDict['transUses'] # cache if jediTaskDict['transHome'] is None: tmpCache = '' else: tmpCache = re.sub('^[^-]+-*','',jediTaskDict['transHome']) tmpCache = re.sub('_','-',tmpCache) ddata.cacheVar = tmpCache # job parameters try: if isinstance(jediTaskDict['cliParams'],unicode): ddata.jobParams = jediTaskDict['cliParams'].encode('utf_8') else: ddata.jobParams = jediTaskDict['cliParams'] # truncate ddata.jobParams = ddata.jobParams[:1024] except Exception: pass # input datasets try: # max number of datasets to show maxDS = 20 inDSs = jediTaskDict['inDS'].split(',') strInDS = '' # concatenate for tmpInDS in inDSs[:maxDS]: strInDS += "%s," % tmpInDS strInDS 
= strInDS[:-1] # truncate if len(inDSs) > maxDS: strInDS += ',+{0}DSs'.format(len(inDSs)-maxDS) ddata.inDS = strInDS except Exception: ddata.inDS = jediTaskDict['inDS'] # output datasets ddata.outDS = jediTaskDict['outDS'] # job name ddata.jobName = jediTaskDict['taskName'] # creation time ddata.creationTime = jediTaskDict['creationDate'] # job type ddata.jobType = jediTaskDict['processingType'] # site ddata.site = jediTaskDict['site'] # cloud ddata.cloud = jediTaskDict['cloud'] # job ID ddata.JobID = jediTaskDict['reqID'] # retry ID ddata.retryID = 0 # provenance ID ddata.provenanceID = 0 # groupID ddata.groupID = jediTaskDict['reqID'] # jediTaskID ddata.jediTaskID = jediTaskDict['jediTaskID'] # IDs for retry ddata.retryJobsetID = -1 ddata.parentJobsetID = -1 # the number of rebrokerage actions ddata.nRebro = 0 # return return ddata # instantiate database proxy pdbProxy = PdbProxy() # just initialize DB def initialzieDB(verbose=False,restoreDB=False): if restoreDB: pdbProxy.deleteDatabase() pdbProxy.initialize() pdbProxy.setVerbose(verbose) # insert job info to DB def insertJobDB(job,verbose=False): tmpLog = PLogger.getPandaLogger() # set update time job.lastUpdate = datetime.datetime.utcnow() # make sql sql1 = "INSERT INTO %s (%s) " % (pdbProxy.tablename,LocalJobSpec.columnNames()) sql1+= "VALUES " + job.values() status,out = pdbProxy.execute_direct(sql1) if not status: raise RuntimeError("failed to insert job") # update job info in DB def updateJobDB(job,verbose=False,updateTime=None): # make sql sql1 = "UPDATE %s SET " % pdbProxy.tablename sql1 += job.values(forUpdate=True) sql1 += " WHERE JobID=%s " % job.JobID # set update time if updateTime is not None: job.lastUpdate = updateTime sql1 += " AND lastUpdate<'%s' " % updateTime.strftime('%Y-%m-%d %H:%M:%S') else: job.lastUpdate = datetime.datetime.utcnow() status,out = pdbProxy.execute_direct(sql1) if not status: raise RuntimeError("failed to update job") # set retryID def setRetryID(job,verbose=False): # make sql sql1 = "UPDATE %s SET " % pdbProxy.tablename sql1 += "retryID=%s,retryJobsetID=%s " % (job.JobID,job.groupID) sql1 += " WHERE JobID=%s AND (nRebro IS NULL OR nRebro=%s)" % (job.provenanceID,job.nRebro) status,out = pdbProxy.execute(sql1) if not status: raise RuntimeError("failed to set retryID") # delete old jobs def deleteOldJobs(days,verbose=False): # time limit limit = datetime.datetime.utcnow() - datetime.timedelta(days=days) # make sql sql1 = "DELETE FROM %s " % pdbProxy.tablename sql1 += " WHERE creationTime<'%s' " % limit.strftime('%Y-%m-%d %H:%M:%S') status,out = pdbProxy.execute_direct(sql1) if not status: raise RuntimeError("failed to delete old jobs") # read job info from DB def readJobDB(JobID,verbose=False): # make sql sql1 = "SELECT %s FROM %s " % (LocalJobSpec.columnNames(),pdbProxy.tablename) sql1+= "WHERE JobID=%s" % JobID # execute status,out = pdbProxy.execute_direct(sql1, fetch=True) if not status: raise RuntimeError("failed to get JobID=%s" % JobID) if len(out) == 0: return None # instantiate LocalJobSpec for values in out: job = LocalJobSpec() job.pack(values) # return frozen job if exists if job.dbStatus == 'frozen': return job # return any return job # read jobset info from DB def readJobsetDB(JobsetID,verbose=False): # make sql sql1 = "SELECT %s FROM %s " % (LocalJobSpec.columnNames(),pdbProxy.tablename) sql1+= "WHERE groupID=%s" % JobsetID # execute status,out = pdbProxy.execute(sql1) if not status: raise RuntimeError("failed to get JobsetID=%s" % JobsetID) if len(out) == 0: return None # instantiate 
LocalJobSpec tmpJobMap = {} for tmpStr in out: values = tmpStr.split('|') job = LocalJobSpec() job.pack(values) # return frozen job if exists if job.dbStatus == 'frozen' or job.JobID not in tmpJobMap: tmpJobMap[job.JobID] = job # make jobset jobset = LocalJobsetSpec() # set jobs jobset.setJobs(tmpJobMap.values()) # return any return jobset # check jobset status in DB def checkJobsetStatus(JobsetID,verbose=False): # logger tmpLog = PLogger.getPandaLogger() # make sql sql1 = "SELECT %s FROM %s " % (LocalJobSpec.columnNames(),pdbProxy.tablename) sql1+= "WHERE groupID=%s" % JobsetID failedRet = False,None # execute status,out = pdbProxy.execute(sql1) if not status: tmpLog.error(out) tmpLog.error("failed to access local DB") return failedRet if len(out) == 0: tmpLog.error("failed to get JobsetID=%s from local DB" % JobsetID) return failedRet # instantiate LocalJobSpec jobMap = {} for tmpStr in out: values = tmpStr.split('|') job = LocalJobSpec() job.pack(values) # use frozen job if exists if job.JobID not in jobMap or job.dbStatus == 'frozen': jobMap[job.JobID] = job # check all job status for tmpJobID in jobMap: tmpJobSpec = jobMap[tmpJobID] if tmpJobSpec.dbStatus != 'frozen': return True,'running' # return return True,'frozen' # bulk read job info from DB def bulkReadJobDB(verbose=False): # make sql sql1 = "SELECT %s FROM %s " % (LocalJobSpec.columnNames(),pdbProxy.tablename) # execute status,out = pdbProxy.execute_direct(sql1, fetch=True) if not status: raise RuntimeError("failed to get jobs") if len(out) == 0: return [] # instantiate LocalJobSpec retMap = {} jobsetMap = {} for values in out: job = LocalJobSpec() job.pack(values) # use frozen job if exists if job.JobID not in retMap or job.dbStatus == 'frozen': if job.groupID in [0,'0','NULL',-1,'-1']: retMap[long(job.JobID)] = job else: # add jobset tmpJobsetID = long(job.groupID) if tmpJobsetID not in retMap or tmpJobsetID not in jobsetMap: jobsetMap[tmpJobsetID] = [] jobset = LocalJobsetSpec() retMap[tmpJobsetID] = jobset # add job jobsetMap[tmpJobsetID].append(job) # add jobs to jobset for tmpJobsetID in jobsetMap: tmpJobList = jobsetMap[tmpJobsetID] retMap[tmpJobsetID].setJobs(tmpJobList) # sort ids = list(retMap) ids.sort() retVal = [] for id in ids: retVal.append(retMap[id]) # return return retVal # get list of JobID def getListOfJobIDs(nonFrozen=False,verbose=False): # make sql sql1 = "SELECT JobID,dbStatus FROM %s " % pdbProxy.tablename # execute status,out = pdbProxy.execute_direct(sql1, fetch=True) if not status: raise RuntimeError("failed to get list of JobIDs") allList = [] frozenList = [] for item in out: # extract JobID tmpID = long(item[0]) # status in DB tmpStatus = item[-1] # keep all jobs if tmpID not in allList: allList.append(tmpID) # keep frozen jobs if nonFrozen and tmpStatus == 'frozen': if tmpID not in frozenList: frozenList.append(tmpID) # remove redundant jobs retVal = [] for item in allList: if item not in frozenList: retVal.append(item) # sort retVal.sort() # return return retVal # get map of jobsetID and JobIDs def getMapJobsetIDJobIDs(verbose=False): # make sql sql1 = "SELECT groupID,JobID FROM %s WHERE groupID is not NULL and groupID != 0 and groupID != ''" % pdbProxy.tablename # execute status,out = pdbProxy.execute(sql1) if not status: raise RuntimeError("failed to get list of JobIDs") allMap = {} for item in out: # JobsetID tmpJobsetID = long(item.split('|')[0]) # JobID tmpJobID = long(item.split('|')[-1]) # append if tmpJobsetID not in allMap: allMap[tmpJobsetID] = [] if tmpJobID not in allMap[tmpJobsetID]:
allMap[tmpJobsetID].append(tmpJobID) # sort for tmpKey in allMap.keys(): allMap[tmpKey].sort() # return return allMap # make JobSetSpec def makeJobsetSpec(jobList): jobset = LocalJobsetSpec() jobset.setJobs(jobList) return jobset # get map of jobsetID and jediTaskID def getJobsetTaskMap(verbose=False): # make sql sql1 = "SELECT groupID,jediTaskID FROM %s WHERE groupID is not NULL and groupID != 0 and groupID != '' and jediTaskID is not null and jediTaskID != ''" % pdbProxy.tablename # execute status,out = pdbProxy.execute_direct(sql1, fetch=True) if not status: raise RuntimeError("failed to get list of JobIDs") allMap = {} for item in out: # JobsetID tmpJobsetID = long(item[0]) # JobID jediTaskID = long(item[-1]) # append allMap[jediTaskID] = tmpJobsetID # return return allMap
checkSchema
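A minimal consumption sketch for the module above. The package import path and the JobID are illustrative assumptions, PANDA_CONFIG_ROOT must point at a writable directory before import (the module-level PdbProxy() reads it), and note that the initialization helper really is spelled initialzieDB in this module:

import os
os.environ.setdefault('PANDA_CONFIG_ROOT', os.path.expanduser('~/.panda'))

from pandaclient import PdbUtils  # hypothetical package layout

PdbUtils.initialzieDB(verbose=True)  # creates or validates the sqlite table
job = PdbUtils.readJobDB(1234)       # hypothetical JobID; returns a LocalJobSpec or None
if job is not None:
    print(job.JobID, job.jobStatus)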
util.rs
#![allow(dead_code)] extern crate gmp; pub fn remove_comments(value: &mut String, comment: char) { match value.find(comment) { Some(pos) => { value.truncate(pos); }, _ => {} } } pub fn boolvec_to_bignum(vec:&[bool]) -> gmp::mpz::Mpz { let mut ret = gmp::mpz::Mpz::zero(); ret.reserve(vec.len()); for i in 0..vec.len() { match vec[i]{ true => ret.setbit(i), false => ret.clrbit(i) } } return ret; } pub fn boolvec_to_usize(vec:&[bool]) -> usize { let mut ret:usize = 0; for i in 0..vec.len() { if vec[i]{ ret |= 1 << i; } } return ret; } pub fn boolvec_to_u8(vec:&[bool]) -> u8 { //backwards from other vec -> num implementations, //printed characters have different endianness let mut ret:u8 = 0; for i in 0..vec.len() { if vec[i]{ ret |= 128 >> i; } } return ret; } pub fn char_to_boolvec(c:char) -> Vec<bool> { let mut temp_string:String = String::new(); temp_string.push(c); return str_to_boolvec(temp_string.as_ref()); } pub fn bignum_to_usize(num: &gmp::mpz::Mpz) -> usize { let mut ret:usize = 0; for i in 0..num.bit_length() { if num.tstbit(i) { ret |= 1 << i; } } return ret; } pub fn bignum_to_boolvec(num: &gmp::mpz::Mpz) -> Vec<bool> { let num_bits = num.bit_length(); let mut ret = vec![false; num_bits]; for i in 0..num_bits { ret[i] = num.tstbit(i); } ret } pub fn str_to_boolvec(s:&str) -> Vec<bool> { let mut ret:Vec<bool> = vec![]; for b in s.as_bytes() { for i in 0..8 { ret.push((*b as u32) & (128 >> i) != 0); } } ret } pub fn usize_len(num:usize) -> usize { let mut ret = 0; let mut num = num; while num != 0 { ret += 1; num /= 2; } ret
let mut ret = vec![]; let mut num = num; while num != 0 { ret.push(num % 2 == 1); num /= 2; } ret } pub fn usize_to_bignum(num: usize) -> gmp::mpz::Mpz { let mut ret = gmp::mpz::Mpz::zero(); let mut i = 0; let mut num = num; while num != 0 { match num % 2 == 1 { true => ret.setbit(i), false => ret.clrbit(i), } num /= 2; i += 1; } ret }
} pub fn usize_to_boolvec(num: usize) -> Vec<bool> {
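A round-trip sketch for the helpers above, assuming the module is reachable as util and the gmp crate is linked; bit order is little-endian throughout:

fn main() {
    let n: usize = 11; // 0b1011
    let bits = util::usize_to_boolvec(n);
    assert_eq!(bits, vec![true, true, false, true]);
    let big = util::boolvec_to_bignum(&bits);
    assert_eq!(util::bignum_to_usize(&big), n);
    assert_eq!(util::usize_len(n), 4); // number of significant bits
}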
setup.py
import setuptools with open("README.md", "r") as fh: long_description = fh.read() setuptools.setup( name="torchsupport", version="0.0.1", author="Michael Jendrusch", author_email="[email protected]", description="Support for advanced pytorch usage.",
long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/mjendrusch/torchsupport/", packages=setuptools.find_packages(), classifiers=( "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ), )
util.go
package tidbTools import ( "fmt" "strconv" "strings" ) func ToFloat64(str string) float64 { var newNum float64 tmpStr := strings.ToLower(str) if strings.Contains(tmpStr, "e+") || strings.Contains(tmpStr, "e-") { _, err := fmt.Sscanf(tmpStr, "%e", &newNum) if err != nil { fmt.Printf("fmt.Sscanf error, numStr:%s, err:%v", tmpStr, err) return 0 } } else { newNum, _ = strconv.ParseFloat(str, 64) } return newNum } func
(a, b int) int { return a + b }
Add
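A usage sketch for the helpers above; the module path is a placeholder for wherever tidbTools is actually published:

package main

import (
	"fmt"

	"example.com/tidbTools" // hypothetical import path
)

func main() {
	fmt.Println(tidbTools.ToFloat64("1.5e+3")) // scientific-notation branch: 1500
	fmt.Println(tidbTools.ToFloat64("42.25"))  // strconv.ParseFloat branch: 42.25
	fmt.Println(tidbTools.Add(2, 3))           // 5
}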
main.go
package main import ( "gomail/config"
"gomail/smtp" "log" ) func main() { mailConfig := config.Load("./config.yml") mongo, err := db.New(mailConfig.Mongo) if err != nil { log.Fatal(err) } go smtp.Start(mailConfig.Smtp, mongo) imap.StartAndListen(mailConfig.Imap) }
"gomail/db" "gomail/imap"
scoped_hash_map.rs
//! `ScopedHashMap` //! //! This module defines a struct `ScopedHashMap<K, V>` which defines a `HashMap`-like //! container that has a concept of scopes that can be entered and exited, such that //! values inserted while inside a scope aren't visible outside the scope. use fx::FxHashMap; use std::collections::hash_map; use std::hash::Hash; use std::mem; struct Val<K, V> { value: V, next_key: Option<K>, depth: usize, } /// A view into an occupied entry in a `ScopedHashMap`. It is part of the `Entry` enum. pub struct OccupiedEntry<'a, K: 'a, V: 'a> { entry: hash_map::OccupiedEntry<'a, K, Val<K, V>>, } impl<'a, K, V> OccupiedEntry<'a, K, V> { /// Gets a reference to the value in the entry. pub fn get(&self) -> &V { &self.entry.get().value } } /// A view into a vacant entry in a `ScopedHashMap`. It is part of the `Entry` enum. pub struct VacantEntry<'a, K: 'a, V: 'a> { entry: hash_map::VacantEntry<'a, K, Val<K, V>>, next_key: Option<K>, depth: usize, } impl<'a, K, V> VacantEntry<'a, K, V> { /// Sets the value of the entry with the `VacantEntry`'s key. pub fn insert(self, value: V)
} /// A view into a single entry in a map, which may either be vacant or occupied. /// /// This enum is constructed from the `entry` method on `ScopedHashMap`. pub enum Entry<'a, K: 'a, V: 'a> { Occupied(OccupiedEntry<'a, K, V>), Vacant(VacantEntry<'a, K, V>), } /// A wrapper around a `HashMap` which adds the concept of scopes. Items inserted /// within a scope are removed when the scope is exited. /// /// Shadowing, where one scope has entries with the same keys as a containing scope, /// is not supported in this implementation. pub struct ScopedHashMap<K, V> { map: FxHashMap<K, Val<K, V>>, last_insert: Option<K>, current_depth: usize, } impl<K, V> ScopedHashMap<K, V> where K: PartialEq + Eq + Hash + Clone, { /// Creates an empty `ScopedHashMap`. pub fn new() -> Self { Self { map: FxHashMap(), last_insert: None, current_depth: 0, } } /// Similar to `HashMap::entry`, gets the given key's corresponding entry in the map for /// in-place manipulation. pub fn entry(&mut self, key: K) -> Entry<K, V> { use self::hash_map::Entry::*; match self.map.entry(key) { Occupied(entry) => Entry::Occupied(OccupiedEntry { entry }), Vacant(entry) => { let clone_key = entry.key().clone(); Entry::Vacant(VacantEntry { entry, next_key: mem::replace(&mut self.last_insert, Some(clone_key)), depth: self.current_depth, }) } } } /// Enter a new scope. pub fn increment_depth(&mut self) { // Increment the depth. self.current_depth = self.current_depth.checked_add(1).unwrap(); } /// Exit the current scope. pub fn decrement_depth(&mut self) { // Remove all elements inserted at the current depth. while let Some(key) = self.last_insert.clone() { use self::hash_map::Entry::*; match self.map.entry(key) { Occupied(entry) => { if entry.get().depth != self.current_depth { break; } self.last_insert = entry.remove_entry().1.next_key; } Vacant(_) => panic!(), } } // Decrement the depth. 
self.current_depth = self.current_depth.checked_sub(1).unwrap(); } } #[cfg(test)] mod tests { use super::*; #[test] fn basic() { let mut map: ScopedHashMap<i32, i32> = ScopedHashMap::new(); match map.entry(0) { Entry::Occupied(_entry) => panic!(), Entry::Vacant(entry) => entry.insert(1), } match map.entry(2) { Entry::Occupied(_entry) => panic!(), Entry::Vacant(entry) => entry.insert(8), } match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } map.increment_depth(); match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } match map.entry(1) { Entry::Occupied(_entry) => panic!(), Entry::Vacant(entry) => entry.insert(3), } match map.entry(1) { Entry::Occupied(entry) => assert!(*entry.get() == 3), Entry::Vacant(_entry) => panic!(), } match map.entry(0) { Entry::Occupied(entry) => assert!(*entry.get() == 1), Entry::Vacant(_entry) => panic!(), } match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } map.decrement_depth(); match map.entry(0) { Entry::Occupied(entry) => assert!(*entry.get() == 1), Entry::Vacant(_entry) => panic!(), } match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } map.increment_depth(); match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } match map.entry(1) { Entry::Occupied(_entry) => panic!(), Entry::Vacant(entry) => entry.insert(4), } match map.entry(1) { Entry::Occupied(entry) => assert!(*entry.get() == 4), Entry::Vacant(_entry) => panic!(), } match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } map.decrement_depth(); map.increment_depth(); map.increment_depth(); map.increment_depth(); match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } match map.entry(1) { Entry::Occupied(_entry) => panic!(), Entry::Vacant(entry) => entry.insert(5), } match map.entry(1) { Entry::Occupied(entry) => assert!(*entry.get() == 5), Entry::Vacant(_entry) => panic!(), } match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } map.decrement_depth(); map.decrement_depth(); map.decrement_depth(); match map.entry(2) { Entry::Occupied(entry) => assert!(*entry.get() == 8), Entry::Vacant(_entry) => panic!(), } match map.entry(1) { Entry::Occupied(_entry) => panic!(), Entry::Vacant(entry) => entry.insert(3), } } }
{ self.entry.insert(Val { value, next_key: self.next_key, depth: self.depth, }); }
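A condensed sketch of the scope semantics the unit test above exercises: a key inserted after increment_depth disappears at the matching decrement_depth, while keys from the enclosing scope survive:

fn scope_demo() {
    let mut map: ScopedHashMap<&str, i32> = ScopedHashMap::new();
    match map.entry("outer") {
        Entry::Vacant(e) => e.insert(1),
        Entry::Occupied(_) => panic!(),
    }
    map.increment_depth();
    match map.entry("inner") {
        Entry::Vacant(e) => e.insert(2),
        Entry::Occupied(_) => panic!(),
    }
    map.decrement_depth(); // drops "inner", keeps "outer"
    match map.entry("inner") {
        Entry::Vacant(_) => {}
        Entry::Occupied(_) => panic!("inner should have been removed"),
    }
}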
models.rs
#![allow(unused_imports, unused_qualifications, unused_extern_crates)] extern crate chrono;
use serde::ser::Serializer; use std::collections::HashMap; use models; use swagger; /// An additionalPropertiesObject #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct AdditionalPropertiesObject { } impl AdditionalPropertiesObject { pub fn new() -> AdditionalPropertiesObject { AdditionalPropertiesObject { } } }
extern crate uuid;
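A serialization round-trip sketch for the generated model; serde_json is assumed to be available since the type derives Serialize and Deserialize:

fn demo() -> serde_json::Result<()> {
    let obj = AdditionalPropertiesObject::new();
    let json = serde_json::to_string(&obj)?;
    assert_eq!(json, "{}"); // the empty struct serializes to an empty object
    let _back: AdditionalPropertiesObject = serde_json::from_str(&json)?;
    Ok(())
}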
pattern-tyvar-2.rs
// -*- rust -*- // Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern mod std; use option::Some; // error-pattern: mismatched types enum
{ t1((), Option<~[int]>), t2, } fn foo(t: bar) -> int { match t { t1(_, Some(x)) => { return x * 3; } _ => { die!(); } } } fn main() { }
bar
AuthenticatedRoute.tsx
import React, {ReactElement} from "react"; import {Route, Redirect} from "react-router-dom"; import {useSelector} from "react-redux"; import {Routes} from "../constants/routes"; import {RootState} from "./rootReducer"; export interface IAuthenticatedRouteProps { path: string; component: any; } export const AuthenticatedRoute = ({component: Component, ...rest}: IAuthenticatedRouteProps): ReactElement => { const state = useSelector((state: RootState) => state); const token = state.user.token; return ( <Route
{...rest} render={(props): ReactElement => token ? ( <Component {...props} /> ) : ( <Redirect to={{pathname: Routes.LOGIN_ROUTE, state: {from: props.location}}} /> ) } /> ); };
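A usage sketch for the guard above; the Dashboard component and its path are placeholders for whatever the app defines:

import React, {ReactElement} from "react";
import {Switch} from "react-router-dom";
import {AuthenticatedRoute} from "./AuthenticatedRoute";
import {Dashboard} from "../components/Dashboard"; // hypothetical component

export const AppRoutes = (): ReactElement => (
    <Switch>
        <AuthenticatedRoute path="/dashboard" component={Dashboard} />
    </Switch>
);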
instagram.ts
import AwaitLock from "await-lock"; import chalk from "chalk"; import {EventEmitter} from "events"; import {isLeft} from "fp-ts/lib/Either"; import {Type} from "io-ts"; import {PathReporter} from "io-ts/lib/PathReporter"; import {ThrowReporter} from "io-ts/lib/ThrowReporter"; import * as _ from "lodash/object"; import { Browser, Headers, launch, LaunchOptions, Page, Request, Response, } from "puppeteer"; import * as winston from "winston"; import {IPlugin, IPluginContext} from "../../plugins"; import {IOptions} from "./api"; import {PostIdSet} from "./postIdSet"; /** * Instagram API wrapper */ export class Instagram<PostType> extends EventEmitter { /** * Apply defaults to undefined options */ private static defaultOptions(options: IOptions) { if (options.enableGrafting === undefined) { options.enableGrafting = true; } if (options.fullAPI === undefined) { options.fullAPI = false; } if (options.headless === undefined) { options.headless = true; } if (options.logger === undefined) { options.logger = winston.createLogger({ silent: true, }); } if (options.silent === undefined) { options.silent = true; } if (options.sleepTime === undefined) { options.sleepTime = 2; } if (options.hibernationTime === undefined) { options.hibernationTime = 60 * 20; } if (options.total === undefined) { options.total = 0; } return options; } // Resource identifier public id: string; public url: string; // Iteration state public started: boolean = false; public paused: boolean = false; public finished: boolean = false; // Instagram URLs public catchURL: string = "https://www.instagram.com/graphql/query"; public postURL: string = "https://instagram.com/p/"; public defaultPostURL: string = "https://www.instagram.com/p/"; // Number of jumps before grafting public jumpMod: number = 100; // Depth of jumps public jumpSize: number = 2; // Puppeteer resources public page: Page; // Validations private readonly strict: boolean = false; private readonly validator: Type<unknown>; // Puppeteer state private browser: Browser; private browserDisconnected: boolean = true; private readonly headless: boolean; // Array of scraped posts and lock private postBuffer: PostType[] = []; private postBufferLock: AwaitLock = new AwaitLock(); // Request and Response buffers and locks private requestBuffer: Request[] = []; private requestBufferLock: AwaitLock = new AwaitLock(); private responseBuffer: Response[] = []; private responseBufferLock: AwaitLock = new AwaitLock(); // Get full amount of data from API private readonly fullAPI: boolean = false; private pagePromises: Array<Promise<void>> = []; // Grafting state private readonly enableGrafting: boolean = true; private graft: boolean = false; private graftURL: string = null; private graftHeaders: Headers = null; private foundGraft: boolean = false; // Hibernation due to rate limiting private hibernate: boolean = false; private readonly hibernationTime: number = 60 * 20; // 20 minutes // Number of jumps before exiting because lack of data private failedJumps: number = 10; // Strings denoting the access methods of API objects private readonly pageQuery: string; private readonly edgeQuery: string; // Cache of post ids private postIds: PostIdSet; // Iteration variables private readonly total: number; private index: number = 0; private jumps: number = 0; // Number of times to attempt to visit url initially private readonly maxPageUrlAttempts = 3; private pageUrlAttempts = 0; private postPageRetries = 5; // Output private readonly silent: boolean = false; private writeLock: AwaitLock = new 
AwaitLock(); // Sleep time remaining private sleepRemaining: number = 0; // Length of time to sleep for private readonly sleepTime: number = 2; // Logging object private logger: winston.Logger; // Proxy for Instagram connection private readonly proxyURL: string; // Location of chromium / chrome binary executable private readonly executablePath: string; /** * Create API wrapper instance * @param endpoint the url for the type of resource to scrape * @param id the identifier for the resource * @param pageQuery the query to identify future pages in the nested API structure * @param edgeQuery the query to identify posts in the nested API structure * @param options configuration details * @param validator response type validator */ constructor( endpoint: string, id: string, pageQuery: string, edgeQuery: string, options: IOptions = {}, validator: Type<unknown>, ) { super(); this.id = id; this.postIds = new PostIdSet(); this.url = endpoint.replace("[id]", id); options = Instagram.defaultOptions(options); this.total = options.total; this.pageQuery = pageQuery; this.edgeQuery = edgeQuery; this.headless = options.headless; this.logger = options.logger; this.silent = options.silent; this.strict = options.strict; this.enableGrafting = options.enableGrafting; this.sleepTime = options.sleepTime; this.hibernationTime = options.hibernationTime; this.fullAPI = options.fullAPI; this.proxyURL = options.proxyURL; this.executablePath = options.executablePath; this.validator = options.validator || validator; this.addPlugins(options["plugins"]); this.emit("construction"); } /** * Toggle pausing data collection */ public pause() { this.paused = !this.paused; } /** * Toggle prolonged pausing */ public toggleHibernation() { this.hibernate = true; } /** * Force the API to stop */ public async forceStop(force?: boolean) { if (!force && !this.started) { return; } this.started = false; this.finished = true; try { this.requestBufferLock.release(); // tslint:disable-next-line: no-empty } catch (e) {} try { this.responseBufferLock.release(); // tslint:disable-next-line: no-empty } catch (e) {} await this.stop(); } /** * Generator of posts on page */ public async *generator(): AsyncIterableIterator<PostType> { // Start if haven't done so already if (!this.started) { await this.start(); } while (true) { // Get more posts await this.getNext(); // Yield posts from buffer let post = await this.postPop(); while (post) { yield post; post = await this.postPop(); } // End loop when finished and posts in buffer exhausted if (this.finished) { break; } } await this.stop(); // Add newline to end of output if (!this.silent) { process.stdout.write("\n"); } } /** * Construct page and add listeners */ public async start() { // Build page and visit url await this.constructPage(); this.started = true; // Add event listeners for requests and responses await this.page.setRequestInterception(true); this.page.on("request", (req) => this.interceptRequest(req)); this.page.on("response", (res) => this.interceptResponse(res)); this.page.on("requestfailed", (res) => this.interceptFailure(res)); // Ignore dialog boxes this.page.on("dialog", (dialog) => dialog.dismiss()); // Log errors this.page.on("error", (error) => this.logger.error(error)); // Gather initial posts from web page if (this.fullAPI) { await this.scrapeDefaultPosts(); } } /** * Match the url to the url used in API requests */ public matchURL(url: string) { return url.startsWith(this.catchURL) && !url.includes("include_reel"); } /** * Close the page and browser */ protected async stop() 
{ await this.progress(Progress.CLOSING); // Close page and browser if (!this.page.isClosed()) { this.page.removeAllListeners("request"); this.page.removeAllListeners("response"); this.page.removeAllListeners("requestfailed"); await this.page.close(); } if (!this.browserDisconnected) { await this.browser.close().catch(e => e);
// Clear request buffers await this.requestBufferLock.acquireAsync(); this.requestBuffer = []; this.requestBufferLock.release(); // Clear response buffers await this.responseBufferLock.acquireAsync(); this.responseBuffer = []; this.responseBufferLock.release().catch(e => e); } /** * Process the requests in the request buffer */ protected async processRequests() { await this.requestBufferLock.acquireAsync(); let newApiRequest = false; for (const req of this.requestBuffer) { // Match url if (!this.matchURL(req.url())) { continue; } else { newApiRequest = true; } // Begin grafting if required, else continue the request if (this.graft) { if (this.foundGraft === false) { // Gather details this.graftURL = req.url(); this.graftHeaders = req.headers(); this.foundGraft = true; // Cancel request await req.abort(); } else { // Swap request const overrides = { headers: this.graftHeaders, url: this.graftURL, }; this.emit("request", req, overrides); await req.continue(overrides); // Reset grafting data this.graft = false; this.foundGraft = false; this.graftURL = null; this.graftHeaders = null; } // Stop reading requests break; } else { const overrides = {}; this.emit("request", req, overrides); await req.continue(overrides); } } // Clear buffer and release this.requestBuffer = []; this.requestBufferLock.release(); if (this.foundGraft && newApiRequest) { // Restart browser and page, clearing all buffers await this.stop(); await this.start(); } } /** * Process the responses in the response buffer */ protected async processResponses() { await this.responseBufferLock.acquireAsync(); for (const res of this.responseBuffer) { // Match url if (!this.matchURL(res.url())) { continue; } // Get JSON data let data: JSON; try { data = await res.json(); } catch (e) { this.logger.error("Error processing response JSON"); this.logger.error(e); } // Emit event this.emit("response", res, data); // Check for rate limiting if (data && "status" in data && data["status"] === "fail") { this.logger.info("Rate limited"); this.hibernate = true; continue; } // Check for next page if ( !( _.get(data, this.pageQuery + ".has_next_page", false) && _.get(data, this.pageQuery + ".end_cursor", false) ) ) { this.logger.info("No posts remaining", {data}); this.finished = true; } await this.processResponseData(data); } // Clear buffer and release this.responseBuffer = []; this.responseBufferLock.release(); } protected async processResponseData(data: unknown) { // Get posts const posts = _.get(data, this.edgeQuery, []); for (const post of posts) { const postId = post["node"]["id"]; // Check it hasn't already been cached const contains = this.postIds.add(postId); if (contains) { this.logger.info("Duplicate id found: " + postId); continue; } // Add to postBuffer if (this.index < this.total || this.total === 0) { this.index++; if (this.fullAPI) { this.pagePromises.push( this.postPage( post["node"]["shortcode"], this.postPageRetries, ), ); } else { await this.addToPostBuffer(post); } } else { this.finished = true; break; } } } /** * Open a post in a new page, then extract its metadata */ protected async postPage(post: string, retries: number) { // Create page const postPage = await this.browser.newPage().catch(e => e); await postPage.setRequestInterception(true); postPage.on("request", async (req) => { if (!req.url().includes("/p/" + post)) { await req.abort(); } else { await req.continue(); } }); postPage.on("requestfailed", async () => undefined); // Visit post and read state let data; let parsed; try { await postPage.goto(this.postURL + post); 
// Load data from memory /* istanbul ignore next */ data = await postPage.evaluate(() => { return JSON.stringify( window["_sharedData"].entry_data.PostPage[0].graphql, ); }); parsed = JSON.parse(data) as PostType; await postPage.close(); } catch (e) { // Log error and wait this.logger.error(e); await this.progress(Progress.ABORTED); await this.sleep(2); // Close existing attempt await postPage.close(); // Retry if (retries > 0) { await this.postPage(post, --retries); } } if (!parsed) { return; } this.emit("postPage", parsed); await this.addToPostBuffer(parsed); } protected validatePost(post: PostType) { const validationResult = this.validator.decode(post); if (this.strict) { try { ThrowReporter.report(validationResult); } catch (e) { this.forceStop(); throw e; } return; } if (isLeft(validationResult)) { const validationReporter = PathReporter.report(validationResult); this.logger.warn( ` Warning! The Instagram API has been changed since this version of instamancer was released. More info: https://scriptsmith.github.io/instamancer/api-change `, {validationReporter, post}, ); } } /** * Stimulate the page until responses gathered */ protected async getNext() { await this.progress(Progress.SCRAPING); while (true) { // Process results (if any) await this.processRequests(); await this.processResponses(); // Finish page promises if (this.pagePromises.length > 0) { await this.progress(Progress.BRANCHING); await Promise.all(this.pagePromises); this.pagePromises = []; } // Check if finished if (this.finished) { break; } // Pause if paused await this.waitResume(); // Interact with page to stimulate request await this.jump(); // Stop if no data is being gathered if (this.jumps === this.failedJumps && this.index === 0) { this.finished = true; this.logger.error("Page failed to make requests"); break; } // Enable grafting if required if (this.jumps % this.jumpMod === 0) { await this.initiateGraft(); } // Sleep await this.sleep(this.sleepTime); // Hibernate if rate-limited if (this.hibernate) { await this.sleep(this.hibernationTime); this.hibernate = false; } // Break if posts in buffer await this.postBufferLock.acquireAsync(); const posts = this.postBuffer.length; this.postBufferLock.release(); if (posts > 0) { break; } } } /** * Halt execution * @param time Seconds */ protected async sleep(time: number) { for (let i = time; i > 0; i--) { this.sleepRemaining = i; await this.progress(Progress.SCRAPING); await new Promise((resolve) => { setTimeout(resolve, i >= 1 ? 
1000 : i * 1000); }); } this.sleepRemaining = 0; await this.progress(Progress.SCRAPING); } /** * Create the browser and page, then visit the url */ private async constructPage() { // Browser args const args = []; if (process.env.NO_SANDBOX) { args.push("--no-sandbox"); args.push("--disable-setuid-sandbox"); } if (this.proxyURL !== undefined) { args.push("--proxy-server=" + this.proxyURL); } // Browser launch options const options: LaunchOptions = { args, headless: this.headless, }; if (this.executablePath !== undefined) { options.executablePath = this.executablePath; } // Launch browser await this.progress(Progress.LAUNCHING); this.browser = await launch(options); this.browserDisconnected = false; this.browser.on( "disconnected", () => (this.browserDisconnected = true), ); // New page this.page = await this.browser.newPage().catch(e => e); await this.progress(Progress.OPENING); // Attempt to visit URL try { await this.page.goto(this.url); } catch (e) { // Increment attempts if ( this.pageUrlAttempts++ === this.maxPageUrlAttempts && !this.started ) { await this.forceStop(true); throw new Error("Failed to visit URL"); } // Log error and wait this.logger.error(e); this.logger.error(this.url); await this.progress(Progress.ABORTED); await this.sleep(60); // Close existing attempt await this.page.close(); await this.browser.close().catch(e => e); // Retry await this.constructPage(); } } /** * Pause and wait until resumed */ private async waitResume() { // Pause for 200 milliseconds function f() { return new Promise((resolve) => { setTimeout(resolve, 200); }); } // Pause until pause toggled while (this.paused === true) { await this.progress(Progress.PAUSED); await f(); } } /** * Pop a post off the postBuffer (using locks). Returns null if no posts in buffer */ private async postPop() { let post = null; await this.postBufferLock.acquireAsync(); if (this.postBuffer.length > 0) { post = this.postBuffer.shift(); } this.postBufferLock.release(); return post; } /** * Print progress to stderr */ private async progress(state: Progress) { // End if silent if (this.silent) { return; } // Lock await this.writeLock.acquireAsync(); // Calculate total const total = this.total === 0 ? 
"Unlimited" : this.total; // Generate output string const idStr = chalk.bgYellow.black(` ${this.id} `); const totalStr = chalk.bgBlack(` Total: ${total} `); const stateStr = chalk.bgWhite.black(` State: ${state} `); const sleepStr = chalk.bgWhite.black( ` Sleeping: ${this.sleepRemaining} `, ); const indexStr = chalk.bgWhite.black(` Scraped: ${this.index} `); this.logger.debug({ id: this.id, index: this.index, sleepRemaining: this.sleepRemaining, state, total, }); // Print output process.stderr.write( `\r${idStr}${totalStr}${stateStr}${sleepStr}${indexStr}\u001B[K`, ); // Release this.writeLock.release(); } /** * Add request to the request buffer */ private async interceptRequest(req: Request) { await this.requestBufferLock.acquireAsync(); this.requestBuffer.push(req); await this.requestBufferLock.release(); } /** * Add the response to the response buffer */ private async interceptResponse(res: Response) { await this.responseBufferLock.acquireAsync(); this.responseBuffer.push(res); await this.responseBufferLock.release(); } /** * Log failed requests */ private async interceptFailure(req: Request) { this.logger.info("Failed: " + req.url()); await this.progress(Progress.ABORTED); } /** * Add post to buffer */ private async addToPostBuffer(post: PostType) { await this.postBufferLock.acquireAsync(); this.validatePost(post); this.postBuffer.push(post); this.postBufferLock.release(); } /** * Manipulate the page to stimulate a request */ private async jump() { await this.page.keyboard.press("PageUp"); const jumpSize = this.graft ? 1 : this.jumpSize; for (let i = 0; i < jumpSize; i++) { await this.page.keyboard.press("End"); } // Move mouse randomly const width = this.page.viewport()["width"]; const height = this.page.viewport()["height"]; await this.page.mouse.move( Math.round(width * Math.random()), Math.round(height * Math.random()), ); ++this.jumps; } /** * Clear request and response buffers */ private async initiateGraft() { // Check if enabled if (!this.enableGrafting) { return; } await this.progress(Progress.GRAFTING); this.emit("grafting"); // Enable grafting this.graft = true; } /** * Read the posts that are pre-loaded on the page */ private async scrapeDefaultPosts() { // Get shortcodes from page /* istanbul ignore next */ const shortCodes = await this.page.evaluate((url) => { return Array.from(document.links) .filter((link) => { return ( link.href.startsWith(url) && link.href.split("/").length >= 2 ); }) .map((link) => { const linkSplit = link.href.split("/"); return linkSplit[linkSplit.length - 2]; }); }, this.defaultPostURL); // Add postPage promises for (const shortCode of shortCodes) { if (this.index < this.total || this.total === 0) { this.index++; this.pagePromises.push( this.postPage(shortCode, this.postPageRetries), ); } else { this.finished = true; break; } } } private addPlugins(plugins: Array<IPlugin<PostType>>) { if (!plugins) { return; } const events = [ "construction", "request", "response", "postPage", "grafting", ]; for (const plugin of plugins) { for (const event of events) { const pluginEvent = plugin[event + "Event"]; if (pluginEvent) { const context: IPluginContext<typeof plugin, PostType> = { plugin, state: this, }; this.on(event, pluginEvent.bind(context)); } } } } } /** * The states of progress that the API can be in. Used to output status. */ enum Progress { LAUNCHING = "Launching", OPENING = "Navigating", SCRAPING = "Scraping", BRANCHING = "Branching", GRAFTING = "Grafting", CLOSING = "Closing", PAUSED = "Paused", ABORTED = "Request aborted", }
}
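A consumption sketch for the wrapper above. The endpoint and the two lodash-style query strings are illustrative placeholders, not guaranteed to match the real Instagram API shape:

import * as t from "io-ts";

async function demo(): Promise<void> {
    const api = new Instagram<unknown>(
        "https://www.instagram.com/explore/tags/[id]/", // placeholder endpoint
        "puppies",
        "data.hashtag.edge_hashtag_to_media.page_info", // placeholder pageQuery
        "data.hashtag.edge_hashtag_to_media.edges",     // placeholder edgeQuery
        {total: 10, silent: true},
        t.unknown,
    );
    for await (const post of api.generator()) {
        console.log(post);
    }
}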
component-deprecations.ts
import { Rule, SchematicContext, Tree } from '@angular-devkit/schematics'; import { getSourceNodes } from '@angular/cdk/schematics'; import { UTF_8 } from '../../../shared/constants'; import { buildSpartacusComment, commitChanges, getAllHtmlFiles, getAllTsSourceFiles, insertCommentAboveIdentifier, InsertDirection, insertHtmlComment, isInheriting, } from '../../../shared/utils/file-utils'; import { getSourceRoot } from '../../../shared/utils/workspace-utils'; import { COMPONENT_DEPRECATION_DATA } from './component-deprecations-data'; export function migrate(): Rule { return (tree: Tree, context: SchematicContext) => {
const sourceRoot = getSourceRoot(tree); const allHtmlFilePaths = getAllHtmlFiles(tree, sourceRoot); for (const htmlFilePath of allHtmlFilePaths) { for (const deprecatedComponent of COMPONENT_DEPRECATION_DATA) { for (const removedProperty of deprecatedComponent.removedProperties) { const buffer = tree.read(htmlFilePath); if (!buffer) { context.logger.warn(`Could not read file (${htmlFilePath}).`); continue; } const content = buffer.toString(UTF_8); const change = insertHtmlComment( content, deprecatedComponent.selector, removedProperty ); if (change) { tree.overwrite(htmlFilePath, change); } } } } const project = getSourceRoot(tree, {}); const sourceFiles = getAllTsSourceFiles(tree, project); for (const originalSource of sourceFiles) { const sourcePath = originalSource.fileName; const nodes = getSourceNodes(originalSource); for (const deprecatedComponent of COMPONENT_DEPRECATION_DATA) { if (isInheriting(nodes, deprecatedComponent.componentClassName)) { for (const removedProperty of deprecatedComponent.removedProperties || []) { const changes = insertCommentAboveIdentifier( sourcePath, originalSource, removedProperty.name, buildSpartacusComment(removedProperty.comment) ); if (changes.length) { commitChanges(tree, sourcePath, changes, InsertDirection.RIGHT); } } } } } return tree; }; }
context.logger.info('Checking component selectors...');
contracts.rs
use crate::errors::SemanticError; use crate::namespace::events::Event; use crate::namespace::scopes::{ ContractScope, ModuleScope, Scope, Shared, }; use crate::namespace::types::FixedSize; use crate::traversal::{ functions, types, }; use crate::Context; use fe_parser::ast as fe; use fe_parser::span::Spanned; use std::rc::Rc; /// Gather context information for contract definitions and check for type /// errors. pub fn
( module_scope: Shared<ModuleScope>, context: Shared<Context>, stmt: &Spanned<fe::ModuleStmt>, ) -> Result<(), SemanticError> { if let fe::ModuleStmt::ContractDef { name: _, body } = &stmt.node { let contract_scope = ContractScope::new(module_scope); for stmt in body.iter() { match &stmt.node { fe::ContractStmt::ContractField { .. } => { contract_field(Rc::clone(&contract_scope), stmt) } fe::ContractStmt::EventDef { .. } => event_def(Rc::clone(&contract_scope), stmt), fe::ContractStmt::FuncDef { .. } => { functions::func_def(Rc::clone(&contract_scope), Rc::clone(&context), stmt) } } .map_err(|error| error.with_context(stmt.span))?; } for stmt in body.iter() { if let fe::ContractStmt::FuncDef { .. } = &stmt.node { functions::func_body(Rc::clone(&contract_scope), Rc::clone(&context), stmt) .map_err(|error| error.with_context(stmt.span))?; }; } context .borrow_mut() .add_contract(stmt, contract_scope.borrow().into()); return Ok(()); } unreachable!() } fn contract_field( scope: Shared<ContractScope>, stmt: &Spanned<fe::ContractStmt>, ) -> Result<(), SemanticError> { if let fe::ContractStmt::ContractField { qual: _, name, typ } = &stmt.node { let typ = types::type_desc(Scope::Contract(Rc::clone(&scope)), typ)?; scope.borrow_mut().add_field(name.node.to_string(), typ); return Ok(()); } unreachable!() } fn event_def( scope: Shared<ContractScope>, stmt: &Spanned<fe::ContractStmt>, ) -> Result<(), SemanticError> { if let fe::ContractStmt::EventDef { name, fields } = &stmt.node { let name = name.node.to_string(); let (is_indexed_bools, fields): (Vec<bool>, Vec<FixedSize>) = fields .iter() .map(|field| event_field(Rc::clone(&scope), field)) .collect::<Result<Vec<_>, _>>()? .into_iter() .unzip(); let indexed_fields = is_indexed_bools .into_iter() .enumerate() .filter(|(_, is_indexed)| *is_indexed) .map(|(index, _)| index) .collect::<Vec<_>>(); if indexed_fields.len() > 3 { return Err(SemanticError::more_than_three_indexed_params()); } // check if they are trying to index an array type for index in indexed_fields.clone() { match fields[index].to_owned() { FixedSize::Base(_) => {} _ => unimplemented!("non-base type indexed event params"), } } scope .borrow_mut() .add_event(name.clone(), Event::new(name, fields, indexed_fields)); return Ok(()); } unreachable!() } fn event_field( scope: Shared<ContractScope>, field: &Spanned<fe::EventField>, ) -> Result<(bool, FixedSize), SemanticError> { Ok(( field.node.qual.is_some(), types::type_desc_fixed_size(Scope::Contract(scope), &field.node.typ)?, )) }
contract_def
device.rs
use std::{fmt, marker::PhantomData}; use crypto::ed25519; use rand::{self, Rng}; use serde::{ de::{self, Deserialize, Deserializer, SeqAccess, Visitor}, ser::{Serialize, SerializeTuple, Serializer}, }; use serde_derive::{Deserialize, Serialize}; use serde_json; use crate::{ db::{Database, DatabasePtr}, pin::Pin, Result, }; /// `Device` represents instances of the HAP server. #[derive(Serialize, Deserialize)] pub struct Device { pub id: String, pub pin: Pin, #[serde(with = "BigArray")] pub private_key: [u8; 64], pub public_key: [u8; 32], } impl Device { /// Creates a new `Device` with a given key pair. pub fn new(id: String, pin: Pin, private_key: [u8; 64], public_key: [u8; 32]) -> Device { Device { id, pin, public_key, private_key, } } /// Creates a new `Device` generating a random key pair. pub fn new_random(id: String, pin: Pin) -> Device { let (private_key, public_key) = generate_key_pair(); Device { id, pin, private_key, public_key, } } /// Attempts to load a `Device` from a database and creates a new one with a random key pair if /// none is found for the given ID. pub fn load_or_new(id: String, pin: Pin, database: &Database) -> Result<Device> { match database.get_device() { Ok(device) => Ok(device), Err(_) => { let device = Device::new_random(id, pin); database.set_device(&device)?; Ok(device) }, } } /// Loads a `Device` from a database. pub fn load_from(database: &DatabasePtr) -> Result<Device> { database.lock().expect("couldn't access database").get_device() } /// Saves a `Device` to a database. pub fn save_to(&self, database: &DatabasePtr) -> Result<()> { database.lock().expect("couldn't access database").set_device(self)?; Ok(()) } /// Serializes a `Device` to a `Vec<u8>`. pub fn as_bytes(&self) -> Result<Vec<u8>> { let value = serde_json::to_vec(&self)?; Ok(value) } /// Deserializes a `Device` from a `&[u8]`. pub fn from_bytes(bytes: &[u8]) -> Result<Device> { let value = serde_json::from_slice(bytes)?; Ok(value) } } fn generate_key_pair() -> ([u8; 64], [u8; 32]) { let mut rng = rand::thread_rng(); let seed = rng.gen::<[u8; 32]>(); ed25519::keypair(&seed) } // see https://github.com/serde-rs/serde/issues/631 trait BigArray<'de>: Sized { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer; fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>; } macro_rules! big_array { ($($len:expr,)+) => { $( impl<'de, T> BigArray<'de> for [T; $len] where T: Default + Copy + Serialize + Deserialize<'de> { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer { let mut seq = serializer.serialize_tuple(self.len())?; for elem in &self[..] { seq.serialize_element(elem)?; } seq.end() } fn deserialize<D>(deserializer: D) -> std::result::Result<[T; $len], D::Error> where D: Deserializer<'de> { struct ArrayVisitor<T> { element: PhantomData<T>, } impl<'de, T> Visitor<'de> for ArrayVisitor<T> where T: Default + Copy + Deserialize<'de> { type Value = [T; $len]; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str(concat!("an array of length ", $len)) } fn visit_seq<A>(self, mut seq: A) -> std::result::Result<[T; $len], A::Error> where A: SeqAccess<'de> { let mut arr = [T::default(); $len];
Ok(arr) } } let visitor = ArrayVisitor {element: PhantomData}; deserializer.deserialize_tuple($len, visitor) } } )+ } } big_array! { 40, 48, 50, 56, 64, 72, 96, 100, 128, 160, 192, 200, 224, 256, 384, 512, 768, 1024, 2048, 4096, 8192, 16384, 32768, 65536, }
for i in 0..$len { arr[i] = seq.next_element()? .ok_or_else(|| de::Error::invalid_length(i, &self))?; }
contact_additional_views.py
import re from django.db.models import Q from django.utils.translation import ugettext_lazy as _ from rest_framework.exceptions import APIException from rest_framework.response import Response from rest_framework.views import APIView from leasing.models import Contact from leasing.permissions import PerMethodPermission
perms_map = { 'GET': ['leasing.view_contact'], } def get_view_name(self): return _("Check if contact already exists") def get_view_description(self, html=False): return _("Check if contact already exists by business id or national identification number") def get(self, request, format=None): identifier = request.query_params.get('identifier', None) if not identifier: raise APIException(_('Query parameter "identifier" is mandatory')) if re.match(r'FI\d{8}', identifier, re.IGNORECASE): identifier = "{}-{}".format(identifier[2:9], identifier[-1]) return Response({ "exists": Contact.objects.filter( Q(business_id__iexact=identifier) | Q(national_identification_number__iexact=identifier)).exists(), })
class ContactExistsView(APIView): permission_classes = (PerMethodPermission,)
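The FI\d{8} branch in the view above rewrites a Finnish VAT-style identifier ("FI" plus eight digits) into the dashed business-id form before the lookup; a minimal standalone sketch of that normalization (hypothetical inputs):

import re

def normalize_identifier(identifier):
    # Mirrors the view above: "FI12345678" -> "1234567-8";
    # anything else passes through unchanged.
    if re.match(r'FI\d{8}', identifier, re.IGNORECASE):
        return "{}-{}".format(identifier[2:9], identifier[-1])
    return identifier

assert normalize_identifier("FI12345678") == "1234567-8"
assert normalize_identifier("1234567-8") == "1234567-8"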
error-handling.ts
import Koa from 'koa'; export function
() { return async (ctx: Koa.BaseContext, next: () => Promise<any>) => { try { await next(); } catch (err) { ctx.status = err.status || 500; ctx.body = err.message; ctx.app.emit('error', err, ctx); } }; }
errorHandling
bullet.rs
use bevy::math::Vec3; use crate::Owner; use crate::Velocity; pub struct Bullet {
} impl Bullet { pub fn new(owner: Owner, velocity: Vec3, sprite_file_path: String) -> Self { Bullet { owner, velocity, sprite_file_path } } pub fn get(&self) -> (&Owner, &Vec3, &String) { (&self.owner, &self.velocity, &self.sprite_file_path) } } impl Clone for Bullet { fn clone(&self) -> Self { Bullet::new(self.owner.clone(), self.velocity, self.sprite_file_path.clone()) } } impl Velocity for Bullet { fn get_velocity(&self) -> Vec3 { self.velocity } }
owner: Owner, velocity: Vec3, sprite_file_path: String,
utils.py
"""Useful command to download and clean data from OpenFoodfact.""" import requests keys = [ "id", "product_name_fr", "nutrition_grade_fr", "url", "image_front_url", "image_ingredients_url", ] class RequestData: """The class fetch the data and save it in to a json file.""" def __init__(self): self.cat_url = "https://fr.openfoodfacts.org/categories.json" self.search_url = "https://fr.openfoodfacts.org/cgi/search.pl" self.list_cat = [] self.list_prod = [] self.data = {} def
(self, page_size): """Main public method executing all the necessary private helpers.""" self.list_cat = self._fetch_category() data = self._fetch_products(page_size) return data def _fetch_category(self): """Request the list of categories from the API.""" print("Getting Categories from API") try: response = self._req(self.cat_url) data = response.json() list_cat = [i["name"] for i in data["tags"]][:17] self.data = {} return list_cat except requests.exceptions.Timeout as t: print("Request Timeout, please retry : ", t) return [] except requests.exceptions.RequestException as err: print("Something went bad, please retry : ", err) return [] def _fetch_products(self, page_size): """Request the products for each of the loaded categories.""" print( "Getting Products from API for the" " categories previously fetched" ) fields = ",".join(keys) all_products = {} for category in self.list_cat: config = { "action": "process", # Get the result by category "tagtype_0": "categories", # the tag represents the article search "tag_0": category, "fields": fields, "tag_contains_0": "contains", # Number of articles per page # Min content 20, Max content 1000 "page_size": page_size, # The API response in JSON "json": 1, } response = self._req(self.search_url, param=config) all_products[category] = response.json() return all_products def _req(self, url, param=None): """Small request helper used multiple times.""" response = requests.get(url, param) return response class Cleaner: """This class handles the data formatting before db use.""" def __init__(self, data): """Initialize variables.""" self.data = data self.keys = keys self.list_cat = [categories for categories in self.data] self._dict_data = {} self.list_of_dictio = [] self.barcode_list = [] self.name_list = [] def filter_product(self): """Go through the fetched data and run checks.""" for category in self.list_cat: for element in self.data[category]["products"]: if self._data_exist(element): self.list_of_dictio.append(element) self._dict_data[category] = self.list_of_dictio self.list_of_dictio = [] return self._dict_data def _data_exist(self, element): """Run through the data; if something's missing, the element is discarded.""" for x in self.keys: if ( x not in element or element[x] == "" or len(element["id"]) != 13 ): return False barcode = int(element["id"]) if barcode in self.barcode_list: return False else: self.barcode_list.append(barcode) name = element["product_name_fr"].lower() if name in self.name_list: return False else: self.name_list.append(name) return True def req_and_clean(page_size): """Main function to instantiate and launch operations.""" r = RequestData() data = r.exec(page_size) c = Cleaner(data) data = c.filter_product() return data if __name__ == "__main__": # 20 is the minimum page size the API accepts data = req_and_clean(20) print(data)
exec
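Cleaner._data_exist above drops products with missing fields, non-13-digit barcodes, and duplicate barcodes or names; a minimal sketch with synthetic data (hypothetical values, assuming the Cleaner class above is importable):

sample = {
    "Snacks": {
        "products": [
            {   # complete product: kept
                "id": "1234567890123",
                "product_name_fr": "Biscuit",
                "nutrition_grade_fr": "b",
                "url": "https://example.org/p/1",
                "image_front_url": "https://example.org/i/1.jpg",
                "image_ingredients_url": "https://example.org/i/1b.jpg",
            },
            {   # same barcode as above: dropped as a duplicate
                "id": "1234567890123",
                "product_name_fr": "Autre biscuit",
                "nutrition_grade_fr": "c",
                "url": "https://example.org/p/2",
                "image_front_url": "https://example.org/i/2.jpg",
                "image_ingredients_url": "https://example.org/i/2b.jpg",
            },
            {"id": "42"},  # missing fields and a 2-digit barcode: dropped
        ]
    }
}

cleaned = Cleaner(sample).filter_product()
assert [p["id"] for p in cleaned["Snacks"]] == ["1234567890123"]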
chapter4.go
package chapter4 import ( "encoding/json" "fmt" "io/ioutil" "log" "os" ) // Making a struct field name start with an uppercase letter lets other packages access it, e.g. person.Id // Here person.memo cannot be accessed from another package // Furthermore, fields starting with a lowercase letter are not encoded/decoded // Keep the field names uppercase and map them to lowercase JSON keys with tags like json:"id" // https://qiita.com/Yarimizu14/items/e93097c4f4cfd5468259 type Person struct { Id int `json:"id"` Name string `json:"name"` Email string `json:"-"` Age int `json:"age"` Address string `json:"address,omitempty"` memo string } func Do() { fmt.Println("chapter4 is starting!") doMarshal() doUnMarshal() doFiles() doJsonFiles() doIoUtil() } func doMarshal() { person := &Person{ Id: 1, Name: "Goper", Email: "[email protected]", Age: 5, Address: "", memo: "golang lover", } b, err := json.Marshal(person) if err != nil { log.Fatal(err) } // The struct is tagged, so the output follows those tags fmt.Println(string(b)) // => {"id":1,"name":"Goper","age":5} } func doUnMarshal() { var person Person b := []byte(`{"id":1,"name":"Gopher","age":5}`) err := json.Unmarshal(b, &person) if err != nil { log.Fatal(err) } fmt.Println(person) // => {1 Gopher 5 } } func doFiles() { // Create the file file, err := os.Create("./file.txt") if err != nil { log.Fatal(err) } // Close the file when the program finishes defer file.Close() // defer closes the file when the program finishes // Prepare the data to write as a []byte message := []byte("hello world!!!\n") // Write it with Write _, err = file.Write(message) // There is also WriteString //_, err = file.WriteString("hello world!!\n") // fmt.Fprint can write directly to the file //_, err = fmt.Fprint(file, "hello world\n") if err != nil { log.Fatal(err) } readFile, err := os.Open("./file.txt") if err != nil { log.Fatal(err) } defer readFile.Close() // This is the important part // Prepare a slice that can hold 10 bytes // The []byte must be allocated with the required length // Anything beyond 10 bytes cannot be read readMessage := make([]byte, 10) _, err = readFile.Read(readMessage) if err != nil { log.Fatal(err) } fmt.Println(string(readMessage)) } func doJsonFiles() { person := &Person{ Id: 1, Name: "Goper", Email: "[email protected]", Age: 5, Address: "", memo: "golang lover", } // From here on: // writing JSON // Open the file file, err := os.Create("./person.json") if err != nil { log.Fatal(err) } defer file.Close() // Get an encoder encoder := json.NewEncoder(file) // Write out to the file (using Encode) err = encoder.Encode(person) if err != nil { log.Fatal(err) } // From here on: // reading JSON readFile, err := os.Open("./person.json") if err != nil { log.Fatal(err) } defer readFile.Close() // Variable to read the data into var readPerson Person // Get a decoder decoder := json.NewDecoder(readFile) // Write the JSON-decoded data into it (using Decode) err = decoder.Decode(&readPerson) if err != nil { log.Fatal(err) } // Display the result that was read fmt.Println(readPerson) } func doIoUtil() { // ReadAll file, _ := os.Open("./file.txt") message, _ := ioutil.ReadAll(file) fmt.Println(string(message)) // With Printf, a []byte can be printed as-is //fmt.Printf("%s\n", message) // WriteFile message = []byte("hello world???") // perm is 0777, but the file actually created is 755; umask is probably in effect ioutil.WriteFile("./file_io_util.txt", message, 0777) // ReadFile message, _ = ioutil.ReadFile("./file_io_util.txt") fmt.Println(string(message)) }
test_optimizer.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import numpy as np import mxnet as mx import mxnet.lr_scheduler as lr_scheduler from mxnet import gluon import unittest from nose.tools import raises import math from mxnet.test_utils import * from common import setup_module, with_seed @with_seed() def test_learning_rate(): o1 = mx.optimizer.Optimizer(learning_rate=0.01) o1.set_learning_rate(0.2) assert o1.learning_rate == 0.2 lr_s = lr_scheduler.FactorScheduler(step=1) o2 = mx.optimizer.Optimizer(lr_scheduler=lr_s, learning_rate=0.3) assert o2.learning_rate == 0.3 o2.lr_scheduler.base_lr = 0.4 assert o2.learning_rate == 0.4 @raises(UserWarning) @with_seed() def test_learning_rate_expect_user_warning(): lr_s = lr_scheduler.FactorScheduler(step=1) o = mx.optimizer.Optimizer(lr_scheduler=lr_s, learning_rate=0.3) o.set_learning_rate(0.5) @with_seed() def test_lr_wd_mult(): data = mx.sym.Variable('data') bias = mx.sym.Variable('fc1_bias', lr_mult=1.0) fc1 = mx.sym.FullyConnected(data=data, bias=bias, name='fc1', num_hidden=10, lr_mult=0) fc2 = mx.sym.FullyConnected(data=fc1, name='fc2', num_hidden=10, wd_mult=0.5) mod = mx.mod.Module(symbol=fc2, label_names=None, context=default_context()) mod.bind(data_shapes=[('data', (5,10))]) mod.init_params(initializer=mx.init.Uniform(1.0)) mod.init_optimizer(optimizer_params={'learning_rate': 1.0}) args1, _ = mod.get_params() args1 = {k: v.asnumpy() for k, v in args1.items()} mod.forward(mx.io.DataBatch(data=[mx.random.uniform(low=-1.0, high=1.0, shape=(5,10))], label=None), is_train=True) mod.backward(mod.get_outputs()) mod.update() args2, _ = mod.get_params() args2 = {k: v.asnumpy() for k, v in args2.items()} assert mod._optimizer.lr_mult == {'fc1_bias': 1.0, 'fc1_weight': 0.0} assert mod._optimizer.wd_mult == {'fc2_bias': 0.5, 'fc2_weight': 0.5, 'fc1_bias': 0.0} assert mx.test_utils.almost_equal(args1['fc1_weight'], args2['fc1_weight'], 1e-10) assert not mx.test_utils.almost_equal(args1['fc1_bias'], args2['fc1_bias'], 1e-1) assert not mx.test_utils.almost_equal(args1['fc2_weight'], args2['fc2_weight'], 1e-1) def compare_ndarray_tuple(t1, t2, rtol=None, atol=None): if t1 is not None and t2 is not None: if isinstance(t1, tuple): for s1, s2 in zip(t1, t2): compare_ndarray_tuple(s1, s2, rtol, atol) else: assert_almost_equal(t1.asnumpy(), t2.asnumpy(), rtol=rtol, atol=atol) def compare_optimizer(opt1, opt2, shape, dtype, w_stype='default', g_stype='default', rtol=1e-4, atol=1e-5): if w_stype == 'default': w2 = mx.random.uniform(shape=shape, ctx=default_context(), dtype=dtype) w1 = w2.copyto(default_context()) elif w_stype == 'row_sparse' or w_stype == 'csr': w2 = rand_ndarray(shape, w_stype, density=1, dtype=dtype) w1 = w2.copyto(default_context()).tostype('default') else: raise Exception("type not supported yet") if g_stype == 'default': g2 
= mx.random.uniform(shape=shape, ctx=default_context(), dtype=dtype) g1 = g2.copyto(default_context()) elif g_stype == 'row_sparse' or g_stype == 'csr': g2 = rand_ndarray(shape, g_stype, dtype=dtype) g1 = g2.copyto(default_context()).tostype('default') else: raise Exception("type not supported yet") state1 = opt1.create_state_multi_precision(0, w1) state2 = opt2.create_state_multi_precision(0, w2) compare_ndarray_tuple(state1, state2) opt1.update_multi_precision(0, w1, g1, state1) opt2.update_multi_precision(0, w2, g2, state2) compare_ndarray_tuple(state1, state2, rtol=rtol, atol=atol) assert_almost_equal(w1.asnumpy(), w2.asnumpy(), rtol=rtol, atol=atol) # SGD class PySGD(mx.optimizer.Optimizer): """python reference implementation of sgd""" def __init__(self, learning_rate=0.01, momentum=0.0, multi_precision=False, **kwargs): super(PySGD, self).__init__(learning_rate=learning_rate, **kwargs) self.momentum = momentum self.multi_precision = multi_precision def create_state(self, index, weight): """Create additional optimizer state: momentum Parameters ---------- weight : NDArray The weight data """ momentum = None weight_master_copy = None do_multi_precision = self.multi_precision and weight.dtype == np.float16 if do_multi_precision: if self.momentum != 0.0: momentum = mx.nd.zeros(weight.shape, weight.context, dtype=np.float32) weight_master_copy = array(weight, ctx=weight.context, dtype=np.float32) return (momentum, weight_master_copy) else: if self.momentum != 0.0: momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype) return momentum def create_state_multi_precision(self, index, weight): return self.create_state(index, weight) def update(self, index, weight, grad, state): """Update the parameters. Parameters ---------- index : int A unique integer key used to index the parameters weight : NDArray weight ndarray grad : NDArray grad ndarray state : NDArray or other objects returned by init_state The auxiliary state used in optimization. """ lr = self._get_lr(index) wd = self._get_wd(index) self._update_count(index) use_multi_precision = isinstance(state, list) or isinstance(state, tuple) if not use_multi_precision: if self.momentum == 0.0: if self.clip_gradient is not None: weight[:] = ((1 - lr*wd)*weight - lr*mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient)) else: weight[:] = (1 - lr*wd)*weight - lr*self.rescale_grad*grad else: mom = state if self.clip_gradient is not None: mom[:] = (self.momentum*mom - lr*wd*weight - lr*mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient)) weight += mom else: mom[:] = self.momentum*mom - lr*wd*weight - lr*self.rescale_grad*grad weight += mom else: grad32 = array(grad, ctx=grad.context, dtype=np.float32) mom = state[0] weight32 = state[1] if self.momentum == 0.0: if self.clip_gradient is not None: weight32[:] = ((1 - lr*wd)*weight32 - lr*mx.nd.clip(grad32*self.rescale_grad, -self.clip_gradient, self.clip_gradient)) else: weight32[:] = (1 - lr*wd)*weight32 - lr*self.rescale_grad*grad32 else: if self.clip_gradient is not None: mom[:] = (self.momentum*mom - lr*wd*weight32 - lr*mx.nd.clip(grad32*self.rescale_grad, -self.clip_gradient, self.clip_gradient)) weight32 += mom else: mom[:] = self.momentum*mom - lr*wd*weight32 - lr*self.rescale_grad*grad32 weight32 += mom tmp = weight32.astype(weight.dtype) tmp.copyto(weight) def update_multi_precision(self, index, weight, grad, state): self.update(index, weight, grad, state) @unittest.skip("Test fails intermittently.
Temporarily disabled until fixed. Tracked at https://github.com/apache/incubator-mxnet/issues/9000") @with_seed() def test_sgd(): opt1 = PySGD opt2 = mx.optimizer.SGD shape = (3, 4, 5) mom_options = [{}, {'momentum': 0.9}] cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}] mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}] for dtype in [np.float16, np.float32, np.float64]: for mom_option in mom_options: for cg_option in cg_options: for rg_option in rg_options: for wd_option in wd_options: for mp_option in mp_options: kwarg = {} kwarg.update(mom_option) kwarg.update(cg_option) kwarg.update(rg_option) kwarg.update(wd_option) kwarg.update(mp_option) if (dtype == np.float16 and ('multi_precision' not in kwarg or not kwarg['multi_precision'])): continue compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype) # test operator fallback on cpu if (default_context() == mx.cpu()): compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype, g_stype='row_sparse') if dtype != np.float16: compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape[:2], dtype, w_stype='csr', g_stype='csr') # test optimizer with a big shape big_shape = (54686454, 1) kwarg = {'momentum': 0.9, 'wd': 0.05} compare_optimizer(opt1(**kwarg), opt2(**kwarg), big_shape, np.float32) class PySparseSGD(mx.optimizer.Optimizer): """python reference implementation of sgd""" def __init__(self, learning_rate=0.01, momentum=0.0, **kwargs): super(PySparseSGD, self).__init__(learning_rate=learning_rate, **kwargs) self.momentum = momentum def create_state(self, index, weight): """Create additional optimizer state: momentum Parameters ---------- weight : NDArray The weight data """ if self.momentum == 0.0: return None else: return mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype) def update(self, index, weight, grad, state): """Update the parameters. Parameters ---------- index : int A unique integer key used to index the parameters weight : NDArray weight ndarray grad : NDArray grad ndarray state : NDArray or other objects returned by init_state The auxiliary state used in optimization.
""" lr = self._get_lr(index) wd = self._get_wd(index) self._update_count(index) num_rows = weight.shape[0] if self.momentum == 0.0: # Update on a per row basis, skip all-zero rows for row in range(num_rows): grad_row = grad[row].asnumpy() all_zeros = mx.test_utils.almost_equal(grad_row, np.zeros_like(grad_row)) if all_zeros: continue if self.clip_gradient is not None: weight[row] = ((1 - lr*wd)*weight[row] - lr*mx.nd.clip(grad[row]*self.rescale_grad, -self.clip_gradient, self.clip_gradient)) else: weight[row] = (1 - lr*wd)*weight[row] - lr*self.rescale_grad*grad[row] else: mom = state for row in range(num_rows): grad_row = grad[row].asnumpy() all_zeros = mx.test_utils.almost_equal(grad_row, np.zeros_like(grad_row)) if all_zeros: continue if self.clip_gradient is not None: mom[row] = (self.momentum*mom[row] - lr*wd*weight[row] - lr*mx.nd.clip(grad[row]*self.rescale_grad, -self.clip_gradient, self.clip_gradient)) weight[row] += mom[row] else: mom[row] = self.momentum*mom[row] - lr*wd*weight[row] - lr*self.rescale_grad*grad[row] weight[row] += mom[row] @with_seed() def test_sparse_sgd(): opt1 = PySparseSGD opt2 = mx.optimizer.SGD shape = (3, 4, 5) mom_options = [{}, {'momentum': 0.9}] cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}] mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}] for dtype in [np.float32]: for mom_option in mom_options: for cg_option in cg_options: for rg_option in rg_options: for wd_option in wd_options: for mp_option in mp_options: kwarg = {} kwarg.update(mom_option) kwarg.update(cg_option) kwarg.update(rg_option) kwarg.update(wd_option) kwarg.update(mp_option) compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype, w_stype='row_sparse', g_stype='row_sparse') @with_seed(0) def test_std_sparse_sgd(): opt1 = PySGD opt2 = mx.optimizer.SGD shape = (3, 4, 5) mom_options = [{'momentum': 0.9}] cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}] for dtype in [np.float32]: for mom_option in mom_options: for cg_option in cg_options: for rg_option in rg_options: for wd_option in wd_options: kwarg = {} kwarg.update(mom_option) kwarg.update(cg_option) kwarg.update(rg_option) kwarg.update(wd_option) compare_optimizer(opt1(**kwarg), opt2(lazy_update=False, **kwarg), shape, dtype, w_stype='row_sparse', g_stype='row_sparse') class PyNAG(PySGD): def __init__(self, **kwargs): super(PyNAG, self).__init__(**kwargs) def create_state(self, index, weight): """Create additional optimizer state: momentum Parameters ---------- weight : NDArray The weight data """ momentum = None weight_master_copy = None do_multi_precision = self.multi_precision and weight.dtype == np.float16 if do_multi_precision: if self.momentum != 0.0: momentum = mx.nd.zeros(weight.shape, weight.context, dtype=np.float32) weight_master_copy = array(weight, ctx=weight.context, dtype=np.float32) return (weight_master_copy, momentum) else: if self.momentum != 0.0: momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype) return momentum def create_state_multi_precision(self, index, weight): return self.create_state(index, weight) def update(self, index, weight, grad, state):
@with_seed(0) def test_nag(): opt1 = PyNAG opt2 = mx.optimizer.NAG shape = (3, 4, 5) mom_options = [{}, {'momentum': 0.9}] cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}] mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}] for dtype in [np.float16, np.float32, np.float64]: for mom_option in mom_options: for cg_option in cg_options: for rg_option in rg_options: for wd_option in wd_options: for mp_option in mp_options: kwarg = {} kwarg.update(mom_option) kwarg.update(cg_option) kwarg.update(rg_option) kwarg.update(wd_option) kwarg.update(mp_option) if (dtype == np.float16 and ('multi_precision' not in kwarg or not kwarg['multi_precision'])): continue compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype) # FTML class PyFTML(mx.optimizer.Optimizer): """python reference implementation of FTML""" def __init__(self, beta1=0.6, beta2=0.999, epsilon=1e-8, **kwargs): super(PyFTML, self).__init__(**kwargs) self.beta1 = beta1 self.beta2 = beta2 self.epsilon = epsilon def create_state(self, index, weight): return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # d_0 mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # v_0 mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)) # z_0 def update(self, index, weight, grad, state): assert(isinstance(weight, mx.nd.NDArray)) assert(isinstance(grad, mx.nd.NDArray)) self._update_count(index) lr = self._get_lr(index) wd = self._get_wd(index) t = self._index_update_count[index] grad = grad * self.rescale_grad + wd * weight if self.clip_gradient is not None: grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient) # get previous states prev_d, prev_v, prev_z = state # compute states v_t = self.beta2 * prev_v + (1 - self.beta2) * mx.nd.square(grad) d_t = (1 - pow(self.beta1, t)) / lr * (mx.nd.sqrt(v_t / (1 - pow(self.beta2, t))) + self.epsilon) sigma_t = d_t - self.beta1 * prev_d z_t = self.beta1 * prev_z + (1 - self.beta1) * grad - sigma_t * weight # update weight weight[:] = - z_t / d_t # update states prev_d[:] = d_t prev_v[:] = v_t prev_z[:] = z_t @with_seed(0) def test_ftml(): opt1 = PyFTML opt2 = mx.optimizer.FTML shape = (3, 4, 5) beta1_options = [{}, {'beta1': 0.5}, {'beta1': 0.7}] beta2_options = [{}, {'beta2': 0.8}, {'beta2': 0.9}] cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}] for dtype in [np.float32]: for beta1_option in beta1_options: for beta2_option in beta2_options: for cg_option in cg_options: for rg_option in rg_options: for wd_option in wd_options: kwarg = {} kwarg.update(beta1_option) kwarg.update(beta2_option) kwarg.update(cg_option) kwarg.update(rg_option) kwarg.update(wd_option) compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype) # ADAM class PyAdam(mx.optimizer.Optimizer): """python reference implementation of adam""" def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, decay_factor=(1 - 1e-8), sparse_update=False, **kwargs): super(PyAdam, self).__init__(learning_rate=learning_rate, **kwargs) self.beta1 = beta1 self.beta2 = beta2 self.epsilon = epsilon self.decay_factor = decay_factor self.sparse_update = sparse_update def create_state(self, index, weight): """Create additional optimizer state: mean, variance Parameters ---------- weight : NDArray The
weight data """ return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # mean mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)) # variance def update(self, index, weight, grad, state): """Update the parameters. Parameters ---------- index : int A unique integer key used to index the parameters weight : NDArray weight ndarray grad : NDArray grad ndarray state : NDArray or other objects returned by init_state The auxiliary state used in optimization. """ lr = self._get_lr(index) self._update_count(index) t = self._index_update_count[index] mean, variance = state wd = self._get_wd(index) num_rows = weight.shape[0] coef1 = 1. - self.beta1**t coef2 = 1. - self.beta2**t lr *= math.sqrt(coef2)/coef1 for row in range(num_rows): # check row slices of all zeros all_zeros = mx.test_utils.almost_equal(grad[row].asnumpy(), np.zeros_like(grad[row].asnumpy())) # skip zeros during sparse update if all_zeros and self.sparse_update: continue grad[row] = grad[row] * self.rescale_grad + wd * weight[row] # clip gradients if self.clip_gradient is not None: mx.nd.clip(grad[row], -self.clip_gradient, self.clip_gradient, out=grad[row]) # update mean mean[row] *= self.beta1 mean[row] += grad[row] * (1. - self.beta1) # update variance variance[row] *= self.beta2 variance[row] += (1 - self.beta2) * mx.nd.square(grad[row], out=grad[row]) # update weight weight[row] -= lr*mean[row]/(mx.nd.sqrt(variance[row]) + self.epsilon) @with_seed() def test_adam(): opt1 = PyAdam opt2 = mx.optimizer.Adam shape = (3, 4, 5) cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}] mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}] for dtype in [np.float16, np.float32, np.float64]: for cg_option in cg_options: for rg_option in rg_options: for wd_option in wd_options: for mp_option in mp_options: kwarg = {} kwarg.update(cg_option) kwarg.update(rg_option) kwarg.update(wd_option) kwarg.update(mp_option) if (dtype == np.float16 and ('multi_precision' not in kwarg or not kwarg['multi_precision'])): continue # atol 2e-5 needed to pass with seed 1248389097 compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype, rtol=1e-4, atol=2e-5) # atol 2e-5 needed to pass with seed 781809840 compare_optimizer(opt1(sparse_update=True, **kwarg), opt2(**kwarg), shape, dtype, w_stype='row_sparse', g_stype='row_sparse', rtol=1e-4, atol=2e-5) compare_optimizer(opt1(**kwarg), opt2(lazy_update=False, **kwarg), shape, dtype, w_stype='row_sparse', g_stype='row_sparse', rtol=1e-4, atol=2e-5) # Signum class PySignum(mx.optimizer.Optimizer): """The python reference of Signum optimizer. The optimizer updates the weight by: rescaled_grad = rescale_grad * clip(grad, clip_gradient) + wd * weight state = momentum * state + (1-momentum)*rescaled_grad weight = (1 - lr * wd_lh) * weight - lr * sign(state) See the original paper at: https://jeremybernste.in/projects/amazon/signum.pdf For details of the update algorithm see :class:`~mxnet.ndarray.signsgd_update` and :class:`~mxnet.ndarray.signum_update`. This optimizer accepts the following parameters in addition to those accepted by :class:`.Optimizer`. Parameters ---------- momentum : float, optional The momentum value. wd_lh : float, optional The amount of decoupled weight decay regularization.
""" def __init__(self, learning_rate=0.01, momentum=0.9, wd_lh = 0.0, **kwargs): super(PySignum, self).__init__(learning_rate = learning_rate, **kwargs) self.momentum = momentum self.wd_lh = wd_lh def create_state(self, index, weight): momentum = None if self.momentum != 0.0: momentum = mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype, stype=weight.stype) return momentum def update(self, index, weight, grad, state): self._update_count(index) lr = self._get_lr(index) wd = self._get_wd(index) if state is not None: mom = state if self.clip_gradient is not None: mom[:] = (self.momentum*mom - (1-self.momentum)*(wd*weight + mx.nd.clip(grad*self.rescale_grad, -self.clip_gradient, self.clip_gradient))) else: mom[:] = self.momentum*mom - (1-self.momentum)*wd*weight - (1-self.momentum)*self.rescale_grad*grad weight[:] = (1 - lr*self.wd_lh)*weight + lr*mx.nd.sign(mom) else: weight[:] = (1 - lr*(wd+self.wd_lh))*weight - lr*mx.nd.sign(grad) @with_seed(0) def test_signum(): opt1 = PySignum opt2 = mx.optimizer.Signum shape = (3, 4, 5) cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}] wd_lh_options = [{}, {'wd_lh': 0.015}, {'wd_lh': 0.0}] mom_options = [{}, {'momentum': 0.9}] lr_options = [{'learning_rate': 0.05},{'learning_rate': 0.01}] for dtype in [np.float32, np.float64]: for cg_option in cg_options: for rg_option in rg_options: for wd_option in wd_options: for mp_option in wd_lh_options: for lr_option in lr_options: for mom_option in mom_options: kwarg = {} kwarg.update(cg_option) kwarg.update(rg_option) kwarg.update(wd_option) kwarg.update(mp_option) kwarg.update(lr_option) kwarg.update(mom_option) compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype) # RMSProp class PyRMSProp(mx.optimizer.Optimizer): """RMSProp optimizer of Tieleman & Hinton, 2012, For centered=False, the code follows the version in http://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf by Tieleman & Hinton, 2012 For centered=True, the code follows the version in http://arxiv.org/pdf/1308.0850v5.pdf Eq(38) - Eq(45) by Alex Graves, 2013. Parameters ---------- learning_rate : float, optional Step size. Default value is set to 0.001. gamma1: float, optional decay factor of moving average for gradient, gradient^2. Default value is set to 0.9. gamma2: float, optional "momentum" factor. Default value if set to 0.9. Only used if centered=True epsilon : float, optional Default value is set to 1e-8. centered : boolean, optional Use Graves or Tielemans & Hintons version of RMSProp wd : float, optional L2 regularization coefficient add to all the weights rescale_grad : float, optional rescaling factor of gradient. clip_gradient : float, optional clip gradient in range [-clip_gradient, clip_gradient] clip_weights : float, optional clip weights in range [-clip_weights, clip_weights] """ def __init__(self, learning_rate=0.001, gamma1=0.9, gamma2=0.9, epsilon=1e-8, centered=False, clip_weights=None, **kwargs): super(PyRMSProp, self).__init__(learning_rate=learning_rate, **kwargs) self.centered = centered self.gamma1 = gamma1 self.gamma2 = gamma2 self.epsilon = epsilon self.clip_weights = clip_weights def create_state(self, index, weight): """Create additional optimizer state. 
For centered=False: n For centered=True: n, g, delta Parameters ---------- weight : NDArray The weight data """ if self.centered: return (mx.nd.zeros(weight.shape, weight.context), # n mx.nd.zeros(weight.shape, weight.context), # g mx.nd.zeros(weight.shape, weight.context)) # delta else: return (mx.nd.zeros(weight.shape, weight.context), ) # n def update(self, index, weight, grad, state): """Update the parameters. Parameters ---------- index : int A unique integer key used to index the parameters weight : NDArray weight ndarray grad : NDArray grad ndarray state : NDArray or other objects returned by init_state The auxiliary state used in optimization. """ lr = self._get_lr(index) wd = self._get_wd(index) self._update_count(index) grad = grad * self.rescale_grad + wd * weight if not self.centered: (n, ) = state if self.clip_gradient is not None: grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient) n[:] = (1 - self.gamma1) * (grad * grad) + self.gamma1 * n weight[:] -= lr * grad/(mx.nd.sqrt(n + self.epsilon)) else: n, g, delta = state if self.clip_gradient is not None: grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient) n[:] = (1 - self.gamma1) * (grad * grad) + self.gamma1 * n g[:] = (1 - self.gamma1) * grad + self.gamma1 * g delta[:] = (self.gamma2) * delta - lr * grad/(mx.nd.sqrt(n - g*g + self.epsilon)) weight[:] += delta if self.clip_weights: mx.ndarray.clip(weight, -self.clip_weights, self.clip_weights, out=weight) @unittest.skip("Test fails intermittently. Temporarily disabled until fixed. Tracked at https://github.com/apache/incubator-mxnet/issues/8230") @with_seed(0) def test_rms(): opt1 = PyRMSProp opt2 = mx.optimizer.RMSProp shape = (3, 4, 5) cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] cw_options = [{}, {'clip_weights': 0.01}] center_options = [{}, {'centered': False}, {'centered': True}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.03}, {'wd': 0.05}, {'wd': 0.07}] mp_options = [{}, {'multi_precision': False}, {'multi_precision': True}] for dtype in [np.float16, np.float32]: for cw_option in cw_options: for cg_option in cg_options: for center_option in center_options: for rg_option in rg_options: for wd_option in wd_options: for mp_option in mp_options: kwarg = {} kwarg.update(cw_option) kwarg.update(cg_option) kwarg.update(center_option) kwarg.update(rg_option) kwarg.update(wd_option) kwarg.update(mp_option) if (dtype == np.float16 and ('multi_precision' not in kwarg or not kwarg['multi_precision'])): continue compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype) if (default_context() == mx.cpu()): compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype, g_stype='row_sparse') class PyFtrl(mx.optimizer.Optimizer): """The Ftrl optimizer. Referenced from *Ad Click Prediction: a View from the Trenches*, available at http://dl.acm.org/citation.cfm?id=2488200. Parameters ---------- lamda1 : float, optional L1 regularization coefficient. learning_rate : float, optional The initial learning rate. beta : float, optional Per-coordinate learning rate correlation parameter. eta : ..
math:: \\eta_{t,i} = \\frac{learningrate}{\\beta+\\sqrt{\\sum_{s=1}^tg_{s,i}^t}} """ def __init__(self, lamda1=0.01, learning_rate=0.1, beta=1, sparse_update=False, **kwargs): super(PyFtrl, self).__init__(**kwargs) self.lamda1 = lamda1 self.beta = beta self.lr = learning_rate self.sparse_update = sparse_update def create_state(self, index, weight): return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # dn mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)) # n def update(self, index, weight, grad, state): self._update_count(index) wd = self._get_wd(index) lr = self._get_lr(index) num_rows = weight.shape[0] dn, n = state for row in range(num_rows): all_zeros = mx.test_utils.almost_equal(grad[row].asnumpy(), np.zeros_like(grad[row].asnumpy())) if all_zeros and self.sparse_update: continue grad[row] = grad[row] * self.rescale_grad if self.clip_gradient is not None: mx.nd.clip(grad[row], -self.clip_gradient, self.clip_gradient, out=grad[row]) #update dn, n dn[row] += grad[row] - (mx.nd.sqrt(n[row] + grad[row] * grad[row]) - mx.nd.sqrt(n[row])) * weight[row] / lr n[row] += grad[row] * grad[row] # update weight weight[row] = (mx.nd.sign(dn[row]) * self.lamda1 - dn[row]) / \ ((self.beta + mx.nd.sqrt(n[row])) / lr + wd) * (mx.nd.abs(dn[row]) > self.lamda1) @with_seed() def test_ftrl(): opt1 = PyFtrl opt2 = mx.optimizer.Ftrl shape = (3, 4, 5) kwargs = [{}, {'clip_gradient': 0.5}, {'clip_gradient': 0.4, 'rescale_grad': 0.14}, {'rescale_grad': 0.8}, {'clip_gradient': 0.5, 'wd': 0.07}, {'clip_gradient': 0.4, 'rescale_grad': 0.14, 'wd': 0.03}, {'rescale_grad': 0.8, 'wd': 0.05}, {'rescale_grad': 0.8, 'wd': 0.05, 'lamda1': 0.01}, {'clip_gradient': 0.5, 'wd': 0.07, 'lamda1': 1.0}] for kwarg in kwargs: compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, np.float32) compare_optimizer(opt1(sparse_update=True, **kwarg), opt2(**kwarg), shape, np.float32, w_stype='row_sparse', g_stype='row_sparse') @with_seed(1234) def test_nadam(): def get_net(num_hidden, flatten=True): data = mx.symbol.Variable('data') fc1 = mx.symbol.FullyConnected(data, name='fc1', num_hidden=128, flatten=flatten) act1 = mx.symbol.Activation(fc1, name='relu1', act_type="relu") fc2 = mx.symbol.FullyConnected(act1, name = 'fc2', num_hidden = 64, flatten=flatten) act2 = mx.symbol.Activation(fc2, name='relu2', act_type="relu") fc3 = mx.symbol.FullyConnected(act2, name='fc3', num_hidden=num_hidden, flatten=flatten) return fc3 N = 20 data = mx.random.uniform(-1, 1, shape=(N, 10)) label = mx.random.uniform(-1, 1, shape=(N, 1)) data_iter = mx.io.NDArrayIter(data, label, batch_size=5, label_name='label', shuffle=True) output = get_net(1) l = mx.symbol.Variable('label') Loss = gluon.loss.L1Loss() loss = Loss(output, l) loss = mx.sym.make_loss(loss) mod = mx.mod.Module(loss, data_names=('data',), label_names=('label',)) mod.fit(data_iter, num_epoch=60, optimizer_params={'learning_rate': 0.0005, 'wd': 0.0005}, initializer=mx.init.Xavier(magnitude=2), eval_metric=mx.metric.Loss(), optimizer='nadam') assert mod.score(data_iter, eval_metric=mx.metric.Loss())[0][1] < 0.1 # AdaGrad class PyAdaGrad(mx.optimizer.Optimizer): """The python reference of AdaGrad optimizer. This class implements the AdaGrad optimizer described in *Adaptive Subgradient Methods for Online Learning and Stochastic Optimization*, and available at http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf. 
Updates are applied by:: rescaled_grad = clip(grad * rescale_grad + wd * weight, clip_gradient) history = history + square(rescaled_grad) w = w - learning_rate * rescaled_grad / sqrt(history + epsilon) This optimizer accepts the following parameters in addition to those accepted by :class:`.Optimizer`. Parameters ---------- eps: float, optional Small value to avoid division by 0. """ def __init__(self, eps=1e-7, **kwargs): super(PyAdaGrad, self).__init__(**kwargs) self.float_stable_eps = eps def create_state(self, index, weight): return mx.nd.zeros(weight.shape, weight.context, stype=weight.stype) def update(self, index, weight, grad, state): self._update_count(index) lr = self._get_lr(index) wd = self._get_wd(index) history = state grad = grad * self.rescale_grad if self.clip_gradient is not None: grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient) history[:] += mx.nd.square(grad) div = grad / mx.nd.sqrt(history + self.float_stable_eps) weight[:] += (div + weight * wd) * -lr def test_adagrad(): mx.random.seed(0) opt1 = PyAdaGrad opt2 = mx.optimizer.AdaGrad shape = (3, 4, 5) eps_options = [{}, {'eps': 1e-8}] cg_options = [{}, {'clip_gradient': 0.4}, {'clip_gradient': 0.5}] rg_options = [{}, {'rescale_grad': 0.14}, {'rescale_grad': 0.8}] wd_options = [{}, {'wd': 0.0}] for dtype in [np.float32]: for eps_option in eps_options: for cg_option in cg_options: for rg_option in rg_options: for wd_option in wd_options: kwarg = {} kwarg.update(eps_option) kwarg.update(cg_option) kwarg.update(rg_option) kwarg.update(wd_option) compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype) if wd_option.get('wd', 0.0) == 0.0: compare_optimizer(opt1(**kwarg), opt2(**kwarg), shape, dtype, w_stype='row_sparse', g_stype='row_sparse') if __name__ == '__main__': import nose nose.runmodule()
"""Update the parameters. Parameters ---------- index : int An unique integer key used to index the parameters weight : NDArray weight ndarray grad : NDArray grad ndarray state : NDArray or other objects returned by init_state The auxiliary state used in optimization. """ lr = self._get_lr(index) wd = self._get_wd(index) self._update_count(index) use_multi_precision = isinstance(state, list) or isinstance(state, tuple) if not use_multi_precision: grad = grad * self.rescale_grad if self.clip_gradient is not None: grad = mx.nd.clip(grad, -self.clip_gradient, self.clip_gradient) if self.momentum == 0.0: weight[:] += -lr * (grad + wd * weight) else: mom = state mom[:] *= self.momentum grad += wd * weight mom[:] += grad grad[:] += self.momentum * mom weight[:] += -lr * grad else: grad32 = array(grad, ctx=grad.context, dtype=np.float32) grad32 = grad32 * self.rescale_grad if self.clip_gradient is not None: grad32 = mx.nd.clip(grad32, -self.clip_gradient, self.clip_gradient) mom = state[1] weight32 = state[0] if self.momentum == 0.0: weight32[:] += -lr * (grad32 + wd * weight32) else: mom[:] *= self.momentum grad32 += wd * weight32 mom[:] += grad32 grad32[:] += self.momentum * mom weight32[:] += -lr * grad32 tmp = weight32.astype(weight.dtype) tmp.copyto(weight)
slice.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Slice management and manipulation //! //! For more details see `std::slice`. #![stable(feature = "rust1", since = "1.0.0")] // How this module is organized. // // The library infrastructure for slices is fairly messy. There's // a lot of stuff defined here. Let's keep it clean. // // Since slices don't support inherent methods, all operations // on them are defined on traits, which are then reexported from // the prelude for convenience. So there are a lot of traits here. // // The layout of this file is thus: // // * Slice-specific 'extension' traits and their implementations. This // is where most of the slice API resides. // * Implementations of a few common traits with important slice ops. // * Definitions of a bunch of iterators. // * Free functions. // * The `raw` and `bytes` submodules. // * Boilerplate trait implementations. use mem::transmute; use clone::Clone; use cmp::{Ordering, PartialEq, PartialOrd, Eq, Ord}; use cmp::Ordering::{Less, Equal, Greater}; use cmp; use default::Default; use intrinsics::assume; use iter::*; use ops::{FnMut, self, Index}; use ops::RangeFull; use option::Option; use option::Option::{None, Some}; use result::Result; use result::Result::{Ok, Err}; use ptr; use mem; use mem::size_of; use marker::{Send, Sync, self}; use raw::Repr; // Avoid conflicts with *both* the Slice trait (buggy) and the `slice::raw` module. use raw::Slice as RawSlice; // // Extension traits // /// Extension methods for slices.
#[allow(missing_docs)] // docs in libcollections #[doc(hidden)] #[unstable(feature = "core_slice_ext", reason = "stable interface provided by `impl [T]` in later crates")] pub trait SliceExt { type Item; fn split_at<'a>(&'a self, mid: usize) -> (&'a [Self::Item], &'a [Self::Item]); fn iter<'a>(&'a self) -> Iter<'a, Self::Item>; fn split<'a, P>(&'a self, pred: P) -> Split<'a, Self::Item, P> where P: FnMut(&Self::Item) -> bool; fn splitn<'a, P>(&'a self, n: usize, pred: P) -> SplitN<'a, Self::Item, P> where P: FnMut(&Self::Item) -> bool; fn rsplitn<'a, P>(&'a self, n: usize, pred: P) -> RSplitN<'a, Self::Item, P> where P: FnMut(&Self::Item) -> bool; fn windows<'a>(&'a self, size: usize) -> Windows<'a, Self::Item>; fn chunks<'a>(&'a self, size: usize) -> Chunks<'a, Self::Item>; fn get<'a>(&'a self, index: usize) -> Option<&'a Self::Item>; fn first<'a>(&'a self) -> Option<&'a Self::Item>; fn tail<'a>(&'a self) -> &'a [Self::Item]; fn init<'a>(&'a self) -> &'a [Self::Item]; fn split_first<'a>(&'a self) -> Option<(&'a Self::Item, &'a [Self::Item])>; fn split_last<'a>(&'a self) -> Option<(&'a Self::Item, &'a [Self::Item])>; fn last<'a>(&'a self) -> Option<&'a Self::Item>; unsafe fn get_unchecked<'a>(&'a self, index: usize) -> &'a Self::Item; fn as_ptr(&self) -> *const Self::Item; fn binary_search_by<F>(&self, f: F) -> Result<usize, usize> where F: FnMut(&Self::Item) -> Ordering; fn len(&self) -> usize; fn is_empty(&self) -> bool { self.len() == 0 } fn get_mut<'a>(&'a mut self, index: usize) -> Option<&'a mut Self::Item>; fn iter_mut<'a>(&'a mut self) -> IterMut<'a, Self::Item>; fn first_mut<'a>(&'a mut self) -> Option<&'a mut Self::Item>; fn tail_mut<'a>(&'a mut self) -> &'a mut [Self::Item]; fn init_mut<'a>(&'a mut self) -> &'a mut [Self::Item]; fn split_first_mut<'a>(&'a mut self) -> Option<(&'a mut Self::Item, &'a mut [Self::Item])>; fn split_last_mut<'a>(&'a mut self) -> Option<(&'a mut Self::Item, &'a mut [Self::Item])>; fn last_mut<'a>(&'a mut self) -> Option<&'a mut Self::Item>; fn split_mut<'a, P>(&'a mut self, pred: P) -> SplitMut<'a, Self::Item, P> where P: FnMut(&Self::Item) -> bool; fn splitn_mut<P>(&mut self, n: usize, pred: P) -> SplitNMut<Self::Item, P> where P: FnMut(&Self::Item) -> bool; fn rsplitn_mut<P>(&mut self, n: usize, pred: P) -> RSplitNMut<Self::Item, P> where P: FnMut(&Self::Item) -> bool; fn chunks_mut<'a>(&'a mut self, chunk_size: usize) -> ChunksMut<'a, Self::Item>; fn swap(&mut self, a: usize, b: usize); fn split_at_mut<'a>(&'a mut self, mid: usize) -> (&'a mut [Self::Item], &'a mut [Self::Item]); fn reverse(&mut self); unsafe fn get_unchecked_mut<'a>(&'a mut self, index: usize) -> &'a mut Self::Item; fn as_mut_ptr(&mut self) -> *mut Self::Item; fn position_elem(&self, t: &Self::Item) -> Option<usize> where Self::Item: PartialEq; fn rposition_elem(&self, t: &Self::Item) -> Option<usize> where Self::Item: PartialEq; fn contains(&self, x: &Self::Item) -> bool where Self::Item: PartialEq; fn starts_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq; fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq; fn binary_search(&self, x: &Self::Item) -> Result<usize, usize> where Self::Item: Ord; fn next_permutation(&mut self) -> bool where Self::Item: Ord; fn prev_permutation(&mut self) -> bool where Self::Item: Ord; fn clone_from_slice(&mut self, &[Self::Item]) -> usize where Self::Item: Clone; } // Use macros to be generic over const/mut macro_rules! 
slice_offset { ($ptr:expr, $by:expr) => {{ let ptr = $ptr; if size_from_ptr(ptr) == 0 { ::intrinsics::arith_offset(ptr as *mut i8, $by) as *mut _ } else { ptr.offset($by) } }}; } macro_rules! slice_ref { ($ptr:expr) => {{ let ptr = $ptr; if size_from_ptr(ptr) == 0 { // Use a non-null pointer value &mut *(1 as *mut _) } else { transmute(ptr) } }}; } impl<T> SliceExt for [T] { type Item = T; #[inline] fn split_at(&self, mid: usize) -> (&[T], &[T]) { (&self[..mid], &self[mid..]) } #[inline] fn iter<'a>(&'a self) -> Iter<'a, T> { unsafe { let p = if mem::size_of::<T>() == 0 { 1 as *const _ } else { let p = self.as_ptr(); assume(!p.is_null()); p }; Iter { ptr: p, end: slice_offset!(p, self.len() as isize), _marker: marker::PhantomData } } } #[inline] fn split<'a, P>(&'a self, pred: P) -> Split<'a, T, P> where P: FnMut(&T) -> bool { Split { v: self, pred: pred, finished: false } } #[inline] fn splitn<'a, P>(&'a self, n: usize, pred: P) -> SplitN<'a, T, P> where P: FnMut(&T) -> bool, { SplitN { inner: GenericSplitN { iter: self.split(pred), count: n, invert: false } } } #[inline] fn rsplitn<'a, P>(&'a self, n: usize, pred: P) -> RSplitN<'a, T, P> where P: FnMut(&T) -> bool, { RSplitN { inner: GenericSplitN { iter: self.split(pred), count: n, invert: true } } } #[inline] fn windows(&self, size: usize) -> Windows<T> { assert!(size != 0); Windows { v: self, size: size } } #[inline] fn chunks(&self, size: usize) -> Chunks<T> { assert!(size != 0); Chunks { v: self, size: size } } #[inline] fn get(&self, index: usize) -> Option<&T> { if index < self.len() { Some(&self[index]) } else { None } } #[inline] fn first(&self) -> Option<&T> { if self.is_empty() { None } else { Some(&self[0]) } } #[inline] fn tail(&self) -> &[T] { &self[1..] } #[inline] fn split_first(&self) -> Option<(&T, &[T])> { if self.is_empty() { None } else { Some((&self[0], &self[1..])) } } #[inline] fn init(&self) -> &[T] { &self[..self.len() - 1] } #[inline] fn split_last(&self) -> Option<(&T, &[T])> { let len = self.len(); if len == 0 { None } else { Some((&self[len - 1], &self[..(len - 1)])) } } #[inline] fn last(&self) -> Option<&T> { if self.is_empty() { None } else { Some(&self[self.len() - 1]) } } #[inline] unsafe fn get_unchecked(&self, index: usize) -> &T { transmute(self.repr().data.offset(index as isize)) } #[inline] fn as_ptr(&self) -> *const T { self.repr().data } fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize> where F: FnMut(&T) -> Ordering { let mut base : usize = 0; let mut lim : usize = self.len(); while lim != 0 { let ix = base + (lim >> 1); match f(&self[ix]) { Equal => return Ok(ix), Less => { base = ix + 1; lim -= 1; } Greater => () } lim >>= 1; } Err(base) } #[inline] fn len(&self) -> usize { self.repr().len } #[inline] fn get_mut(&mut self, index: usize) -> Option<&mut T> { if index < self.len() { Some(&mut self[index]) } else { None } } #[inline] fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) { let len = self.len(); let ptr = self.as_mut_ptr(); assert!(mid <= len); unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.offset(mid as isize), len - mid)) } } #[inline] fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> { unsafe { let p = if mem::size_of::<T>() == 0 { 1 as *mut _ } else { let p = self.as_mut_ptr(); assume(!p.is_null()); p }; IterMut { ptr: p, end: slice_offset!(p, self.len() as isize), _marker: marker::PhantomData } } } #[inline] fn last_mut(&mut self) -> Option<&mut T> { let len = self.len(); if len == 0 { return None; } Some(&mut self[len - 1]) } #[inline] fn 
first_mut(&mut self) -> Option<&mut T> { if self.is_empty() { None } else { Some(&mut self[0]) } } #[inline] fn tail_mut(&mut self) -> &mut [T] { &mut self[1 ..] } #[inline] fn split_first_mut(&mut self) -> Option<(&mut T, &mut [T])> { if self.is_empty() { None } else { let split = self.split_at_mut(1); Some((&mut split.0[0], split.1)) } } #[inline] fn init_mut(&mut self) -> &mut [T] { let len = self.len(); &mut self[.. (len - 1)] } #[inline] fn split_last_mut(&mut self) -> Option<(&mut T, &mut [T])> { let len = self.len(); if len == 0 { None } else { let split = self.split_at_mut(len - 1); Some((&mut split.1[0], split.0)) } } #[inline] fn split_mut<'a, P>(&'a mut self, pred: P) -> SplitMut<'a, T, P> where P: FnMut(&T) -> bool { SplitMut { v: self, pred: pred, finished: false } } #[inline] fn splitn_mut<'a, P>(&'a mut self, n: usize, pred: P) -> SplitNMut<'a, T, P> where P: FnMut(&T) -> bool { SplitNMut { inner: GenericSplitN { iter: self.split_mut(pred), count: n, invert: false } } } #[inline] fn rsplitn_mut<'a, P>(&'a mut self, n: usize, pred: P) -> RSplitNMut<'a, T, P> where P: FnMut(&T) -> bool, { RSplitNMut { inner: GenericSplitN { iter: self.split_mut(pred), count: n, invert: true } } } #[inline] fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> { assert!(chunk_size > 0); ChunksMut { v: self, chunk_size: chunk_size } } #[inline] fn swap(&mut self, a: usize, b: usize) { unsafe { // Can't take two mutable loans from one vector, so instead just cast // them to their raw pointers to do the swap let pa: *mut T = &mut self[a]; let pb: *mut T = &mut self[b]; ptr::swap(pa, pb); } } fn reverse(&mut self) { let mut i: usize = 0; let ln = self.len(); while i < ln / 2 { // Unsafe swap to avoid the bounds check in safe swap. unsafe { let pa: *mut T = self.get_unchecked_mut(i); let pb: *mut T = self.get_unchecked_mut(ln - i - 1); ptr::swap(pa, pb); } i += 1; } } #[inline] unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T { transmute((self.repr().data as *mut T).offset(index as isize)) } #[inline] fn as_mut_ptr(&mut self) -> *mut T { self.repr().data as *mut T } #[inline] fn position_elem(&self, x: &T) -> Option<usize> where T: PartialEq { self.iter().position(|y| *x == *y) } #[inline] fn rposition_elem(&self, t: &T) -> Option<usize> where T: PartialEq { self.iter().rposition(|x| *x == *t) } #[inline] fn contains(&self, x: &T) -> bool where T: PartialEq { self.iter().any(|elt| *x == *elt) } #[inline] fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq { let n = needle.len(); self.len() >= n && needle == &self[..n] } #[inline] fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq { let (m, n) = (self.len(), needle.len()); m >= n && needle == &self[m-n..] } fn binary_search(&self, x: &T) -> Result<usize, usize> where T: Ord { self.binary_search_by(|p| p.cmp(x)) } fn next_permutation(&mut self) -> bool where T: Ord { // These cases only have 1 permutation each, so we can't do anything. if self.len() < 2 { return false; } // Step 1: Identify the longest, rightmost weakly decreasing part of the vector let mut i = self.len() - 1; while i > 0 && self[i-1] >= self[i] { i -= 1; } // If that is the entire vector, this is the last-ordered permutation. 
if i == 0 { return false; } // Step 2: Find the rightmost element larger than the pivot (i-1) let mut j = self.len() - 1; while j >= i && self[j] <= self[i-1] { j -= 1; } // Step 3: Swap that element with the pivot self.swap(j, i-1); // Step 4: Reverse the (previously) weakly decreasing part self[i..].reverse(); true } fn prev_permutation(&mut self) -> bool where T: Ord { // These cases only have 1 permutation each, so we can't do anything. if self.len() < 2 { return false; } // Step 1: Identify the longest, rightmost weakly increasing part of the vector let mut i = self.len() - 1; while i > 0 && self[i-1] <= self[i] { i -= 1; } // If that is the entire vector, this is the first-ordered permutation. if i == 0 { return false; } // Step 2: Reverse the weakly increasing part self[i..].reverse(); // Step 3: Find the rightmost element equal to or bigger than the pivot (i-1) let mut j = self.len() - 1; while j >= i && self[j-1] < self[i-1] { j -= 1; } // Step 4: Swap that element with the pivot self.swap(i-1, j); true } #[inline] fn clone_from_slice(&mut self, src: &[T]) -> usize where T: Clone { let min = cmp::min(self.len(), src.len()); let dst = &mut self[.. min]; let src = &src[.. min]; for i in 0..min { dst[i].clone_from(&src[i]); } min } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::Index<usize> for [T] { type Output = T; fn index(&self, index: usize) -> &T { assert!(index < self.len()); unsafe { mem::transmute(self.repr().data.offset(index as isize)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::IndexMut<usize> for [T] { #[inline] fn index_mut(&mut self, index: usize) -> &mut T { assert!(index < self.len()); unsafe { mem::transmute(self.repr().data.offset(index as isize)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::Index<ops::Range<usize>> for [T] { type Output = [T]; #[inline] fn index(&self, index: ops::Range<usize>) -> &[T] { assert!(index.start <= index.end); assert!(index.end <= self.len()); unsafe { from_raw_parts ( self.as_ptr().offset(index.start as isize), index.end - index.start ) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::Index<ops::RangeTo<usize>> for [T] { type Output = [T]; #[inline] fn index(&self, index: ops::RangeTo<usize>) -> &[T] { self.index(ops::Range{ start: 0, end: index.end }) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::Index<ops::RangeFrom<usize>> for [T] { type Output = [T]; #[inline] fn index(&self, index: ops::RangeFrom<usize>) -> &[T] { self.index(ops::Range{ start: index.start, end: self.len() }) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::Index<RangeFull> for [T] { type Output = [T]; #[inline] fn index(&self, _index: RangeFull) -> &[T] { self } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::IndexMut<ops::Range<usize>> for [T] { #[inline] fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [T] { assert!(index.start <= index.end); assert!(index.end <= self.len()); unsafe { from_raw_parts_mut( self.as_mut_ptr().offset(index.start as isize), index.end - index.start ) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::IndexMut<ops::RangeTo<usize>> for [T] { #[inline] fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] { self.index_mut(ops::Range{ start: 0, end: index.end }) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::IndexMut<ops::RangeFrom<usize>> for [T] { #[inline] fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] { let len = self.len(); self.index_mut(ops::Range{ 
start: index.start, end: len }) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> ops::IndexMut<RangeFull> for [T] { #[inline] fn index_mut(&mut self, _index: RangeFull) -> &mut [T] { self } } //////////////////////////////////////////////////////////////////////////////// // Common traits //////////////////////////////////////////////////////////////////////////////// #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Default for &'a [T] { #[stable(feature = "rust1", since = "1.0.0")] fn default() -> &'a [T] { &[] } } // // Iterators // #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a [T] { type Item = &'a T; type IntoIter = Iter<'a, T>; fn into_iter(self) -> Iter<'a, T> { self.iter() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> IntoIterator for &'a mut [T] { type Item = &'a mut T; type IntoIter = IterMut<'a, T>; fn into_iter(self) -> IterMut<'a, T> { self.iter_mut() } } #[inline(always)] fn size_from_ptr<T>(_: *const T) -> usize { mem::size_of::<T>() } // The shared definition of the `Iter` and `IterMut` iterators macro_rules! iterator { (struct $name:ident -> $ptr:ty, $elem:ty) => { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for $name<'a, T> { type Item = $elem; #[inline] fn next(&mut self) -> Option<$elem> { // could be implemented with slices, but this avoids bounds checks unsafe { if mem::size_of::<T>() != 0 { assume(!self.ptr.is_null()); assume(!self.end.is_null()); } if self.ptr == self.end { None } else { let old = self.ptr; self.ptr = slice_offset!(self.ptr, 1); Some(slice_ref!(old)) } } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { let diff = (self.end as usize).wrapping_sub(self.ptr as usize); let size = mem::size_of::<T>(); let exact = diff / (if size == 0 {1} else {size}); (exact, Some(exact)) } #[inline] fn count(self) -> usize { self.size_hint().0 } #[inline] fn nth(&mut self, n: usize) -> Option<$elem> { // Call helper method. Can't put the definition here because mut versus const. self.iter_nth(n) } #[inline] fn last(mut self) -> Option<$elem> { self.next_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for $name<'a, T> { #[inline] fn next_back(&mut self) -> Option<$elem> { // could be implemented with slices, but this avoids bounds checks unsafe { if mem::size_of::<T>() != 0 { assume(!self.ptr.is_null()); assume(!self.end.is_null()); } if self.end == self.ptr { None } else { self.end = slice_offset!(self.end, -1); Some(slice_ref!(self.end)) } } } } } } macro_rules! make_slice { ($start: expr, $end: expr) => {{ let start = $start; let diff = ($end as usize).wrapping_sub(start as usize); if size_from_ptr(start) == 0 { // use a non-null pointer value unsafe { from_raw_parts(1 as *const _, diff) } } else { let len = diff / size_from_ptr(start); unsafe { from_raw_parts(start, len) } } }} } macro_rules! 
make_mut_slice { ($start: expr, $end: expr) => {{ let start = $start; let diff = ($end as usize).wrapping_sub(start as usize); if size_from_ptr(start) == 0 { // use a non-null pointer value unsafe { from_raw_parts_mut(1 as *mut _, diff) } } else { let len = diff / size_from_ptr(start); unsafe { from_raw_parts_mut(start, len) } } }} } /// Immutable slice iterator #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, T: 'a> { ptr: *const T, end: *const T, _marker: marker::PhantomData<&'a T>, } unsafe impl<'a, T: Sync> Sync for Iter<'a, T> {} unsafe impl<'a, T: Sync> Send for Iter<'a, T> {} impl<'a, T> Iter<'a, T> { /// View the underlying data as a subslice of the original data. /// /// This has the same lifetime as the original slice, and so the /// iterator can continue to be used while this exists. #[unstable(feature = "iter_to_slice")] pub fn as_slice(&self) -> &'a [T] { make_slice!(self.ptr, self.end) } // Helper function for Iter::nth fn iter_nth(&mut self, n: usize) -> Option<&'a T> { match self.as_slice().get(n) { Some(elem_ref) => unsafe { self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1)); Some(elem_ref) }, None => { self.ptr = self.end; None } } } } iterator!{struct Iter -> *const T, &'a T} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> ExactSizeIterator for Iter<'a, T> {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Iter<'a, T> { fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } } } #[unstable(feature = "iter_idx", reason = "trait is experimental")] #[allow(deprecated)] impl<'a, T> RandomAccessIterator for Iter<'a, T> { #[inline] fn indexable(&self) -> usize { let (exact, _) = self.size_hint(); exact } #[inline] fn idx(&mut self, index: usize) -> Option<&'a T> { unsafe { if index < self.indexable() { Some(slice_ref!(self.ptr.offset(index as isize))) } else { None } } } } /// Mutable slice iterator. #[stable(feature = "rust1", since = "1.0.0")] pub struct IterMut<'a, T: 'a> { ptr: *mut T, end: *mut T, _marker: marker::PhantomData<&'a mut T>, } unsafe impl<'a, T: Sync> Sync for IterMut<'a, T> {} unsafe impl<'a, T: Send> Send for IterMut<'a, T> {} impl<'a, T> IterMut<'a, T> { /// View the underlying data as a subslice of the original data. /// /// To avoid creating `&mut` references that alias, this is forced /// to consume the iterator. Consider using the `Slice` and /// `SliceMut` implementations for obtaining slices with more /// restricted lifetimes that do not consume the iterator. #[unstable(feature = "iter_to_slice")] pub fn into_slice(self) -> &'a mut [T] { make_mut_slice!(self.ptr, self.end) } // Helper function for IterMut::nth fn iter_nth(&mut self, n: usize) -> Option<&'a mut T> { match make_mut_slice!(self.ptr, self.end).get_mut(n) { Some(elem_ref) => unsafe { self.ptr = slice_offset!(self.ptr, (n as isize).wrapping_add(1)); Some(elem_ref) }, None => { self.ptr = self.end; None } } } } iterator!{struct IterMut -> *mut T, &'a mut T} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> ExactSizeIterator for IterMut<'a, T> {} /// An internal abstraction over the splitting iterators, so that /// splitn, splitn_mut etc can be implemented once. trait SplitIter: DoubleEndedIterator { /// Mark the underlying iterator as complete, extracting the remaining /// portion of the slice. fn finish(&mut self) -> Option<Self::Item>; } /// An iterator over subslices separated by elements that match a predicate /// function. 
#[stable(feature = "rust1", since = "1.0.0")] pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool { v: &'a [T], pred: P, finished: bool } // FIXME(#19839) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> Clone for Split<'a, T, P> where P: Clone + FnMut(&T) -> bool { fn clone(&self) -> Split<'a, T, P> { Split { v: self.v, pred: self.pred.clone(), finished: self.finished, } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.finished { return None; } match self.v.iter().position(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { let ret = Some(&self.v[..idx]); self.v = &self.v[idx + 1..]; ret } } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.finished { return None; } match self.v.iter().rposition(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { let ret = Some(&self.v[idx + 1..]); self.v = &self.v[..idx]; ret } } } } impl<'a, T, P> SplitIter for Split<'a, T, P> where P: FnMut(&T) -> bool { #[inline] fn finish(&mut self) -> Option<&'a [T]> { if self.finished { None } else { self.finished = true; Some(self.v) } } } /// An iterator over the subslices of the vector which are separated /// by elements that match `pred`. #[stable(feature = "rust1", since = "1.0.0")] pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool { v: &'a mut [T], pred: P, finished: bool } impl<'a, T, P> SplitIter for SplitMut<'a, T, P> where P: FnMut(&T) -> bool { #[inline] fn finish(&mut self) -> Option<&'a mut [T]> { if self.finished
else { self.finished = true; Some(mem::replace(&mut self.v, &mut [])) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> Iterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { if self.finished { return None; } let idx_opt = { // work around borrowck limitations let pred = &mut self.pred; self.v.iter().position(|x| (*pred)(x)) }; match idx_opt { None => self.finish(), Some(idx) => { let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(idx); self.v = &mut tail[1..]; Some(head) } } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { if self.finished { (0, Some(0)) } else { // if the predicate doesn't match anything, we yield one slice // if it matches every element, we yield len+1 empty slices. (1, Some(self.v.len() + 1)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, P> DoubleEndedIterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool, { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { if self.finished { return None; } let idx_opt = { // work around borrowck limitations let pred = &mut self.pred; self.v.iter().rposition(|x| (*pred)(x)) }; match idx_opt { None => self.finish(), Some(idx) => { let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(idx); self.v = head; Some(&mut tail[1..]) } } } } /// An private iterator over subslices separated by elements that /// match a predicate function, splitting at most a fixed number of /// times. struct GenericSplitN<I> { iter: I, count: usize, invert: bool } impl<T, I: SplitIter<Item=T>> Iterator for GenericSplitN<I> { type Item = T; #[inline] fn next(&mut self) -> Option<T> { match self.count { 0 => None, 1 => { self.count -= 1; self.iter.finish() } _ => { self.count -= 1; if self.invert {self.iter.next_back()} else {self.iter.next()} } } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { let (lower, upper_opt) = self.iter.size_hint(); (lower, upper_opt.map(|upper| cmp::min(self.count, upper))) } } /// An iterator over subslices separated by elements that match a predicate /// function, limited to a given number of splits. #[stable(feature = "rust1", since = "1.0.0")] pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool { inner: GenericSplitN<Split<'a, T, P>> } /// An iterator over subslices separated by elements that match a /// predicate function, limited to a given number of splits, starting /// from the end of the slice. #[stable(feature = "rust1", since = "1.0.0")] pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool { inner: GenericSplitN<Split<'a, T, P>> } /// An iterator over subslices separated by elements that match a predicate /// function, limited to a given number of splits. #[stable(feature = "rust1", since = "1.0.0")] pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool { inner: GenericSplitN<SplitMut<'a, T, P>> } /// An iterator over subslices separated by elements that match a /// predicate function, limited to a given number of splits, starting /// from the end of the slice. #[stable(feature = "rust1", since = "1.0.0")] pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool { inner: GenericSplitN<SplitMut<'a, T, P>> } macro_rules! 
forward_iterator { ($name:ident: $elem:ident, $iter_of:ty) => { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, $elem, P> Iterator for $name<'a, $elem, P> where P: FnMut(&T) -> bool { type Item = $iter_of; #[inline] fn next(&mut self) -> Option<$iter_of> { self.inner.next() } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } } } forward_iterator! { SplitN: T, &'a [T] } forward_iterator! { RSplitN: T, &'a [T] } forward_iterator! { SplitNMut: T, &'a mut [T] } forward_iterator! { RSplitNMut: T, &'a mut [T] } /// An iterator over overlapping subslices of length `size`. #[stable(feature = "rust1", since = "1.0.0")] pub struct Windows<'a, T:'a> { v: &'a [T], size: usize } // FIXME(#19839) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Windows<'a, T> { fn clone(&self) -> Windows<'a, T> { Windows { v: self.v, size: self.size, } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Windows<'a, T> { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.size > self.v.len() { None } else { let ret = Some(&self.v[..self.size]); self.v = &self.v[1..]; ret } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { if self.size > self.v.len() { (0, Some(0)) } else { let size = self.v.len() - self.size + 1; (size, Some(size)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Windows<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.size > self.v.len() { None } else { let ret = Some(&self.v[self.v.len()-self.size..]); self.v = &self.v[..self.v.len()-1]; ret } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> ExactSizeIterator for Windows<'a, T> {} #[unstable(feature = "iter_idx", reason = "trait is experimental")] #[allow(deprecated)] impl<'a, T> RandomAccessIterator for Windows<'a, T> { #[inline] fn indexable(&self) -> usize { self.size_hint().0 } #[inline] fn idx(&mut self, index: usize) -> Option<&'a [T]> { if index + self.size > self.v.len() { None } else { Some(&self.v[index .. index+self.size]) } } } /// An iterator over a slice in (non-overlapping) chunks (`size` elements at a /// time). /// /// When the slice len is not evenly divided by the chunk size, the last slice /// of the iteration will be the remainder. 
#[stable(feature = "rust1", since = "1.0.0")] pub struct Chunks<'a, T:'a> { v: &'a [T], size: usize } // FIXME(#19839) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Chunks<'a, T> { fn clone(&self) -> Chunks<'a, T> { Chunks { v: self.v, size: self.size, } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for Chunks<'a, T> { type Item = &'a [T]; #[inline] fn next(&mut self) -> Option<&'a [T]> { if self.v.is_empty() { None } else { let chunksz = cmp::min(self.v.len(), self.size); let (fst, snd) = self.v.split_at(chunksz); self.v = snd; Some(fst) } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { if self.v.is_empty() { (0, Some(0)) } else { let n = self.v.len() / self.size; let rem = self.v.len() % self.size; let n = if rem > 0 { n+1 } else { n }; (n, Some(n)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for Chunks<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a [T]> { if self.v.is_empty() { None } else { let remainder = self.v.len() % self.size; let chunksz = if remainder != 0 { remainder } else { self.size }; let (fst, snd) = self.v.split_at(self.v.len() - chunksz); self.v = fst; Some(snd) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> ExactSizeIterator for Chunks<'a, T> {} #[unstable(feature = "iter_idx", reason = "trait is experimental")] #[allow(deprecated)] impl<'a, T> RandomAccessIterator for Chunks<'a, T> { #[inline] fn indexable(&self) -> usize { self.v.len()/self.size + if self.v.len() % self.size != 0 { 1 } else { 0 } } #[inline] fn idx(&mut self, index: usize) -> Option<&'a [T]> { if index < self.indexable() { let lo = index * self.size; let mut hi = lo + self.size; if hi < lo || hi > self.v.len() { hi = self.v.len(); } Some(&self.v[lo..hi]) } else { None } } } /// An iterator over a slice in (non-overlapping) mutable chunks (`size` /// elements at a time). When the slice len is not evenly divided by the chunk /// size, the last slice of the iteration will be the remainder. #[stable(feature = "rust1", since = "1.0.0")] pub struct ChunksMut<'a, T:'a> { v: &'a mut [T], chunk_size: usize } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Iterator for ChunksMut<'a, T> { type Item = &'a mut [T]; #[inline] fn next(&mut self) -> Option<&'a mut [T]> { if self.v.is_empty() { None } else { let sz = cmp::min(self.v.len(), self.chunk_size); let tmp = mem::replace(&mut self.v, &mut []); let (head, tail) = tmp.split_at_mut(sz); self.v = tail; Some(head) } } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { if self.v.is_empty() { (0, Some(0)) } else { let n = self.v.len() / self.chunk_size; let rem = self.v.len() % self.chunk_size; let n = if rem > 0 { n + 1 } else { n }; (n, Some(n)) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> DoubleEndedIterator for ChunksMut<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a mut [T]> { if self.v.is_empty() { None } else { let remainder = self.v.len() % self.chunk_size; let sz = if remainder != 0 { remainder } else { self.chunk_size }; let tmp = mem::replace(&mut self.v, &mut []); let tmp_len = tmp.len(); let (head, tail) = tmp.split_at_mut(tmp_len - sz); self.v = head; Some(tail) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {} // // Free functions // /// Converts a pointer to A into a slice of length 1 (without copying). 
#[unstable(feature = "ref_slice")] pub fn ref_slice<'a, A>(s: &'a A) -> &'a [A] { unsafe { from_raw_parts(s, 1) } } /// Converts a pointer to A into a slice of length 1 (without copying). #[unstable(feature = "ref_slice")] pub fn mut_ref_slice<'a, A>(s: &'a mut A) -> &'a mut [A] { unsafe { from_raw_parts_mut(s, 1) } } /// Forms a slice from a pointer and a length. /// /// The `len` argument is the number of **elements**, not the number of bytes. /// /// # Unsafety /// /// This function is unsafe as there is no guarantee that the given pointer is /// valid for `len` elements, nor whether the lifetime inferred is a suitable /// lifetime for the returned slice. /// /// `p` must be non-null, even for zero-length slices. /// /// # Caveat /// /// The lifetime for the returned slice is inferred from its usage. To /// prevent accidental misuse, it's suggested to tie the lifetime to whichever /// source lifetime is safe in the context, such as by providing a helper /// function taking the lifetime of a host value for the slice, or by explicit /// annotation. /// /// # Examples /// /// ``` /// use std::slice; /// /// // manifest a slice out of thin air! /// let ptr = 0x1234 as *const usize; /// let amt = 10; /// unsafe { /// let slice = slice::from_raw_parts(ptr, amt); /// } /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts<'a, T>(p: *const T, len: usize) -> &'a [T] { transmute(RawSlice { data: p, len: len }) } /// Performs the same functionality as `from_raw_parts`, except that a mutable /// slice is returned. /// /// This function is unsafe for the same reasons as `from_raw_parts`, as well /// as not being able to provide a non-aliasing guarantee of the returned /// mutable slice. #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn from_raw_parts_mut<'a, T>(p: *mut T, len: usize) -> &'a mut [T] { transmute(RawSlice { data: p, len: len }) } // // Submodules // /// Operations on `[u8]`. #[unstable(feature = "slice_bytes", reason = "needs review")] pub mod bytes { use ptr; use slice::SliceExt; /// A trait for operations on mutable `[u8]`s. pub trait MutableByteVector { /// Sets all bytes of the receiver to the given value. fn set_memory(&mut self, value: u8); } impl MutableByteVector for [u8] { #[inline] fn set_memory(&mut self, value: u8) { unsafe { ptr::write_bytes(self.as_mut_ptr(), value, self.len()) }; } } /// Copies data from `src` to `dst` /// /// Panics if the length of `dst` is less than the length of `src`. #[inline] pub fn copy_memory(src: &[u8], dst: &mut [u8]) { let len_src = src.len(); assert!(dst.len() >= len_src); // `dst` is unaliasable, so we know statically it doesn't overlap // with `src`. 
unsafe { ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), len_src); } } } // // Boilerplate traits // #[stable(feature = "rust1", since = "1.0.0")] impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> { fn eq(&self, other: &[B]) -> bool { if self.len() != other.len() { return false; } for i in 0..self.len() { if !self[i].eq(&other[i]) { return false; } } true } fn ne(&self, other: &[B]) -> bool { if self.len() != other.len() { return true; } for i in 0..self.len() { if self[i].ne(&other[i]) { return true; } } false } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Eq> Eq for [T] {} #[stable(feature = "rust1", since = "1.0.0")] impl<T: Ord> Ord for [T] { fn cmp(&self, other: &[T]) -> Ordering { order::cmp(self.iter(), other.iter()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: PartialOrd> PartialOrd for [T] { #[inline] fn partial_cmp(&self, other: &[T]) -> Option<Ordering> { order::partial_cmp(self.iter(), other.iter()) } #[inline] fn lt(&self, other: &[T]) -> bool { order::lt(self.iter(), other.iter()) } #[inline] fn le(&self, other: &[T]) -> bool { order::le(self.iter(), other.iter()) } #[inline] fn ge(&self, other: &[T]) -> bool { order::ge(self.iter(), other.iter()) } #[inline] fn gt(&self, other: &[T]) -> bool { order::gt(self.iter(), other.iter()) } } /// Extension methods for slices containing integers. #[unstable(feature = "int_slice")] #[deprecated(since = "1.2.0", reason = "has not seen much usage and may want to live in the \ standard library now that most slice methods are \ on an inherent implementation block")] pub trait IntSliceExt<U, S> { /// Converts the slice to an immutable slice of unsigned integers with the same width. fn as_unsigned<'a>(&'a self) -> &'a [U]; /// Converts the slice to an immutable slice of signed integers with the same width. fn as_signed<'a>(&'a self) -> &'a [S]; /// Converts the slice to a mutable slice of unsigned integers with the same width. fn as_unsigned_mut<'a>(&'a mut self) -> &'a mut [U]; /// Converts the slice to a mutable slice of signed integers with the same width. fn as_signed_mut<'a>(&'a mut self) -> &'a mut [S]; } macro_rules! impl_int_slice { ($u:ty, $s:ty, $t:ty) => { #[unstable(feature = "int_slice")] #[allow(deprecated)] impl IntSliceExt<$u, $s> for [$t] { #[inline] fn as_unsigned(&self) -> &[$u] { unsafe { transmute(self) } } #[inline] fn as_signed(&self) -> &[$s] { unsafe { transmute(self) } } #[inline] fn as_unsigned_mut(&mut self) -> &mut [$u] { unsafe { transmute(self) } } #[inline] fn as_signed_mut(&mut self) -> &mut [$s] { unsafe { transmute(self) } } } } } macro_rules! impl_int_slices { ($u:ty, $s:ty) => { impl_int_slice! { $u, $s, $u } impl_int_slice! { $u, $s, $s } } } impl_int_slices! { u8, i8 } impl_int_slices! { u16, i16 } impl_int_slices! { u32, i32 } impl_int_slices! { u64, i64 } impl_int_slices! { usize, isize }
{ None }
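The `next_permutation` implementation above is the classic lexicographic algorithm: find the longest weakly decreasing suffix, pick the rightmost element larger than the pivot just before it, swap, then reverse the suffix. Since the `SliceExt` method shown here sat behind an unstable API, the following is a minimal stand-alone sketch of the same algorithm as a free function on stable Rust; the function name and the `main` driver are illustrative, not part of the original file.

// Advance `v` to its next lexicographic permutation in place.
// Returns false once `v` is already the last (descending) permutation.
fn next_permutation<T: Ord>(v: &mut [T]) -> bool {
    if v.len() < 2 {
        return false; // 0- or 1-element slices have a single permutation
    }
    // Step 1: find the start `i` of the longest weakly decreasing suffix.
    let mut i = v.len() - 1;
    while i > 0 && v[i - 1] >= v[i] {
        i -= 1;
    }
    if i == 0 {
        return false; // the whole slice is decreasing: last permutation
    }
    // Step 2: rightmost element strictly greater than the pivot v[i - 1].
    let mut j = v.len() - 1;
    while j >= i && v[j] <= v[i - 1] {
        j -= 1;
    }
    // Steps 3 and 4: swap it with the pivot, then reverse the suffix.
    v.swap(i - 1, j);
    v[i..].reverse();
    true
}

fn main() {
    let mut v = [1, 2, 3];
    println!("{:?}", v);
    while next_permutation(&mut v) {
        println!("{:?}", v); // [1, 3, 2], [2, 1, 3], ..., [3, 2, 1]
    }
}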
nrfhw.rs
#[doc = "Reader of register NRFHW[%s]"] pub type R = crate::R<u32, super::NRFHW>; #[doc = "Writer for register NRFHW[%s]"] pub type W = crate::W<u32, super::NRFHW>; #[doc = "Register NRFHW[%s] `reset()`'s with value 0xffff_ffff"] impl crate::ResetValue for super::NRFHW { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type
} #[doc = "Reader of field `NRFHW`"] pub type NRFHW_R = crate::R<u32, u32>; #[doc = "Write proxy for field `NRFHW`"] pub struct NRFHW_W<'a> { w: &'a mut W, } impl<'a> NRFHW_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff); self.w } } impl R { #[doc = "Bits 0:31 - Reserved for Nordic hardware design"] #[inline(always)] pub fn nrfhw(&self) -> NRFHW_R { NRFHW_R::new((self.bits & 0xffff_ffff) as u32) } } impl W { #[doc = "Bits 0:31 - Reserved for Nordic hardware design"] #[inline(always)] pub fn nrfhw(&mut self) -> NRFHW_W { NRFHW_W { w: self } } }
{ 0xffff_ffff }
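Generated reader/writer pairs like these are normally driven through the register's read/write closures. Below is a hedged usage sketch: the `pac` crate name, the `uicr` register-block path, and the array index are assumptions for illustration; only the `nrfhw()` and `bits()` accessors come from the generated code above.

// Hypothetical use of the generated accessors shown above.
fn roundtrip_nrfhw(uicr: &pac::uicr::RegisterBlock) -> u32 {
    // read() yields an `R`; nrfhw() narrows it to the 32-bit field reader.
    let word = uicr.nrfhw[0].read().nrfhw().bits();
    // The write proxy's bits() is unsafe because it accepts any raw u32.
    uicr.nrfhw[0].write(|w| unsafe { w.nrfhw().bits(word) });
    word
}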
show_bp.py
from flask import Blueprint from controllers.show import shows, create_shows, create_show_submission
show_bp.route('/', methods=['GET'])(shows) show_bp.route('/create', methods=['GET'])(create_shows) show_bp.route('/create', methods=['POST'])(create_show_submission)
show_bp = Blueprint('show_bp', __name__)
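For completeness, a hedged sketch of mounting this blueprint on an application; the `create_app` factory and the `/shows` URL prefix are illustrative choices, not taken from this repository.

# Illustrative wiring; assumes show_bp.py and controllers/ are importable.
from flask import Flask

from show_bp import show_bp

def create_app():
    app = Flask(__name__)
    # The three routes above become /shows/ and /shows/create.
    app.register_blueprint(show_bp, url_prefix='/shows')
    return app

if __name__ == '__main__':
    create_app().run(debug=True)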
event.rs
use crate::Context; use crate::sapp::{self, sapp_keycode, sapp_mousebutton}; #[derive(Debug, Copy, Clone, PartialEq, Hash, Eq)] pub enum MouseButton { Right, Left, Middle, Unknown, } #[derive(Debug, Copy, Clone)] pub struct Touch { pub id: u32, pub x: f32, pub y: f32, } impl From<sapp_mousebutton> for MouseButton { fn from(btn: sapp_mousebutton) -> MouseButton { match btn { sapp::sapp_mousebutton_SAPP_MOUSEBUTTON_LEFT => MouseButton::Left, sapp::sapp_mousebutton_SAPP_MOUSEBUTTON_RIGHT => MouseButton::Right, sapp::sapp_mousebutton_SAPP_MOUSEBUTTON_MIDDLE => MouseButton::Middle, _ => MouseButton::Unknown, } } } #[derive(Debug, Copy, Clone, PartialEq, Hash, Eq)] #[repr(u32)] pub enum KeyCode { Space, Apostrophe, Comma, Minus, Period, Slash, Key0, Key1, Key2, Key3, Key4, Key5, Key6, Key7, Key8, Key9, Semicolon, Equal, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z, LeftBracket, Backslash, RightBracket, GraveAccent, World1, World2, Escape, Enter, Tab, Backspace, Insert, Delete, Right, Left, Down, Up, PageUp, PageDown, Home, End, CapsLock, ScrollLock, NumLock, PrintScreen, Pause, F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12, F13, F14, F15, F16, F17, F18, F19, F20, F21, F22, F23, F24, F25, Kp0, Kp1, Kp2, Kp3, Kp4, Kp5, Kp6, Kp7, Kp8, Kp9, KpDecimal, KpDivide, KpMultiply, KpSubtract, KpAdd, KpEnter, KpEqual, LeftShift, LeftControl, LeftAlt, LeftSuper, RightShift, RightControl, RightAlt, RightSuper, Menu, Unknown, } impl From<sapp_keycode> for KeyCode { fn from(key_code: sapp_keycode) -> KeyCode { match key_code { sapp::sapp_keycode_SAPP_KEYCODE_SPACE => KeyCode::Space, sapp::sapp_keycode_SAPP_KEYCODE_APOSTROPHE => KeyCode::Apostrophe, sapp::sapp_keycode_SAPP_KEYCODE_COMMA => KeyCode::Comma, sapp::sapp_keycode_SAPP_KEYCODE_MINUS => KeyCode::Minus, sapp::sapp_keycode_SAPP_KEYCODE_PERIOD => KeyCode::Period, sapp::sapp_keycode_SAPP_KEYCODE_SLASH => KeyCode::Slash, sapp::sapp_keycode_SAPP_KEYCODE_0 => KeyCode::Key0, sapp::sapp_keycode_SAPP_KEYCODE_1 => KeyCode::Key1, sapp::sapp_keycode_SAPP_KEYCODE_2 => KeyCode::Key2, sapp::sapp_keycode_SAPP_KEYCODE_3 => KeyCode::Key3, sapp::sapp_keycode_SAPP_KEYCODE_4 => KeyCode::Key4, sapp::sapp_keycode_SAPP_KEYCODE_5 => KeyCode::Key5, sapp::sapp_keycode_SAPP_KEYCODE_6 => KeyCode::Key6, sapp::sapp_keycode_SAPP_KEYCODE_7 => KeyCode::Key7, sapp::sapp_keycode_SAPP_KEYCODE_8 => KeyCode::Key8, sapp::sapp_keycode_SAPP_KEYCODE_9 => KeyCode::Key9, sapp::sapp_keycode_SAPP_KEYCODE_SEMICOLON => KeyCode::Semicolon, sapp::sapp_keycode_SAPP_KEYCODE_EQUAL => KeyCode::Equal, sapp::sapp_keycode_SAPP_KEYCODE_A => KeyCode::A, sapp::sapp_keycode_SAPP_KEYCODE_B => KeyCode::B, sapp::sapp_keycode_SAPP_KEYCODE_C => KeyCode::C, sapp::sapp_keycode_SAPP_KEYCODE_D => KeyCode::D, sapp::sapp_keycode_SAPP_KEYCODE_E => KeyCode::E, sapp::sapp_keycode_SAPP_KEYCODE_F => KeyCode::F, sapp::sapp_keycode_SAPP_KEYCODE_G => KeyCode::G, sapp::sapp_keycode_SAPP_KEYCODE_H => KeyCode::H, sapp::sapp_keycode_SAPP_KEYCODE_I => KeyCode::I, sapp::sapp_keycode_SAPP_KEYCODE_J => KeyCode::J, sapp::sapp_keycode_SAPP_KEYCODE_K => KeyCode::K, sapp::sapp_keycode_SAPP_KEYCODE_L => KeyCode::L, sapp::sapp_keycode_SAPP_KEYCODE_M => KeyCode::M, sapp::sapp_keycode_SAPP_KEYCODE_N => KeyCode::N, sapp::sapp_keycode_SAPP_KEYCODE_O => KeyCode::O, sapp::sapp_keycode_SAPP_KEYCODE_P => KeyCode::P, sapp::sapp_keycode_SAPP_KEYCODE_Q => KeyCode::Q, sapp::sapp_keycode_SAPP_KEYCODE_R => KeyCode::R, sapp::sapp_keycode_SAPP_KEYCODE_S => KeyCode::S, sapp::sapp_keycode_SAPP_KEYCODE_T => KeyCode::T, 
sapp::sapp_keycode_SAPP_KEYCODE_U => KeyCode::U, sapp::sapp_keycode_SAPP_KEYCODE_V => KeyCode::V, sapp::sapp_keycode_SAPP_KEYCODE_W => KeyCode::W, sapp::sapp_keycode_SAPP_KEYCODE_X => KeyCode::X, sapp::sapp_keycode_SAPP_KEYCODE_Y => KeyCode::Y, sapp::sapp_keycode_SAPP_KEYCODE_Z => KeyCode::Z, sapp::sapp_keycode_SAPP_KEYCODE_LEFT_BRACKET => KeyCode::LeftBracket, sapp::sapp_keycode_SAPP_KEYCODE_BACKSLASH => KeyCode::Backslash, sapp::sapp_keycode_SAPP_KEYCODE_RIGHT_BRACKET => KeyCode::RightBracket, sapp::sapp_keycode_SAPP_KEYCODE_GRAVE_ACCENT => KeyCode::GraveAccent, sapp::sapp_keycode_SAPP_KEYCODE_WORLD_1 => KeyCode::World1, sapp::sapp_keycode_SAPP_KEYCODE_WORLD_2 => KeyCode::World2, sapp::sapp_keycode_SAPP_KEYCODE_ESCAPE => KeyCode::Escape, sapp::sapp_keycode_SAPP_KEYCODE_ENTER => KeyCode::Enter, sapp::sapp_keycode_SAPP_KEYCODE_TAB => KeyCode::Tab, sapp::sapp_keycode_SAPP_KEYCODE_BACKSPACE => KeyCode::Backspace, sapp::sapp_keycode_SAPP_KEYCODE_INSERT => KeyCode::Insert, sapp::sapp_keycode_SAPP_KEYCODE_DELETE => KeyCode::Delete, sapp::sapp_keycode_SAPP_KEYCODE_RIGHT => KeyCode::Right, sapp::sapp_keycode_SAPP_KEYCODE_LEFT => KeyCode::Left, sapp::sapp_keycode_SAPP_KEYCODE_DOWN => KeyCode::Down, sapp::sapp_keycode_SAPP_KEYCODE_UP => KeyCode::Up, sapp::sapp_keycode_SAPP_KEYCODE_PAGE_UP => KeyCode::PageUp, sapp::sapp_keycode_SAPP_KEYCODE_PAGE_DOWN => KeyCode::PageDown, sapp::sapp_keycode_SAPP_KEYCODE_HOME => KeyCode::Home, sapp::sapp_keycode_SAPP_KEYCODE_END => KeyCode::End, sapp::sapp_keycode_SAPP_KEYCODE_CAPS_LOCK => KeyCode::CapsLock, sapp::sapp_keycode_SAPP_KEYCODE_SCROLL_LOCK => KeyCode::ScrollLock, sapp::sapp_keycode_SAPP_KEYCODE_NUM_LOCK => KeyCode::NumLock, sapp::sapp_keycode_SAPP_KEYCODE_PRINT_SCREEN => KeyCode::PrintScreen, sapp::sapp_keycode_SAPP_KEYCODE_PAUSE => KeyCode::Pause, sapp::sapp_keycode_SAPP_KEYCODE_F1 => KeyCode::F1, sapp::sapp_keycode_SAPP_KEYCODE_F2 => KeyCode::F2, sapp::sapp_keycode_SAPP_KEYCODE_F3 => KeyCode::F3, sapp::sapp_keycode_SAPP_KEYCODE_F4 => KeyCode::F4, sapp::sapp_keycode_SAPP_KEYCODE_F5 => KeyCode::F5, sapp::sapp_keycode_SAPP_KEYCODE_F6 => KeyCode::F6, sapp::sapp_keycode_SAPP_KEYCODE_F7 => KeyCode::F7, sapp::sapp_keycode_SAPP_KEYCODE_F8 => KeyCode::F8, sapp::sapp_keycode_SAPP_KEYCODE_F9 => KeyCode::F9, sapp::sapp_keycode_SAPP_KEYCODE_F10 => KeyCode::F10, sapp::sapp_keycode_SAPP_KEYCODE_F11 => KeyCode::F11, sapp::sapp_keycode_SAPP_KEYCODE_F12 => KeyCode::F12, sapp::sapp_keycode_SAPP_KEYCODE_F13 => KeyCode::F13, sapp::sapp_keycode_SAPP_KEYCODE_F14 => KeyCode::F14, sapp::sapp_keycode_SAPP_KEYCODE_F15 => KeyCode::F15, sapp::sapp_keycode_SAPP_KEYCODE_F16 => KeyCode::F16, sapp::sapp_keycode_SAPP_KEYCODE_F17 => KeyCode::F17, sapp::sapp_keycode_SAPP_KEYCODE_F18 => KeyCode::F18, sapp::sapp_keycode_SAPP_KEYCODE_F19 => KeyCode::F19, sapp::sapp_keycode_SAPP_KEYCODE_F20 => KeyCode::F20, sapp::sapp_keycode_SAPP_KEYCODE_F21 => KeyCode::F21, sapp::sapp_keycode_SAPP_KEYCODE_F22 => KeyCode::F22, sapp::sapp_keycode_SAPP_KEYCODE_F23 => KeyCode::F23, sapp::sapp_keycode_SAPP_KEYCODE_F24 => KeyCode::F24, sapp::sapp_keycode_SAPP_KEYCODE_F25 => KeyCode::F25, sapp::sapp_keycode_SAPP_KEYCODE_KP_0 => KeyCode::Kp0, sapp::sapp_keycode_SAPP_KEYCODE_KP_1 => KeyCode::Kp1, sapp::sapp_keycode_SAPP_KEYCODE_KP_2 => KeyCode::Kp2, sapp::sapp_keycode_SAPP_KEYCODE_KP_3 => KeyCode::Kp3, sapp::sapp_keycode_SAPP_KEYCODE_KP_4 => KeyCode::Kp4, sapp::sapp_keycode_SAPP_KEYCODE_KP_5 => KeyCode::Kp5, sapp::sapp_keycode_SAPP_KEYCODE_KP_6 => KeyCode::Kp6, sapp::sapp_keycode_SAPP_KEYCODE_KP_7 => KeyCode::Kp7, 
sapp::sapp_keycode_SAPP_KEYCODE_KP_8 => KeyCode::Kp8, sapp::sapp_keycode_SAPP_KEYCODE_KP_9 => KeyCode::Kp9, sapp::sapp_keycode_SAPP_KEYCODE_KP_DECIMAL => KeyCode::KpDecimal, sapp::sapp_keycode_SAPP_KEYCODE_KP_DIVIDE => KeyCode::KpDivide, sapp::sapp_keycode_SAPP_KEYCODE_KP_MULTIPLY => KeyCode::KpMultiply, sapp::sapp_keycode_SAPP_KEYCODE_KP_SUBTRACT => KeyCode::KpSubtract, sapp::sapp_keycode_SAPP_KEYCODE_KP_ADD => KeyCode::KpAdd, sapp::sapp_keycode_SAPP_KEYCODE_KP_ENTER => KeyCode::KpEnter, sapp::sapp_keycode_SAPP_KEYCODE_KP_EQUAL => KeyCode::KpEqual, sapp::sapp_keycode_SAPP_KEYCODE_LEFT_SHIFT => KeyCode::LeftShift, sapp::sapp_keycode_SAPP_KEYCODE_LEFT_CONTROL => KeyCode::LeftControl, sapp::sapp_keycode_SAPP_KEYCODE_LEFT_ALT => KeyCode::LeftAlt, sapp::sapp_keycode_SAPP_KEYCODE_LEFT_SUPER => KeyCode::LeftSuper, sapp::sapp_keycode_SAPP_KEYCODE_RIGHT_SHIFT => KeyCode::RightShift, sapp::sapp_keycode_SAPP_KEYCODE_RIGHT_CONTROL => KeyCode::RightControl, sapp::sapp_keycode_SAPP_KEYCODE_RIGHT_ALT => KeyCode::RightAlt, sapp::sapp_keycode_SAPP_KEYCODE_RIGHT_SUPER => KeyCode::RightSuper, sapp::sapp_keycode_SAPP_KEYCODE_MENU => KeyCode::Menu, _ => KeyCode::Unknown, } } } #[derive(Debug, Copy, Clone, PartialEq, Default)] pub struct KeyMods { pub shift: bool, pub ctrl: bool, pub alt: bool, pub logo: bool, } impl From<u32> for KeyMods { fn from(value: u32) -> KeyMods { let mut key_mods = KeyMods::default(); if value & sapp::SAPP_MODIFIER_SHIFT != 0 { key_mods.shift = true; } if value & sapp::SAPP_MODIFIER_CTRL != 0 { key_mods.ctrl = true; } if value & sapp::SAPP_MODIFIER_ALT != 0 { key_mods.alt = true; } if value & sapp::SAPP_MODIFIER_SUPER != 0 { key_mods.logo = true; } key_mods } } #[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)] pub enum TouchPhase { Started, Moved, Ended, Cancelled, } impl From<u32> for TouchPhase { fn from(event: u32) -> TouchPhase { match event { sapp::sapp_event_type_SAPP_EVENTTYPE_TOUCHES_BEGAN => TouchPhase::Started, sapp::sapp_event_type_SAPP_EVENTTYPE_TOUCHES_ENDED => TouchPhase::Ended, sapp::sapp_event_type_SAPP_EVENTTYPE_TOUCHES_CANCELLED => TouchPhase::Cancelled, sapp::sapp_event_type_SAPP_EVENTTYPE_TOUCHES_MOVED => TouchPhase::Moved, _ => unreachable!(), } } } /// A trait defining event callbacks. 
pub trait EventHandler {
    fn update(&mut self, _ctx: &mut Context);
    fn draw(&mut self, _ctx: &mut Context);

    fn resize_event(&mut self, _ctx: &mut Context, _width: f32, _height: f32) {}
    fn mouse_motion_event(&mut self, _ctx: &mut Context, _x: f32, _y: f32) {}
    fn mouse_wheel_event(&mut self, _ctx: &mut Context, _x: f32, _y: f32) {}
    fn mouse_button_down_event(
        &mut self,
        _ctx: &mut Context,
        _button: MouseButton,
        _x: f32,
        _y: f32,
    ) {
    }
    fn mouse_button_up_event(
        &mut self,
        _ctx: &mut Context,
        _button: MouseButton,
        _x: f32,
        _y: f32,
    ) {
    }
    fn char_event(
        &mut self,
        _ctx: &mut Context,
        _character: char,
        _keymods: KeyMods,
        _repeat: bool,
    ) {
    }
    fn key_down_event(
        &mut self,
        _ctx: &mut Context,
        _keycode: KeyCode,
        _keymods: KeyMods,
        _repeat: bool,
    ) {
    }
    fn key_up_event(&mut self, _ctx: &mut Context, _keycode: KeyCode, _keymods: KeyMods) {}

    /// Default implementation emulates mouse clicks
    fn touch_event(&mut self, ctx: &mut Context, phase: TouchPhase, _id: u64, x: f32, y: f32) {
        if phase == TouchPhase::Started {
            self.mouse_button_down_event(ctx, MouseButton::Left, x, y);
        }
        if phase == TouchPhase::Ended {
            self.mouse_button_up_event(ctx, MouseButton::Left, x, y);
        }
        if phase == TouchPhase::Moved {
            self.mouse_motion_event(ctx, x, y);
        }
    }

    /// Represents a raw hardware mouse motion event.
    /// Note that these events are delivered regardless of input focus and are measured
    /// in hardware units rather than pixels; those units may differ from pixels
    /// depending on the target platform.
    fn raw_mouse_motion(&mut self, _ctx: &mut Context, _dx: f32, _dy: f32) {}

    /// Window has been minimized.
    /// Right now this is not implemented anywhere. TODO: implement
    fn window_minimized_event(&mut self, _ctx: &mut Context) {}

    /// Window has been restored.
    /// Right now this is not implemented anywhere. TODO: implement
    fn window_restored_event(&mut self, _ctx: &mut Context) {}

    /// This event is sent when the user clicks the window's close button
    /// or application code calls the ctx.request_quit() function. The event
    /// handler callback code can handle this event by calling
    /// ctx.cancel_quit() to cancel the quit.
    /// If the event is ignored, the application will quit as usual.
    fn quit_requested_event(&mut self, _ctx: &mut Context) {}
}

/// A trait defining event callbacks.
/// Used for miniquad's setup with a user-owned Context.
/// The only difference from EventHandler is that it does not receive "&mut Context".
pub trait EventHandlerFree {
    fn update(&mut self);
    fn draw(&mut self);
    fn resize_event(&mut self, _width: f32, _height: f32) {}
    fn mouse_motion_event(&mut self, _x: f32, _y: f32) {}
    fn
(&mut self, _x: f32, _y: f32) {}
    fn mouse_button_down_event(&mut self, _button: MouseButton, _x: f32, _y: f32) {}
    fn mouse_button_up_event(&mut self, _button: MouseButton, _x: f32, _y: f32) {}
    fn char_event(&mut self, _character: char, _keymods: KeyMods, _repeat: bool) {}
    fn key_down_event(&mut self, _keycode: KeyCode, _keymods: KeyMods, _repeat: bool) {}
    fn key_up_event(&mut self, _keycode: KeyCode, _keymods: KeyMods) {}

    /// Default implementation emulates mouse clicks
    fn touch_event(&mut self, phase: TouchPhase, _id: u64, x: f32, y: f32) {
        if phase == TouchPhase::Started {
            self.mouse_button_down_event(MouseButton::Left, x, y);
        }
        if phase == TouchPhase::Ended {
            self.mouse_button_up_event(MouseButton::Left, x, y);
        }
        if phase == TouchPhase::Moved {
            self.mouse_motion_event(x, y);
        }
    }

    /// Represents a raw hardware mouse motion event.
    /// Note that these events are delivered regardless of input focus and are measured
    /// in hardware units rather than pixels; those units may differ from pixels
    /// depending on the target platform.
    fn raw_mouse_motion(&mut self, _dx: f32, _dy: f32) {}

    /// Window has been minimized.
    /// Right now this is not implemented anywhere. TODO: implement
    fn window_minimized_event(&mut self) {}

    /// Window has been restored.
    /// Right now this is not implemented anywhere. TODO: implement
    fn window_restored_event(&mut self) {}

    /// This event is sent when the user clicks the window's close button
    /// or application code calls the ctx.request_quit() function. The event
    /// handler callback code can handle this event by calling
    /// ctx.cancel_quit() to cancel the quit.
    /// If the event is ignored, the application will quit as usual.
    fn quit_requested_event(&mut self) {}
}
mouse_wheel_event
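Only `update` and `draw` are required by these traits; every other callback has a default no-op body. A minimal implementor of `EventHandler` could look like the sketch below; the `Stage` struct, its field, and the click-counting logic are illustrative, and wiring it into an event loop (e.g. via miniquad's start entry point) is outside this file.

// Illustrative implementor of the trait defined above.
struct Stage {
    clicks: u32,
}

impl EventHandler for Stage {
    fn update(&mut self, _ctx: &mut Context) {}

    fn draw(&mut self, _ctx: &mut Context) {}

    fn mouse_button_down_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
        // MouseButton derives PartialEq, so direct comparison works.
        if button == MouseButton::Left {
            self.clicks += 1;
            println!("left click #{} at ({}, {})", self.clicks, x, y);
        }
    }
}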
mod.rs
/*! # typeck: check phase Within the check phase of type check, we check each item one at a time (bodies of function expressions are checked as part of the containing function). Inference is used to supply types wherever they are unknown. By far the most complex case is checking the body of a function. This can be broken down into several distinct phases: - gather: creates type variables to represent the type of each local variable and pattern binding. - main: the main pass does the lion's share of the work: it determines the types of all expressions, resolves methods, checks for most invalid conditions, and so forth. In some cases, where a type is unknown, it may create a type or region variable and use that as the type of an expression. In the process of checking, various constraints will be placed on these type variables through the subtyping relationships requested through the `demand` module. The `infer` module is in charge of resolving those constraints. - regionck: after main is complete, the regionck pass goes over all types looking for regions and making sure that they did not escape into places they are not in scope. This may also influence the final assignments of the various region variables if there is some flexibility. - writeback: writes the final types within a function body, replacing type variables with their final inferred types. These final types are written into the `tcx.node_types` table, which should *never* contain any reference to a type variable. ## Intermediate types While type checking a function, the intermediate types for the expressions, blocks, and so forth contained within the function are stored in `fcx.node_types` and `fcx.node_substs`. These types may contain unresolved type variables. After type checking is complete, the functions in the writeback module are used to take the types from this table, resolve them, and then write them into their permanent home in the type context `tcx`. This means that during inferencing you should use `fcx.write_ty()` and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of nodes within the function. The types of top-level items, which never contain unbound type variables, are stored directly into the `tcx` typeck_results. N.B., a type variable is not the same thing as a type parameter. A type variable is an instance of a type parameter. That is, given a generic function `fn foo<T>(t: T)`, while checking the function `foo`, the type `ty_param(0)` refers to the type `T`, which is treated in abstract. However, when `foo()` is called, `T` will be substituted for a fresh type variable `N`. This variable will eventually be resolved to some concrete type (which might itself be a type parameter). 
*/ pub mod _match; mod autoderef; mod callee; pub mod cast; mod check; mod closure; pub mod coercion; mod compare_method; pub mod demand; mod diverges; pub mod dropck; mod expectation; mod expr; mod fallback; mod fn_ctxt; mod gather_locals; mod generator_interior; mod inherited; pub mod intrinsic; pub mod method; mod op; mod pat; mod place_op; mod regionck; mod upvar; mod wfcheck; pub mod writeback; use check::{ check_abi, check_fn, check_impl_item_well_formed, check_item_well_formed, check_mod_item_types, check_trait_item_well_formed, }; pub use check::{check_item_type, check_wf_new}; pub use diverges::Diverges; pub use expectation::Expectation; pub use fn_ctxt::*; use hir::def::CtorOf; pub use inherited::{Inherited, InheritedBuilder}; use crate::astconv::AstConv; use crate::check::gather_locals::GatherLocalsVisitor; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_errors::{ pluralize, struct_span_err, Applicability, DiagnosticBuilder, EmissionGuarantee, MultiSpan, }; use rustc_hir as hir; use rustc_hir::def::Res; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::intravisit::Visitor; use rustc_hir::itemlikevisit::ItemLikeVisitor; use rustc_hir::{HirIdMap, ImplicitSelfKind, Node}; use rustc_index::bit_set::BitSet; use rustc_index::vec::Idx; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_middle::ty::query::Providers; use rustc_middle::ty::subst::{InternalSubsts, Subst, SubstsRef}; use rustc_middle::ty::{self, Ty, TyCtxt, UserType}; use rustc_session::config; use rustc_session::parse::feature_err; use rustc_session::Session; use rustc_span::source_map::DUMMY_SP; use rustc_span::symbol::{kw, Ident}; use rustc_span::{self, BytePos, Span}; use rustc_target::abi::VariantIdx; use rustc_target::spec::abi::Abi; use rustc_trait_selection::traits; use rustc_trait_selection::traits::error_reporting::recursive_type_with_infinite_size_error; use rustc_trait_selection::traits::error_reporting::suggestions::ReturnsVisitor; use std::cell::{Ref, RefCell, RefMut}; use crate::require_c_abi_if_c_variadic; use crate::util::common::indenter; use self::coercion::DynamicCoerceMany; pub use self::Expectation::*; #[macro_export] macro_rules! type_error_struct { ($session:expr, $span:expr, $typ:expr, $code:ident, $($message:tt)*) => ({ let mut err = rustc_errors::struct_span_err!($session, $span, $code, $($message)*); if $typ.references_error() { err.downgrade_to_delayed_bug(); } err }) } /// The type of a local binding, including the revealed type for anon types. #[derive(Copy, Clone, Debug)] pub struct LocalTy<'tcx> { decl_ty: Ty<'tcx>, revealed_ty: Ty<'tcx>, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum Needs { MutPlace, None, } impl Needs { fn maybe_mut_place(m: hir::Mutability) -> Self { match m { hir::Mutability::Mut => Needs::MutPlace, hir::Mutability::Not => Needs::None, } } } #[derive(Copy, Clone)] pub struct UnsafetyState { pub def: hir::HirId, pub unsafety: hir::Unsafety, from_fn: bool, } impl UnsafetyState { pub fn function(unsafety: hir::Unsafety, def: hir::HirId) -> UnsafetyState { UnsafetyState { def, unsafety, from_fn: true } } pub fn recurse(self, blk: &hir::Block<'_>) -> UnsafetyState { use hir::BlockCheckMode; match self.unsafety { // If this unsafe, then if the outer function was already marked as // unsafe we shouldn't attribute the unsafe'ness to the block. This // way the block can be warned about instead of ignoring this // extraneous block (functions are never warned about). 
hir::Unsafety::Unsafe if self.from_fn => self, unsafety => { let (unsafety, def) = match blk.rules { BlockCheckMode::UnsafeBlock(..) => (hir::Unsafety::Unsafe, blk.hir_id), BlockCheckMode::DefaultBlock => (unsafety, self.def), }; UnsafetyState { def, unsafety, from_fn: false } } } } } #[derive(Debug, Copy, Clone)] pub enum PlaceOp { Deref, Index, } pub struct BreakableCtxt<'tcx> { may_break: bool, // this is `null` for loops where break with a value is illegal, // such as `while`, `for`, and `while let` coerce: Option<DynamicCoerceMany<'tcx>>, } pub struct EnclosingBreakables<'tcx> { stack: Vec<BreakableCtxt<'tcx>>, by_id: HirIdMap<usize>, } impl<'tcx> EnclosingBreakables<'tcx> { fn find_breakable(&mut self, target_id: hir::HirId) -> &mut BreakableCtxt<'tcx> { self.opt_find_breakable(target_id).unwrap_or_else(|| { bug!("could not find enclosing breakable with id {}", target_id); }) } fn opt_find_breakable(&mut self, target_id: hir::HirId) -> Option<&mut BreakableCtxt<'tcx>> { match self.by_id.get(&target_id) { Some(ix) => Some(&mut self.stack[*ix]), None => None, } } } pub fn provide(providers: &mut Providers) { method::provide(providers); *providers = Providers { typeck_item_bodies, typeck_const_arg, typeck, diagnostic_only_typeck, has_typeck_results, adt_destructor, used_trait_imports, check_item_well_formed, check_trait_item_well_formed, check_impl_item_well_formed, check_mod_item_types, ..*providers }; } fn adt_destructor(tcx: TyCtxt<'_>, def_id: DefId) -> Option<ty::Destructor> { tcx.calculate_dtor(def_id, dropck::check_drop_impl) } /// If this `DefId` is a "primary tables entry", returns /// `Some((body_id, body_ty, fn_sig))`. Otherwise, returns `None`. /// /// If this function returns `Some`, then `typeck_results(def_id)` will /// succeed; if it returns `None`, then `typeck_results(def_id)` may or /// may not succeed. In some cases where this function returns `None` /// (notably closures), `typeck_results(def_id)` would wind up /// redirecting to the owning function. fn primary_body_of( tcx: TyCtxt<'_>, id: hir::HirId, ) -> Option<(hir::BodyId, Option<&hir::Ty<'_>>, Option<&hir::FnSig<'_>>)> { match tcx.hir().get(id) { Node::Item(item) => match item.kind { hir::ItemKind::Const(ty, body) | hir::ItemKind::Static(ty, _, body) => { Some((body, Some(ty), None)) } hir::ItemKind::Fn(ref sig, .., body) => Some((body, None, Some(sig))), _ => None, }, Node::TraitItem(item) => match item.kind { hir::TraitItemKind::Const(ty, Some(body)) => Some((body, Some(ty), None)), hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(body)) => { Some((body, None, Some(sig))) } _ => None, }, Node::ImplItem(item) => match item.kind { hir::ImplItemKind::Const(ty, body) => Some((body, Some(ty), None)), hir::ImplItemKind::Fn(ref sig, body) => Some((body, None, Some(sig))), _ => None, }, Node::AnonConst(constant) => Some((constant.body, None, None)), _ => None, } } fn has_typeck_results(tcx: TyCtxt<'_>, def_id: DefId) -> bool { // Closures' typeck results come from their outermost function, // as they are part of the same "inference environment". 
let typeck_root_def_id = tcx.typeck_root_def_id(def_id); if typeck_root_def_id != def_id { return tcx.has_typeck_results(typeck_root_def_id); } if let Some(def_id) = def_id.as_local() { let id = tcx.hir().local_def_id_to_hir_id(def_id); primary_body_of(tcx, id).is_some() } else { false } } fn used_trait_imports(tcx: TyCtxt<'_>, def_id: LocalDefId) -> &FxHashSet<LocalDefId> { &*tcx.typeck(def_id).used_trait_imports } fn typeck_const_arg<'tcx>( tcx: TyCtxt<'tcx>, (did, param_did): (LocalDefId, DefId), ) -> &ty::TypeckResults<'tcx> { let fallback = move || tcx.type_of(param_did); typeck_with_fallback(tcx, did, fallback) } fn typeck<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &ty::TypeckResults<'tcx> { if let Some(param_did) = tcx.opt_const_param_of(def_id) { tcx.typeck_const_arg((def_id, param_did)) } else { let fallback = move || tcx.type_of(def_id.to_def_id()); typeck_with_fallback(tcx, def_id, fallback) } } /// Used only to get `TypeckResults` for type inference during error recovery. /// Currently only used for type inference of `static`s and `const`s to avoid type cycle errors. fn diagnostic_only_typeck<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &ty::TypeckResults<'tcx>
#[instrument(skip(tcx, fallback))] fn typeck_with_fallback<'tcx>( tcx: TyCtxt<'tcx>, def_id: LocalDefId, fallback: impl Fn() -> Ty<'tcx> + 'tcx, ) -> &'tcx ty::TypeckResults<'tcx> { // Closures' typeck results come from their outermost function, // as they are part of the same "inference environment". let typeck_root_def_id = tcx.typeck_root_def_id(def_id.to_def_id()).expect_local(); if typeck_root_def_id != def_id { return tcx.typeck(typeck_root_def_id); } let id = tcx.hir().local_def_id_to_hir_id(def_id); let span = tcx.hir().span(id); // Figure out what primary body this item has. let (body_id, body_ty, fn_sig) = primary_body_of(tcx, id).unwrap_or_else(|| { span_bug!(span, "can't type-check body of {:?}", def_id); }); let body = tcx.hir().body(body_id); let typeck_results = Inherited::build(tcx, def_id).enter(|inh| { let param_env = tcx.param_env(def_id); let (fcx, wf_tys) = if let Some(hir::FnSig { header, decl, .. }) = fn_sig { let fn_sig = if crate::collect::get_infer_ret_ty(&decl.output).is_some() { let fcx = FnCtxt::new(&inh, param_env, body.value.hir_id); <dyn AstConv<'_>>::ty_of_fn( &fcx, id, header.unsafety, header.abi, decl, &hir::Generics::empty(), None, None, ) } else { tcx.fn_sig(def_id) }; check_abi(tcx, id, span, fn_sig.abi()); // When normalizing the function signature, we assume all types are // well-formed. So, we don't need to worry about the obligations // from normalization. We could just discard these, but to align with // compare_method and elsewhere, we just add implied bounds for // these types. let mut wf_tys = FxHashSet::default(); // Compute the fty from point of view of inside the fn. let fn_sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), fn_sig); let fn_sig = inh.normalize_associated_types_in( body.value.span, body_id.hir_id, param_env, fn_sig, ); wf_tys.extend(fn_sig.inputs_and_output.iter()); let fcx = check_fn(&inh, param_env, fn_sig, decl, id, body, None, true).0; (fcx, wf_tys) } else { let fcx = FnCtxt::new(&inh, param_env, body.value.hir_id); let expected_type = body_ty .and_then(|ty| match ty.kind { hir::TyKind::Infer => Some(<dyn AstConv<'_>>::ast_ty_to_ty(&fcx, ty)), _ => None, }) .unwrap_or_else(|| match tcx.hir().get(id) { Node::AnonConst(_) => match tcx.hir().get(tcx.hir().get_parent_node(id)) { Node::Expr(&hir::Expr { kind: hir::ExprKind::ConstBlock(ref anon_const), .. }) if anon_const.hir_id == id => fcx.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span, }), Node::Ty(&hir::Ty { kind: hir::TyKind::Typeof(ref anon_const), .. }) if anon_const.hir_id == id => fcx.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span, }), Node::Expr(&hir::Expr { kind: hir::ExprKind::InlineAsm(asm), .. }) | Node::Item(&hir::Item { kind: hir::ItemKind::GlobalAsm(asm), .. }) => { let operand_ty = asm .operands .iter() .filter_map(|(op, _op_sp)| match op { hir::InlineAsmOperand::Const { anon_const } if anon_const.hir_id == id => { // Inline assembly constants must be integers. Some(fcx.next_int_var()) } hir::InlineAsmOperand::SymFn { anon_const } if anon_const.hir_id == id => { Some(fcx.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span, })) } _ => None, }) .next(); operand_ty.unwrap_or_else(fallback) } _ => fallback(), }, _ => fallback(), }); let expected_type = fcx.normalize_associated_types_in(body.value.span, expected_type); fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized); // Gather locals in statics (because of block expressions). 
GatherLocalsVisitor::new(&fcx).visit_body(body); fcx.check_expr_coercable_to_type(&body.value, expected_type, None); fcx.write_ty(id, expected_type); (fcx, FxHashSet::default()) }; let fallback_has_occurred = fcx.type_inference_fallback(); // Even though coercion casts provide type hints, we check casts after fallback for // backwards compatibility. This makes fallback a stronger type hint than a cast coercion. fcx.check_casts(); fcx.select_obligations_where_possible(fallback_has_occurred, |_| {}); // Closure and generator analysis may run after fallback // because they don't constrain other type variables. fcx.closure_analyze(body); assert!(fcx.deferred_call_resolutions.borrow().is_empty()); fcx.resolve_generator_interiors(def_id.to_def_id()); for (ty, span, code) in fcx.deferred_sized_obligations.borrow_mut().drain(..) { let ty = fcx.normalize_ty(span, ty); fcx.require_type_is_sized(ty, span, code); } fcx.select_all_obligations_or_error(); if fn_sig.is_some() { fcx.regionck_fn(id, body, span, wf_tys); } else { fcx.regionck_expr(body); } fcx.resolve_type_vars_in_body(body) }); // Consistency check our TypeckResults instance can hold all ItemLocalIds // it will need to hold. assert_eq!(typeck_results.hir_owner, id.owner); typeck_results } /// When `check_fn` is invoked on a generator (i.e., a body that /// includes yield), it returns back some information about the yield /// points. struct GeneratorTypes<'tcx> { /// Type of generator argument / values returned by `yield`. resume_ty: Ty<'tcx>, /// Type of value that is yielded. yield_ty: Ty<'tcx>, /// Types that are captured (see `GeneratorInterior` for more). interior: Ty<'tcx>, /// Indicates if the generator is movable or static (immovable). movability: hir::Movability, } /// Given a `DefId` for an opaque type in return position, find its parent item's return /// expressions. fn get_owner_return_paths<'tcx>( tcx: TyCtxt<'tcx>, def_id: LocalDefId, ) -> Option<(LocalDefId, ReturnsVisitor<'tcx>)> { let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); let parent_id = tcx.hir().get_parent_item(hir_id); tcx.hir().find_by_def_id(parent_id).and_then(|node| node.body_id()).map(|body_id| { let body = tcx.hir().body(body_id); let mut visitor = ReturnsVisitor::default(); visitor.visit_body(body); (parent_id, visitor) }) } // Forbid defining intrinsics in Rust code, // as they must always be defined by the compiler. fn fn_maybe_err(tcx: TyCtxt<'_>, sp: Span, abi: Abi) { if let Abi::RustIntrinsic | Abi::PlatformIntrinsic = abi { tcx.sess.span_err(sp, "intrinsic must be in `extern \"rust-intrinsic\" { ... }` block"); } } fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: LocalDefId, span: Span) { // Only restricted on wasm target for now if !tcx.sess.target.is_like_wasm { return; } // If `#[link_section]` is missing, then nothing to verify let attrs = tcx.codegen_fn_attrs(id); if attrs.link_section.is_none() { return; } // For the wasm32 target statics with `#[link_section]` are placed into custom // sections of the final output file, but this isn't link custom sections of // other executable formats. Namely we can only embed a list of bytes, // nothing with pointers to anything else or relocations. If any relocation // show up, reject them here. // `#[link_section]` may contain arbitrary, or even undefined bytes, but it is // the consumer's responsibility to ensure all bytes that have been read // have defined values. 
if let Ok(alloc) = tcx.eval_static_initializer(id.to_def_id()) && alloc.inner().relocations().len() != 0 { let msg = "statics with a custom `#[link_section]` must be a \ simple list of bytes on the wasm target with no \ extra levels of indirection such as references"; tcx.sess.span_err(span, msg); } } fn report_forbidden_specialization( tcx: TyCtxt<'_>, impl_item: &hir::ImplItemRef, parent_impl: DefId, ) { let mut err = struct_span_err!( tcx.sess, impl_item.span, E0520, "`{}` specializes an item from a parent `impl`, but \ that item is not marked `default`", impl_item.ident ); err.span_label(impl_item.span, format!("cannot specialize default item `{}`", impl_item.ident)); match tcx.span_of_impl(parent_impl) { Ok(span) => { err.span_label(span, "parent `impl` is here"); err.note(&format!( "to specialize, `{}` in the parent `impl` must be marked `default`", impl_item.ident )); } Err(cname) => { err.note(&format!("parent implementation is in crate `{cname}`")); } } err.emit(); } fn missing_items_err( tcx: TyCtxt<'_>, impl_span: Span, missing_items: &[&ty::AssocItem], full_impl_span: Span, ) { let missing_items_msg = missing_items .iter() .map(|trait_item| trait_item.name.to_string()) .collect::<Vec<_>>() .join("`, `"); let mut err = struct_span_err!( tcx.sess, impl_span, E0046, "not all trait items implemented, missing: `{missing_items_msg}`", ); err.span_label(impl_span, format!("missing `{missing_items_msg}` in implementation")); // `Span` before impl block closing brace. let hi = full_impl_span.hi() - BytePos(1); // Point at the place right before the closing brace of the relevant `impl` to suggest // adding the associated item at the end of its body. let sugg_sp = full_impl_span.with_lo(hi).with_hi(hi); // Obtain the level of indentation ending in `sugg_sp`. let indentation = tcx.sess.source_map().span_to_margin(sugg_sp).unwrap_or(0); // Make the whitespace that will make the suggestion have the right indentation. let padding: String = " ".repeat(indentation); for trait_item in missing_items { let snippet = suggestion_signature(trait_item, tcx); let code = format!("{}{}\n{}", padding, snippet, padding); let msg = format!("implement the missing item: `{snippet}`"); let appl = Applicability::HasPlaceholders; if let Some(span) = tcx.hir().span_if_local(trait_item.def_id) { err.span_label(span, format!("`{}` from trait", trait_item.name)); err.tool_only_span_suggestion(sugg_sp, &msg, code, appl); } else { err.span_suggestion_hidden(sugg_sp, &msg, code, appl); } } err.emit(); } fn missing_items_must_implement_one_of_err( tcx: TyCtxt<'_>, impl_span: Span, missing_items: &[Ident], annotation_span: Option<Span>, ) { let missing_items_msg = missing_items.iter().map(Ident::to_string).collect::<Vec<_>>().join("`, `"); let mut err = struct_span_err!( tcx.sess, impl_span, E0046, "not all trait items implemented, missing one of: `{missing_items_msg}`", ); err.span_label(impl_span, format!("missing one of `{missing_items_msg}` in implementation")); if let Some(annotation_span) = annotation_span { err.span_note(annotation_span, "required because of this annotation"); } err.emit(); } /// Re-sugar `ty::GenericPredicates` in a way suitable to be used in structured suggestions. 
fn bounds_from_generic_predicates<'tcx>( tcx: TyCtxt<'tcx>, predicates: ty::GenericPredicates<'tcx>, ) -> (String, String) { let mut types: FxHashMap<Ty<'tcx>, Vec<DefId>> = FxHashMap::default(); let mut projections = vec![]; for (predicate, _) in predicates.predicates { debug!("predicate {:?}", predicate); let bound_predicate = predicate.kind(); match bound_predicate.skip_binder() { ty::PredicateKind::Trait(trait_predicate) => { let entry = types.entry(trait_predicate.self_ty()).or_default(); let def_id = trait_predicate.def_id(); if Some(def_id) != tcx.lang_items().sized_trait() { // Type params are `Sized` by default, do not add that restriction to the list // if it is a positive requirement. entry.push(trait_predicate.def_id()); } } ty::PredicateKind::Projection(projection_pred) => { projections.push(bound_predicate.rebind(projection_pred)); } _ => {} } } let generics = if types.is_empty() { "".to_string() } else { format!( "<{}>", types .keys() .filter_map(|t| match t.kind() { ty::Param(_) => Some(t.to_string()), // Avoid suggesting the following: // fn foo<T, <T as Trait>::Bar>(_: T) where T: Trait, <T as Trait>::Bar: Other {} _ => None, }) .collect::<Vec<_>>() .join(", ") ) }; let mut where_clauses = vec![]; for (ty, bounds) in types { where_clauses .extend(bounds.into_iter().map(|bound| format!("{}: {}", ty, tcx.def_path_str(bound)))); } for projection in &projections { let p = projection.skip_binder(); // FIXME: this is not currently supported syntax, we should be looking at the `types` and // insert the associated types where they correspond, but for now let's be "lazy" and // propose this instead of the following valid resugaring: // `T: Trait, Trait::Assoc = K` → `T: Trait<Assoc = K>` where_clauses.push(format!( "{} = {}", tcx.def_path_str(p.projection_ty.item_def_id), p.term, )); } let where_clauses = if where_clauses.is_empty() { String::new() } else { format!(" where {}", where_clauses.join(", ")) }; (generics, where_clauses) } /// Return placeholder code for the given function. fn fn_sig_suggestion<'tcx>( tcx: TyCtxt<'tcx>, sig: ty::FnSig<'tcx>, ident: Ident, predicates: ty::GenericPredicates<'tcx>, assoc: &ty::AssocItem, ) -> String { let args = sig .inputs() .iter() .enumerate() .map(|(i, ty)| { Some(match ty.kind() { ty::Param(_) if assoc.fn_has_self_parameter && i == 0 => "self".to_string(), ty::Ref(reg, ref_ty, mutability) if i == 0 => { let reg = format!("{reg} "); let reg = match &reg[..] 
{ "'_ " | " " => "", reg => reg, }; if assoc.fn_has_self_parameter { match ref_ty.kind() { ty::Param(param) if param.name == kw::SelfUpper => { format!("&{}{}self", reg, mutability.prefix_str()) } _ => format!("self: {ty}"), } } else { format!("_: {ty}") } } _ => { if assoc.fn_has_self_parameter && i == 0 { format!("self: {ty}") } else { format!("_: {ty}") } } }) }) .chain(std::iter::once(if sig.c_variadic { Some("...".to_string()) } else { None })) .flatten() .collect::<Vec<String>>() .join(", "); let output = sig.output(); let output = if !output.is_unit() { format!(" -> {output}") } else { String::new() }; let unsafety = sig.unsafety.prefix_str(); let (generics, where_clauses) = bounds_from_generic_predicates(tcx, predicates); // FIXME: this is not entirely correct, as the lifetimes from borrowed params will // not be present in the `fn` definition, not will we account for renamed // lifetimes between the `impl` and the `trait`, but this should be good enough to // fill in a significant portion of the missing code, and other subsequent // suggestions can help the user fix the code. format!("{unsafety}fn {ident}{generics}({args}){output}{where_clauses} {{ todo!() }}") } /// Return placeholder code for the given associated item. /// Similar to `ty::AssocItem::suggestion`, but appropriate for use as the code snippet of a /// structured suggestion. fn suggestion_signature(assoc: &ty::AssocItem, tcx: TyCtxt<'_>) -> String { match assoc.kind { ty::AssocKind::Fn => { // We skip the binder here because the binder would deanonymize all // late-bound regions, and we don't want method signatures to show up // `as for<'r> fn(&'r MyType)`. Pretty-printing handles late-bound // regions just fine, showing `fn(&MyType)`. fn_sig_suggestion( tcx, tcx.fn_sig(assoc.def_id).skip_binder(), assoc.ident(tcx), tcx.predicates_of(assoc.def_id), assoc, ) } ty::AssocKind::Type => format!("type {} = Type;", assoc.name), ty::AssocKind::Const => { let ty = tcx.type_of(assoc.def_id); let val = expr::ty_kind_suggestion(ty).unwrap_or("value"); format!("const {}: {} = {};", assoc.name, ty, val) } } } /// Emit an error when encountering two or more variants in a transparent enum. fn bad_variant_count<'tcx>(tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>, sp: Span, did: DefId) { let variant_spans: Vec<_> = adt .variants() .iter() .map(|variant| tcx.hir().span_if_local(variant.def_id).unwrap()) .collect(); let msg = format!("needs exactly one variant, but has {}", adt.variants().len(),); let mut err = struct_span_err!(tcx.sess, sp, E0731, "transparent enum {msg}"); err.span_label(sp, &msg); if let [start @ .., end] = &*variant_spans { for variant_span in start { err.span_label(*variant_span, ""); } err.span_label(*end, &format!("too many variants in `{}`", tcx.def_path_str(did))); } err.emit(); } /// Emit an error when encountering two or more non-zero-sized fields in a transparent /// enum. 
fn bad_non_zero_sized_fields<'tcx>( tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>, field_count: usize, field_spans: impl Iterator<Item = Span>, sp: Span, ) { let msg = format!("needs at most one non-zero-sized field, but has {field_count}"); let mut err = struct_span_err!( tcx.sess, sp, E0690, "{}transparent {} {}", if adt.is_enum() { "the variant of a " } else { "" }, adt.descr(), msg, ); err.span_label(sp, &msg); for sp in field_spans { err.span_label(sp, "this field is non-zero-sized"); } err.emit(); } fn report_unexpected_variant_res(tcx: TyCtxt<'_>, res: Res, span: Span) { struct_span_err!( tcx.sess, span, E0533, "expected unit struct, unit variant or constant, found {}{}", res.descr(), tcx.sess .source_map() .span_to_snippet(span) .map_or_else(|_| String::new(), |s| format!(" `{s}`",)), ) .emit(); } /// Controls whether the arguments are tupled. This is used for the call /// operator. /// /// Tupling means that all call-side arguments are packed into a tuple and /// passed as a single parameter. For example, if tupling is enabled, this /// function: /// /// fn f(x: (isize, isize)) /// /// Can be called as: /// /// f(1, 2); /// /// Instead of: /// /// f((1, 2)); #[derive(Clone, Eq, PartialEq)] enum TupleArgumentsFlag { DontTupleArguments, TupleArguments, } /// A wrapper for `InferCtxt`'s `in_progress_typeck_results` field. #[derive(Copy, Clone)] struct MaybeInProgressTables<'a, 'tcx> { maybe_typeck_results: Option<&'a RefCell<ty::TypeckResults<'tcx>>>, } impl<'a, 'tcx> MaybeInProgressTables<'a, 'tcx> { fn borrow(self) -> Ref<'a, ty::TypeckResults<'tcx>> { match self.maybe_typeck_results { Some(typeck_results) => typeck_results.borrow(), None => bug!( "MaybeInProgressTables: inh/fcx.typeck_results.borrow() with no typeck results" ), } } fn borrow_mut(self) -> RefMut<'a, ty::TypeckResults<'tcx>> { match self.maybe_typeck_results { Some(typeck_results) => typeck_results.borrow_mut(), None => bug!( "MaybeInProgressTables: inh/fcx.typeck_results.borrow_mut() with no typeck results" ), } } } struct CheckItemTypesVisitor<'tcx> { tcx: TyCtxt<'tcx>, } impl<'tcx> ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'tcx> { fn visit_item(&mut self, i: &'tcx hir::Item<'tcx>) { check_item_type(self.tcx, i); } fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem<'tcx>) {} fn visit_foreign_item(&mut self, _: &'tcx hir::ForeignItem<'tcx>) {} } fn typeck_item_bodies(tcx: TyCtxt<'_>, (): ()) { tcx.hir().par_body_owners(|body_owner_def_id| tcx.ensure().typeck(body_owner_def_id)); } fn fatally_break_rust(sess: &Session) { let handler = sess.diagnostic(); handler.span_bug_no_panic( MultiSpan::new(), "It looks like you're trying to break rust; would you like some ICE?", ); handler.note_without_error("the compiler expectedly panicked. 
this is a feature."); handler.note_without_error( "we would appreciate a joke overview: \ https://github.com/rust-lang/rust/issues/43162#issuecomment-320764675", ); handler.note_without_error(&format!( "rustc {} running on {}", option_env!("CFG_VERSION").unwrap_or("unknown_version"), config::host_triple(), )); } fn potentially_plural_count(count: usize, word: &str) -> String { format!("{} {}{}", count, word, pluralize!(count)) } fn has_expected_num_generic_args<'tcx>( tcx: TyCtxt<'tcx>, trait_did: Option<DefId>, expected: usize, ) -> bool { trait_did.map_or(true, |trait_did| { let generics = tcx.generics_of(trait_did); generics.count() == expected + if generics.has_self { 1 } else { 0 } }) } /// Suggests calling the constructor of a tuple struct or enum variant /// /// * `snippet` - The snippet of code that references the constructor /// * `span` - The span of the snippet /// * `params` - The number of parameters the constructor accepts /// * `err` - A mutable diagnostic builder to add the suggestion to fn suggest_call_constructor<G: EmissionGuarantee>( span: Span, kind: CtorOf, params: usize, err: &mut DiagnosticBuilder<'_, G>, ) { // Note: tuple-structs don't have named fields, so just use placeholders let args = vec!["_"; params].join(", "); let applicable = if params > 0 { Applicability::HasPlaceholders } else { // When n = 0, it's an empty-tuple struct/enum variant // so we trivially know how to construct it Applicability::MachineApplicable }; let kind = match kind { CtorOf::Struct => "a struct", CtorOf::Variant => "an enum variant", }; err.span_label(span, &format!("this is the constructor of {kind}")); err.multipart_suggestion( "call the constructor", vec![(span.shrink_to_lo(), "(".to_string()), (span.shrink_to_hi(), format!(")({args})"))], applicable, ); }
{ let fallback = move || { let span = tcx.hir().span(tcx.hir().local_def_id_to_hir_id(def_id)); tcx.ty_error_with_message(span, "diagnostic only typeck table used") }; typeck_with_fallback(tcx, def_id, fallback) }
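// Illustrative sketch (not part of the compiler source above): what the wasm
// `#[link_section]` restriction checked by `maybe_check_static_with_link_section`
// means for user code on a wasm target. The section name ".custom" is made up
// for the example.

// Accepted: the initializer is a plain list of bytes with no relocations.
#[link_section = ".custom"]
static PLAIN_BYTES: [u8; 4] = [1, 2, 3, 4];

// Rejected on wasm targets: the initializer contains a pointer, i.e. an
// "extra level of indirection such as references", which needs a relocation.
// #[link_section = ".custom"]
// static INDIRECT: &[u8] = &[1, 2, 3, 4];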
organizations.tsx
import prisma from 'utils/prisma'; import { TableOrganization, tableOrganizationArgs } from 'interfaces/admin'; import AdminIndex from 'components/admin/AdminIndex'; import AdminTable from 'components/admin/AdminTable'; import Layout from 'components/Layout'; import { GetServerSideProps } from 'next'; import getSession from 'utils/getSession'; type AdminOrgIndexProps = { orgs: TableOrganization[]; }; const AdminOrgIndex: React.FunctionComponent<AdminOrgIndexProps> = ({ orgs, }) => ( <Layout> <AdminIndex page="Organization" search="Look for an Organization" // TODO: Add button on click // eslint-disable-next-line @typescript-eslint/no-empty-function addButtonOnClick={() => {}} > <AdminTable data={orgs} pageType="organizations" /> </AdminIndex> </Layout> ); export default AdminOrgIndex; export const getServerSideProps: GetServerSideProps = async (context) => { try { const session = await getSession(context); if (session && session.user.role === 'admin') { const orgs = await prisma.organization.findMany({ where: { active: true, }, select: tableOrganizationArgs.select, }); return { props: { orgs }, }; } return { redirect: { permanent: false, destination: '/', }, }; } catch (err) { console.log(err); return { redirect: {
}; } };
permanent: false, destination: '/', },
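// Hypothetical sketch (not part of the codebase): the admin gate inside
// getServerSideProps above could be factored into a reusable guard.
// `requireAdminSession` is an illustrative name; `getSession` and the
// session shape are assumed to match the page above.
import { GetServerSidePropsContext } from 'next';

const requireAdminSession = async (context: GetServerSidePropsContext) => {
  const session = await getSession(context);
  if (session && session.user.role === 'admin') {
    return { session, redirect: null };
  }
  return { session: null, redirect: { permanent: false, destination: '/' } };
};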
messages.rs
pub fn hello_world()
{
    println!("Hello, world! From `src/messages.rs`");
}
sparse_cs.rs
use vector::BasicReadableVector;
use vector::BasicWriteableVector;
use vector::SparseVector;

pub struct SparseCS {
    element: Vec<f32>,   // stored values, parallel to `col_ind`
    col_ind: Vec<usize>, // sorted indices of the stored values
    size: usize
}

impl SparseCS {
    /// creates a new vector
    pub fn new(col: usize) -> SparseCS {
        // start with no stored elements; `col_ind` must stay parallel to `element`
        let v = SparseCS{element: vec![], col_ind: vec![], size: col};
        return v;
    }

    /// resizes the vector
    pub fn set_size(&mut self, new_size: usize) {
        if self.size < new_size {
            self.size = new_size;
            return;
        }
        // trim stored elements that no longer fit the new size
        if let Some(index) = self.col_ind.iter().position(|&idx| idx >= new_size) {
            self.col_ind.truncate(index);
            self.element.truncate(index);
        }
        self.size = new_size;
    }
}

impl BasicReadableVector for SparseCS {
    /// returns the vector's size
    fn get_size(&self) -> usize {
        return self.size;
    }

    /// returns the i-th element of the sparse vector
    fn get_element(&self, i: usize) -> f32 {
        if i >= self.get_size() {
            panic!("access out of bounds");
        }
        if self.nnz() == 0 {
            return 0.0;
        }
        let index = match self.col_ind.binary_search(&i) {
            Err(_) => return 0.0,
            Ok(index) => index,
        };
        // `element` is parallel to `col_ind`, so index it directly
        return self.element[index];
    }
}

impl BasicWriteableVector for SparseCS {
    /// sets the i-th element of the sparse vector
    fn set_element(&mut self, i: usize, value: f32) {
        if i >= self.get_size() {
            panic!("assignment out of bounds");
        }
        // keep `col_ind` sorted and parallel to `element`:
        // overwrite an existing entry, or insert a new one in order
        match self.col_ind.binary_search(&i) {
            Ok(index) => self.element[index] = value,
            Err(index) => {
                self.col_ind.insert(index, i);
                self.element.insert(index, value);
            }
        }
    }

    /// sets all elements to zero
    fn set_zero(&mut self) {
        self.element = vec![];
        self.col_ind = vec![];
    }
}

impl SparseVector for SparseCS {
    /// returns the number of non-null vector elements
    fn nnz(&self) -> usize {
        return self.element.len();
    }
}
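// Usage sketch for the container above (a hypothetical test, assuming the
// `vector` trait imports at the top of the file): entries can be written,
// overwritten in place, and read back, with unset positions reading as zero.
#[cfg(test)]
mod sparse_cs_demo {
    use super::*;

    #[test]
    fn set_get_overwrite() {
        let mut v = SparseCS::new(10);
        v.set_element(3, 1.5);
        v.set_element(7, -2.0);
        v.set_element(3, 4.0); // overwrite, not a duplicate entry
        assert_eq!(v.get_element(3), 4.0);
        assert_eq!(v.get_element(0), 0.0); // unset entries read as zero
        assert_eq!(v.nnz(), 2);
    }
}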
code.ts
// This plugin lets the user copy VS Code icon glyphs (codicon/seti) and
// paste them into selected text nodes or create new text nodes on the canvas.

// This file holds the main code for the plugin. It has access to the *document*.
// You can access browser APIs in the <script> tag inside "ui.html" which has a
// full browser environment (see documentation).

// This shows the HTML page in "ui.html".
figma.showUI(__html__)

// Calls to "parent.postMessage" from within the HTML page will trigger the
// "figma.ui.onmessage" callback below, which receives the "pluginMessage"
// property of the posted message.
const storageKey = 'VSCodeIconsSettingsData'
const defaultDisplayType = 'display-type-tile'
const defaultSymbolType = 'symbol-type-codicon'
const defaultSettingsData = {
  clickAction: 'create',
  displayType: defaultDisplayType,
  symbolType: defaultSymbolType,
  windowHeight: 600,
  fontSize: 16
}
var settingsData = JSON.parse(JSON.stringify(defaultSettingsData));
var textObjectLength = 0

function init(){
  figma.clientStorage.getAsync(storageKey).then(result => {
    if (result){
      let data = JSON.parse(result)
      Object.keys(defaultSettingsData).forEach((key) => {
        settingsData[key] = data[key]
        if(!settingsData[key]){
          settingsData[key] = defaultSettingsData[key]
        }
      });
      figma.clientStorage.setAsync(storageKey, JSON.stringify(settingsData))
    } else {
      figma.clientStorage.setAsync(storageKey, JSON.stringify(defaultSettingsData))
      settingsData = defaultSettingsData
    }
    figma.ui.resize(370, parseInt(settingsData.windowHeight))
    figma.ui.postMessage({
      settings : true,
      data : settingsData
    })
  })
}

function pasteFunction(nodeObjectsArray, copiedText, symbolType){
  if (nodeObjectsArray.length){
    for (let i = 0; i < nodeObjectsArray.length; i++) {
      if(nodeObjectsArray[i].type == 'TEXT'){
        updateText(nodeObjectsArray[i], copiedText, symbolType)
        textObjectLength++
      }
    }
    if (textObjectLength == 0){
      // no text node was selected, so create one instead
      createTextAndPaste(copiedText, symbolType)
      textObjectLength++
      figma.notify('Copy & Create symbol glyph object!')
    }else{
      figma.notify('Copy & Paste symbol glyph to selected text objects!')
    }
  }else{
    createTextAndPaste(copiedText, symbolType)
    figma.notify('Create symbol glyph object!')
  }
  return textObjectLength
}

function createFunction(copiedText, symbolType){
  createTextAndPaste(copiedText, symbolType)
  return textObjectLength
}

async function
(selectedItem, pasteValue, symbolType) { let selectedItemFontName = selectedItem.getRangeFontName(0, 1) let textStyleId = selectedItem.getRangeTextStyleId(0, 1) if (symbolType == "codicon-icons") { let tempFontName = { family: '', style: '' } tempFontName.family = 'codicon' tempFontName.style = "Regular" await figma.loadFontAsync({ family: tempFontName.family, style: tempFontName.style }) selectedItem.setRangeFontName(0, selectedItem.characters.length, tempFontName) await figma.loadFontAsync({ family: selectedItemFontName.family, style: selectedItemFontName.style }) } else if (symbolType == 'seti-icons') { let tempFontName = { family: '', style: '' } tempFontName.family = 'seti' tempFontName.style = "Regular" await figma.loadFontAsync({ family: tempFontName.family, style: tempFontName.style }) selectedItem.setRangeFontName(0, selectedItem.characters.length, tempFontName) await figma.loadFontAsync({ family: selectedItemFontName.family, style: selectedItemFontName.style }) } if(textStyleId){ selectedItem.setRangeTextStyleId(0, selectedItem.characters.length, textStyleId) }else{ selectedItem.setRangeFontSize(0, selectedItem.characters.length, selectedItem.getRangeFontSize(0, 1)) selectedItem.setRangeTextCase(0, selectedItem.characters.length, selectedItem.getRangeTextCase(0, 1)) selectedItem.setRangeTextDecoration(0, selectedItem.characters.length, selectedItem.getRangeTextDecoration(0, 1)) selectedItem.setRangeLetterSpacing(0, selectedItem.characters.length, selectedItem.getRangeLetterSpacing(0, 1)) selectedItem.setRangeLineHeight(0, selectedItem.characters.length, selectedItem.getRangeLineHeight(0, 1)) } if(selectedItem.getRangeFillStyleId(0, 1)){ selectedItem.setRangeFillStyleId(0, selectedItem.characters.length, selectedItem.getRangeFillStyleId(0, 1)) }else{ selectedItem.setRangeFills(0, selectedItem.characters.length, selectedItem.getRangeFills(0, 1)) } selectedItem.characters = pasteValue } async function createTextAndPaste(pasteValue, symbolType) { let tempFontName = {family: '', style: ''} if (symbolType == "codicon-icons") { tempFontName.family = 'codicon' tempFontName.style = "Regular" } else if (symbolType == 'seti-icons') { tempFontName.family = 'seti' tempFontName.style = "Regular" } await figma.loadFontAsync({ family: tempFontName.family, style: tempFontName.style }) const newTextNode = figma.createText() newTextNode.fontName = tempFontName newTextNode.fontSize = Number(settingsData.fontSize) newTextNode.characters = pasteValue newTextNode.x = figma.viewport.center.x - (newTextNode.width / 2) newTextNode.y = figma.viewport.center.y - (newTextNode.height / 2) figma.currentPage.appendChild(newTextNode) figma.currentPage.selection = [newTextNode] return newTextNode; } figma.ui.onmessage = message => { if (message.copied) { // console.log(settingsData.clickAction) if (settingsData.clickAction == 'copy'){ figma.notify('Copy symbol glyph to clipboard!') } if (settingsData.clickAction == 'paste'){ let num = pasteFunction(figma.currentPage.selection, message.copiedGlyph, message.symbolType) textObjectLength = 0 } if (settingsData.clickAction == 'create'){ // console.log(settingsData.clickAction) let num = createFunction(message.copiedGlyph, message.symbolType) textObjectLength = 0 figma.notify('Create symbol glyph object!') } }else if(message.updatedSettingsData){ if(settingsData.windowHeight != message.updatedSettingsData.windowHeight){ figma.ui.resize(370, parseInt(message.updatedSettingsData.windowHeight)) } settingsData = message.updatedSettingsData 
figma.clientStorage.setAsync(storageKey, JSON.stringify(message.updatedSettingsData)) } } init()
updateText
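// Sketch of the settings-merge rule that init() above implements: a stored
// value wins, and a missing (or falsy) key falls back to its default. The
// standalone helper name `mergeSettings` is hypothetical.
function mergeSettings(
  defaults: Record<string, any>,
  stored: Record<string, any>
): Record<string, any> {
  const merged: Record<string, any> = {}
  Object.keys(defaults).forEach((key) => {
    // mirrors the plugin's falsy check, so 0 or '' would also fall back
    merged[key] = stored[key] ? stored[key] : defaults[key]
  })
  return merged
}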
seeing.py
#!/usr/bin/env python
"""
Estimate and convert atmospheric seeing (PSF FWHM) values.
"""
__author__ = "Alex Drlica-Wagner"
from collections import OrderedDict as odict
import logging
import copy

import numpy as np
import pandas as pd
import dateutil.parser
import ephem

from obztak.utils import fileio
from obztak.utils.date import datestring
from obztak.utils.database import Database

# These are nominal transformation values from Eric Neilsen
# WAVE[x] = (lambda[x]/lambda[i])**0.2
WAVE = odict([
    ( 'u' ,  0.86603 ),  # u (380nm) -> i (780nm)
    ( 'g' ,  0.9067 ),   # g (480nm) -> i (780nm)
    ( 'r' ,  0.9609 ),   # r (640nm) -> i (780nm)
    ( 'i' ,  1.0 ),      # i (780nm) -> i (780nm)
    ( 'z' ,  1.036 ),    # z (920nm) -> i (780nm)
    ( 'Y' ,  1.0523 ),   # Y (990nm) -> i (780nm)
    ('dimm', 1/1.0916 ), # dimm (500nm) -> i (780nm)
    ('VR' ,  0.9551 ),   # VR (620nm) -> i (780nm)
])
# list() so pandas accepts the odict views
WAVE_DF = pd.DataFrame({'filter': list(WAVE.keys()), 'trans': list(WAVE.values())})

DECAMINST = 0.5 # DECam instrumental contribution to the PSF [arcsec]
DIMMINST = 0.0  # DIMM instrumental contribution to the PSF [arcsec]

def convert(fwhm_1,
            band_1='dimm', airmass_1=1.0, inst_1=DIMMINST,
            band_2='i', airmass_2=1.0, inst_2=DECAMINST):
    """
    Convert observed seeing value to another band and airmass.

    Parameters:
    -----------
    fwhm_1   : input fwhm [arcsec]
    band_1   : input band ['g','r','i','z','Y','dimm']
    airmass_1: input airmass
    inst_1   : instrumental contribution to the observed psf [arcsec]
    band_2   : output band ['g','r','i','z','Y','dimm']
    airmass_2: output airmass
    inst_2   : instrumental contribution to the output psf [arcsec]

    Returns:
    --------
    fwhm_2 : output fwhm [arcsec]
    """
    # remove the input instrumental contribution in quadrature
    fwhm = np.sqrt(fwhm_1**2 - inst_1**2)

    if np.isscalar(band_1):
        wave_1 = WAVE[band_1]
    else:
        wave_1 = WAVE_DF.merge(pd.DataFrame({'filter':band_1}), on='filter').to_records()['trans']
    if np.isscalar(band_2):
        wave_2 = WAVE[band_2]
    else:
        wave_2 = WAVE_DF.merge(pd.DataFrame({'filter':band_2}), on='filter').to_records()['trans']

    # scale by wavelength and airmass, then re-add the output instrument term
    fwhm_2 = fwhm * (wave_1/wave_2) * (airmass_2/airmass_1)**(0.6)
    return np.hypot(fwhm_2, inst_2)

class Seeing():
    """Class to manage seeing data. Seeing data is stored in two member
    variables:

    self.raw : the raw data before transformation
    self.data: seeing data transformed to the atmospheric i-band zenith

    The two values differ in that self.raw can have any source and
    includes the instrumental contribution. In contrast, self.data is
    the "atmospheric" i-band FWHM (arcsec). To get a prediction of the
    observed PSF, use `get_fwhm`.
    """
    DTYPE = [('date','<M8[ns]'),('fwhm',float),('airmass',float),('filter','S4')]

    def __init__(self, date=None, db='fnal', filename=None):
        self.set_date(date)
        self.df = self.read_file(filename)
        self.db = 'db-'+db

    def set_date(self, date):
        if date is None:
            # NOOP (consistent with Tactician)
            return
        elif date == 'now':
            self.date = dateutil.parser.parse(datestring(ephem.now()))
        else:
            self.date = dateutil.parser.parse(date)

    def get_fwhm(self, timedelta='15m', band='i', airmass=1.0, inst=DECAMINST):
        """Calculate the predicted PSF FWHM (arcsec).
        Parameters:
        -----------
        date      : date to estimate the psf (default: now)
        timedelta : time range to use to estimate the psf
        band      : output band
        airmass   : output airmass
        inst      : output instrument contribution

        Returns:
        --------
        fwhm : predicted fwhm (arcsec)
        """
        timedelta = pd.Timedelta(timedelta)
        self.load_data(timedelta=max(3*timedelta, pd.Timedelta('1h')))
        dt = pd.DatetimeIndex(self.data['date'])
        previous = slice(-1, None) # most recent exposure
        recent = (dt < self.date) & (dt > (self.date - timedelta))
        ancient = (dt < (self.date - timedelta)) & (dt > (self.date - 2*timedelta))

        # Nominal atmospheric psf i-band zenith fwhm = 0.9"
        xmu = np.log10(0.74833) # sqrt(0.9**2 - 0.5**2)

        if not len(self.data):
            # No data, use the mean and print a warning
            logging.warning("No fwhm data available; using DECam median")
            xpred = xmu
        elif np.any(recent) and np.any(ancient):
            # Weighted median of recent and ancient exposures
            logging.debug("Seeing from recent and ancient exposures")
            # Log of the observed atmospheric psf i-band zenith
            x = np.log10([np.median(self.data[recent]['fwhm']),
                          np.median(self.data[ancient]['fwhm'])])
            # Predicted log of the atmospheric psf
            # NB: These constants were derived for timedelta=5min;
            # they may not hold for arbitrary time windows.
            xpred = xmu + 0.8 * (x[0] - xmu) + 0.14 * (x[1] - xmu)
        elif np.any(recent):
            # Median of the log of the observed atmospheric psf i-band zenith
            logging.debug("Seeing from recent exposures")
            xpred = np.log10(np.median(self.data[recent]['fwhm']))
        else:
            # Log of the i-band zenith fwhm from the previous exposure
            logging.debug("Seeing from previous exposure")
            xpred = np.log10(np.median(self.data[previous]['fwhm']))

        fwhm_pred = convert(10**xpred,
                            band_1='i', airmass_1=1.0, inst_1=0.0,
                            band_2=band, airmass_2=airmass, inst_2=inst)
        return fwhm_pred

class DimmSeeing(Seeing):
    """Estimate seeing from the DIMM."""
    @classmethod
    def
(cls, filename): if filename is None: return None df = pd.read_csv(filename,names=['date','fwhm'], parse_dates=['date'],index_col=['date']) return df def get_data(self, date=None, timedelta='30m'): self.set_date(date) tmax = self.date tmin = self.date - pd.Timedelta(timedelta) if self.df is None: # Don't want to create the DB each time? db = Database(self.db) db.connect() query =""" select date, dimm2see as fwhm from exposure where date > '%s' and date < '%s' and dimm2see is not NULL """%(tmin, tmax) logging.debug(query) raw = db.query2rec(query) else: sel = (self.df.index > tmin) & (self.df.index < tmax) raw = self.df[sel].to_records() return raw def load_data(self, date=None, timedelta='30m'): raw = self.get_data(date, timedelta) # Save the raw dimm values self.raw = np.recarray(len(raw),dtype=self.DTYPE) self.raw['date'] = raw['date'] self.raw['fwhm'] = raw['fwhm'] self.raw['airmass'] = 1.0 self.raw['filter'] = 'dimm' # Convert to i-band zenith self.data = copy.deepcopy(self.raw) self.data['filter'] = 'i' self.data['airmass'] = 1.0 kwargs = dict(band_1='dimm', inst_1=DIMMINST, airmass_1=self.raw['airmass']) kwargs.update(band_2='i', inst_2=0.0 , airmass_2=self.data['airmass']) self.data['fwhm'] = convert(self.raw['fwhm'],**kwargs) return self.data class QcSeeing(Seeing): """Estimate seeing from the DECam QC values.""" @classmethod def read_file(cls, filename): if filename is None: return None df = pd.read_csv(filename,names=['date','fwhm','airmass','filter'], parse_dates=['date'],index_col=['date']) return df def get_data(self, date=None, timedelta='30m'): self.set_date(date) tmax = self.date tmin = self.date - pd.Timedelta(timedelta) if self.df is None: # Don't want to create the DB each time? try: db = Database() db.connect() query =""" select date, qc_fwhm as fwhm, airmass, filter from exposure where date > '%s' and date < '%s' --and filter != 'VR' and qc_fwhm is not NULL and qc_fwhm is not NULL and qc_fwhm > 0 """%(tmin, tmax) logging.debug(query) raw = db.query2rec(query) except Exception as e: logging.warn("Couldn't connect to database:\n%s"%str(e)) dtype=[('date', '<M8[ns]'), ('fwhm', '<f8'), ('airmass', '<f8'), ('filter', 'S4')] raw = np.recarray(0,dtype=dtype) else: sel = (self.df.index > tmin) & (self.df.index < tmax) raw = self.df[sel].to_records() return raw def load_data(self, date=None, timedelta='30m'): raw = self.get_data(date,timedelta) # Save the raw dimm values self.raw = np.recarray(len(raw),dtype=self.DTYPE) self.raw['date'] = raw['date'] self.raw['fwhm'] = raw['fwhm'] self.raw['airmass'] = raw['airmass'] self.raw['filter'] = raw['filter'] # Convert to i-band zenith self.data = copy.deepcopy(self.raw) self.data['filter'] = 'i' self.data['airmass'] = 1.0 kwargs = dict(band_1=self.raw['filter'], inst_1=DECAMINST, airmass_1=self.raw['airmass']) kwargs.update(band_2='i', inst_2=0.0 , airmass_2=self.data['airmass']) self.data['fwhm'] = convert(self.raw['fwhm'],**kwargs) return self.data if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description=__doc__) args = parser.parse_args()
read_file
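# Worked example of convert() above (illustrative numbers, not data):
# a DIMM measurement of 1.0" at zenith mapped to the expected i-band
# DECam PSF at airmass 1.2.
def _convert_example():
    fwhm_i = convert(1.0,
                     band_1='dimm', airmass_1=1.0, inst_1=DIMMINST,
                     band_2='i', airmass_2=1.2, inst_2=DECAMINST)
    # atmosphere: 1.0 * (1/1.0916) * 1.2**0.6 ~= 1.02"; adding the 0.5"
    # DECam instrumental term in quadrature gives ~= 1.14"
    return fwhm_i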
train_deploy-checkpoint.py
import argparse import json import logging import os import sys import numpy as np import pandas as pd import torch import torch.distributed as dist import torch.utils.data import torch.utils.data.distributed from torch.utils.data import DataLoader, RandomSampler, TensorDataset from transformers import AdamW, BertForSequenceClassification, BertTokenizer import glob logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) logger.addHandler(logging.StreamHandler(sys.stdout)) MAX_LEN = 64 # this is the max length of the sentence print("Loading BERT tokenizer...") tokenizer = BertTokenizer.from_pretrained("bert-base-uncased", do_lower_case=True) def flat_accuracy(preds, labels): pred_flat = np.argmax(preds, axis=1).flatten() labels_flat = labels.flatten() return np.sum(pred_flat == labels_flat) / len(labels_flat) def
(batch_size, training_dir, is_distributed):
    logger.info("Get train data loader")

    dataset = pd.concat(map(pd.read_csv, glob.glob(os.path.join(training_dir, "*.csv"))))  # all csvs in the training dir
    sentences = dataset.sentence.values
    labels = dataset.label.values

    input_ids = []
    for sent in sentences:
        encoded_sent = tokenizer.encode(sent, add_special_tokens=True)
        input_ids.append(encoded_sent)

    # pad shorter sentences
    input_ids_padded = []
    for i in input_ids:
        while len(i) < MAX_LEN:
            i.append(0)
        input_ids_padded.append(i)
    input_ids = input_ids_padded

    # attention mask: 0 for padding, 1 for real tokens
    attention_masks = []
    # For each sentence...
    for sent in input_ids:
        att_mask = [int(token_id > 0) for token_id in sent]
        attention_masks.append(att_mask)

    # convert to PyTorch data types.
    train_inputs = torch.tensor(input_ids)
    train_labels = torch.tensor(labels)
    train_masks = torch.tensor(attention_masks)

    train_data = TensorDataset(train_inputs, train_masks, train_labels)
    if is_distributed:
        # sample from the TensorDataset that the DataLoader indexes,
        # not from the raw DataFrame
        train_sampler = torch.utils.data.distributed.DistributedSampler(train_data)
    else:
        train_sampler = RandomSampler(train_data)
    train_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=batch_size)

    return train_dataloader


def _get_test_data_loader(test_batch_size, training_dir):
    dataset = pd.concat(map(pd.read_csv, glob.glob(os.path.join(training_dir, "*.csv"))))  # all csvs in the test dir
    sentences = dataset.sentence.values
    labels = dataset.label.values

    input_ids = []
    for sent in sentences:
        encoded_sent = tokenizer.encode(sent, add_special_tokens=True)
        input_ids.append(encoded_sent)

    # pad shorter sentences
    input_ids_padded = []
    for i in input_ids:
        while len(i) < MAX_LEN:
            i.append(0)
        input_ids_padded.append(i)
    input_ids = input_ids_padded

    # attention mask: 0 for padding, 1 for real tokens
    attention_masks = []
    # For each sentence...
    for sent in input_ids:
        att_mask = [int(token_id > 0) for token_id in sent]
        attention_masks.append(att_mask)

    # convert to PyTorch data types.
    test_inputs = torch.tensor(input_ids)
    test_labels = torch.tensor(labels)
    test_masks = torch.tensor(attention_masks)

    test_data = TensorDataset(test_inputs, test_masks, test_labels)
    test_sampler = RandomSampler(test_data)
    test_dataloader = DataLoader(test_data, sampler=test_sampler, batch_size=test_batch_size)

    return test_dataloader


def train(args):
    is_distributed = len(args.hosts) > 1 and args.backend is not None
    logger.debug("Distributed training - %s", is_distributed)
    use_cuda = args.num_gpus > 0
    logger.debug("Number of gpus available - %d", args.num_gpus)
    device = torch.device("cuda" if use_cuda else "cpu")

    if is_distributed:
        # Initialize the distributed environment.
        world_size = len(args.hosts)
        os.environ["WORLD_SIZE"] = str(world_size)
        host_rank = args.hosts.index(args.current_host)
        os.environ["RANK"] = str(host_rank)
        dist.init_process_group(backend=args.backend, rank=host_rank, world_size=world_size)
        logger.info(
            "Initialized the distributed environment: '%s' backend on %d nodes. "
            "Current host rank is %d. 
Number of gpus: %d", args.backend, dist.get_world_size(), dist.get_rank(), args.num_gpus ) # set the seed for generating random numbers torch.manual_seed(args.seed) if use_cuda: torch.cuda.manual_seed(args.seed) train_loader = _get_train_data_loader(args.batch_size, args.data_dir, is_distributed) test_loader = _get_test_data_loader(args.test_batch_size, args.test) logger.debug( "Processes {}/{} ({:.0f}%) of train data".format( len(train_loader.sampler), len(train_loader.dataset), 100.0 * len(train_loader.sampler) / len(train_loader.dataset), ) ) logger.debug( "Processes {}/{} ({:.0f}%) of test data".format( len(test_loader.sampler), len(test_loader.dataset), 100.0 * len(test_loader.sampler) / len(test_loader.dataset), ) ) logger.info("Starting BertForSequenceClassification\n") model = BertForSequenceClassification.from_pretrained( "bert-base-uncased", # Use the 12-layer BERT model, with an uncased vocab. num_labels=args.num_labels, # The number of output labels--2 for binary classification. output_attentions=False, # Whether the model returns attentions weights. output_hidden_states=False, # Whether the model returns all hidden-states. ) model = model.to(device) if is_distributed and use_cuda: # multi-machine multi-gpu case model = torch.nn.parallel.DistributedDataParallel(model) else: # single-machine multi-gpu case or single-machine or multi-machine cpu case model = torch.nn.DataParallel(model) optimizer = AdamW( model.parameters(), lr=2e-5, # args.learning_rate - default is 5e-5, our notebook had 2e-5 eps=1e-8, # args.adam_epsilon - default is 1e-8. ) logger.info("End of defining BertForSequenceClassification\n") for epoch in range(1, args.epochs + 1): total_loss = 0 model.train() for step, batch in enumerate(train_loader): b_input_ids = batch[0].to(device) b_input_mask = batch[1].to(device) b_labels = batch[2].to(device) model.zero_grad() outputs = model(b_input_ids, token_type_ids=None, attention_mask=b_input_mask, labels=b_labels) loss = outputs[0] total_loss += loss.item() loss.backward() torch.nn.utils.clip_grad_norm_(model.parameters(), 1.0) # modified based on their gradients, the learning rate, etc. 
            optimizer.step()
            if step % args.log_interval == 0:
                logger.info(
                    "Train Epoch: {} [{}/{} ({:.0f}%)] Loss: {:.6f}".format(
                        epoch,
                        step * len(batch[0]),
                        len(train_loader.sampler),
                        100.0 * step / len(train_loader),
                        loss.item(),
                    )
                )
        logger.info("Average training loss: %f\n", total_loss / len(train_loader))
        test(model, test_loader, device)
    logger.info("Saving tuned model.")
    model_2_save = model.module if hasattr(model, "module") else model
    model_2_save.save_pretrained(save_directory=args.model_dir)


def test(model, test_loader, device):
    model.eval()
    eval_accuracy, eval_steps = 0, 0
    with torch.no_grad():
        for batch in test_loader:
            b_input_ids = batch[0].to(device)
            b_input_mask = batch[1].to(device)
            b_labels = batch[2].to(device)

            outputs = model(b_input_ids, token_type_ids=None, attention_mask=b_input_mask)
            logits = outputs[0]
            logits = logits.detach().cpu().numpy()
            label_ids = b_labels.to("cpu").numpy()
            eval_accuracy += flat_accuracy(logits, label_ids)
            eval_steps += 1

    # report the accuracy averaged over all test batches, not just the last one
    logger.info("Test set: Accuracy: %f\n", eval_accuracy / eval_steps)


def model_fn(model_dir):
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print("================ objects in model_dir ===================")
    print(os.listdir(model_dir))
    model = BertForSequenceClassification.from_pretrained(model_dir)
    print("================ model loaded ===========================")
    return model.to(device)


def input_fn(request_body, request_content_type):
    """An input_fn that deserializes a JSON request into token ids and masks"""
    if request_content_type == "application/json":
        data = json.loads(request_body)
        print("================ input sentences ===============")
        print(data)

        if isinstance(data, str):
            data = [data]
        elif isinstance(data, list) and len(data) > 0 and isinstance(data[0], str):
            pass
        else:
            raise ValueError("Unsupported input type. Input type can be a string or a non-empty list. 
\ I got {}".format(data)) #encoded = [tokenizer.encode(x, add_special_tokens=True) for x in data] #encoded = tokenizer(data, add_special_tokens=True) # for backward compatibility use the following way to encode # https://github.com/huggingface/transformers/issues/5580 input_ids = [tokenizer.encode(x, add_special_tokens=True) for x in data] print("================ encoded sentences ==============") print(input_ids) # pad shorter sentence padded = torch.zeros(len(input_ids), MAX_LEN) for i, p in enumerate(input_ids): padded[i, :len(p)] = torch.tensor(p) # create mask mask = (padded != 0) print("================= padded input and attention mask ================") print(padded, '\n', mask) return padded.long(), mask.long() raise ValueError("Unsupported content type: {}".format(request_content_type)) def predict_fn(input_data, model): device = torch.device("cuda" if torch.cuda.is_available() else "cpu") model.to(device) model.eval() input_id, input_mask = input_data input_id = input_id.to(device) input_mask = input_mask.to(device) print("============== encoded data =================") print(input_id, input_mask) with torch.no_grad(): y = model(input_id, attention_mask=input_mask)[0] print("=============== inference result =================") print(y) return y if __name__ == "__main__": parser = argparse.ArgumentParser() # Data and model checkpoints directories parser.add_argument( "--num_labels", type=int, default=2, metavar="N", help="input batch size for training (default: 64)" ) parser.add_argument( "--batch-size", type=int, default=64, metavar="N", help="input batch size for training (default: 64)" ) parser.add_argument( "--test-batch-size", type=int, default=1000, metavar="N", help="input batch size for testing (default: 1000)" ) parser.add_argument("--epochs", type=int, default=2, metavar="N", help="number of epochs to train (default: 10)") parser.add_argument("--lr", type=float, default=0.01, metavar="LR", help="learning rate (default: 0.01)") parser.add_argument("--momentum", type=float, default=0.5, metavar="M", help="SGD momentum (default: 0.5)") parser.add_argument("--seed", type=int, default=1, metavar="S", help="random seed (default: 1)") parser.add_argument( "--log-interval", type=int, default=50, metavar="N", help="how many batches to wait before logging training status", ) parser.add_argument( "--backend", type=str, default=None, help="backend for distributed training (tcp, gloo on cpu and gloo, nccl on gpu)", ) # Container environment parser.add_argument("--hosts", type=list, default=json.loads(os.environ["SM_HOSTS"])) parser.add_argument("--current-host", type=str, default=os.environ["SM_CURRENT_HOST"]) parser.add_argument("--model-dir", type=str, default=os.environ["SM_MODEL_DIR"]) parser.add_argument("--data-dir", type=str, default=os.environ["SM_CHANNEL_TRAIN"]) parser.add_argument("--test", type=str, default=os.environ["SM_CHANNEL_TESTING"]) parser.add_argument("--num-gpus", type=int, default=os.environ["SM_NUM_GPUS"]) train(parser.parse_args())
_get_train_data_loader
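# Minimal sketch of the padding and attention-mask scheme used by both data
# loaders above: token ids are right-padded with 0 up to MAX_LEN, and the
# mask is 1 exactly where a real token sits (a MAX_LEN of 8 is assumed here
# for illustration).
def _pad_and_mask_example():
    ids = [101, 2023, 2003, 102]         # an already-encoded sentence
    padded = ids + [0] * (8 - len(ids))  # right-pad with the 0 token
    mask = [int(token_id > 0) for token_id in padded]
    assert padded == [101, 2023, 2003, 102, 0, 0, 0, 0]
    assert mask == [1, 1, 1, 1, 0, 0, 0, 0]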
create-credit-card.contract.ts
import { Injectable } from '@nestjs/common'; import { Flunt } from 'src/utils/flunt'; import { Contract } from '../contract'; import { CreditCard } from '../../models/credit-card.model'; @Injectable() export class CreateCreditCardContract implements Contract {
errors: any[]; validate(model: CreditCard): boolean { const flunt = new Flunt(); flunt.hasMinLen(model.holder, 5, 'Nome no cartão inválido'); flunt.isFixedLen(model.number, 16, 'Número do cartão inválido'); flunt.isFixedLen( model.expiration, 4, 'Data de expiração do cartão inválida', ); this.errors = flunt.errors; return flunt.isValid(); } }
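// Usage sketch: the card values are illustrative and the CreditCard fields
// are assumed from the validations above. validate() returns false and
// fills `errors` when any rule fails.
export function validateSampleCard(): boolean {
  const contract = new CreateCreditCardContract();
  const valid = contract.validate({
    holder: 'JOHN DOE',         // at least 5 characters
    number: '4111111111111111', // exactly 16 characters
    expiration: '1227',         // exactly 4 characters
  } as CreditCard);
  if (!valid) {
    console.log(contract.errors);
  }
  return valid;
}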
toggle_switch.rs
//! Source code example of how to create your own widget.
//! This is meant to be read as a tutorial, hence the plethora of comments.

/// iOS-style toggle switch:
///
/// ``` text
///      _____________
///     /       /.....\
///    |       |.......|
///     \_______\_____/
/// ```
pub fn toggle(ui: &mut egui::Ui, on: &mut bool) -> egui::Response {
    // Widget code can be broken up in four steps:
    //  1. Decide a size for the widget
    //  2. Allocate space for it
    //  3. Handle interactions with the widget (if any)
    //  4. Paint the widget

    // 1. Deciding widget size:
    // You can query the `ui` for how much space is available,
    // but in this example we have a fixed-size widget of the default size for a button:
    let desired_size = ui.style().spacing.interact_size;

    // 2. Allocating space:
    // This is where we get a region of the screen assigned.
    // We also tell the Ui to sense clicks in the allocated region.
    let (rect, response) = ui.allocate_exact_size(desired_size, egui::Sense::click());

    // 3. Interact: Time to check for clicks!
    if response.clicked {
        *on = !*on;
    }

    // 4. Paint!
    // First let's ask for a simple animation from Egui.
    // Egui keeps track of changes in the boolean associated with the id and
    // returns an animated value in the 0-1 range for how much "on" we are.
    let how_on = ui.ctx().animate_bool(response.id, *on);
    // We will follow the current style by asking
    // "how should something that is being interacted with be painted?".
    // This will, for instance, give us different colors when the widget is hovered or clicked.
    let visuals = ui.style().interact(&response);
    let off_bg_fill = egui::Rgba::TRANSPARENT;
    let on_bg_fill = egui::Rgba::from_rgb(0.0, 0.5, 0.25);
    let bg_fill = egui::lerp(off_bg_fill..=on_bg_fill, how_on);
    // All coordinates are in absolute screen coordinates so we use `rect` to place the elements.
    let radius = 0.5 * rect.height();
    ui.painter().rect(rect, radius, bg_fill, visuals.bg_stroke);
    // Paint the circle, animating it from left to right with `how_on`:
    let circle_x = egui::lerp((rect.left() + radius)..=(rect.right() - radius), how_on);
    let center = egui::pos2(circle_x, rect.center().y);
    ui.painter()
        .circle(center, 0.75 * radius, visuals.fg_fill, visuals.fg_stroke);

    // All done! Return the interaction response so the user can check what happened
    // (hovered, clicked, ...) and maybe show a tooltip:
    response
}

/// Here is the same code again, but a bit more compact:
#[allow(dead_code)]
fn toggle_compact(ui: &mut egui::Ui, on: &mut bool) -> egui::Response
pub fn demo(ui: &mut egui::Ui, on: &mut bool) { ui.horizontal_wrapped_for_text(egui::TextStyle::Button, |ui| { ui.label("It's easy to create your own widgets!"); ui.label("This toggle switch is just one function and 15 lines of code:"); toggle(ui, on).on_hover_text("Click to toggle"); ui.add(crate::__egui_github_link_file!()); }); }
{ let desired_size = ui.style().spacing.interact_size; let (rect, response) = ui.allocate_exact_size(desired_size, egui::Sense::click()); *on ^= response.clicked; // toggle if clicked let how_on = ui.ctx().animate_bool(response.id, *on); let visuals = ui.style().interact(&response); let off_bg_fill = egui::Rgba::TRANSPARENT; let on_bg_fill = egui::Rgba::from_rgb(0.0, 0.5, 0.25); let bg_fill = egui::lerp(off_bg_fill..=on_bg_fill, how_on); let radius = 0.5 * rect.height(); ui.painter().rect(rect, radius, bg_fill, visuals.bg_stroke); let circle_x = egui::lerp((rect.left() + radius)..=(rect.right() - radius), how_on); let center = egui::pos2(circle_x, rect.center().y); ui.painter() .circle(center, 0.75 * radius, visuals.fg_fill, visuals.fg_stroke); response }
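// Usage sketch: driving the widget from some egui UI code; `on` is assumed
// to live in the caller's app state. `response.clicked` is a plain bool in
// this egui version, exactly as in `toggle` above.
#[allow(dead_code)]
fn toggle_usage(ui: &mut egui::Ui, on: &mut bool) {
    if toggle(ui, on).clicked {
        // the value was just flipped; react to the change here if needed
    }
    // the compact variant behaves identically:
    toggle_compact(ui, on).on_hover_text("Click to toggle");
}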
do_flask_mail.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from flask import Flask
from flask_mail import Mail
from flask_mail import Message
import os

# Tested successfully; the purpose of some parameters is still unclear
app = Flask(__name__)

app.config['MAIL_SERVER'] = 'smtp.163.com'
app.config['MAIL_PORT'] = 25
app.config['MAIL_USE_TLS'] = True
app.config['MAIL_USERNAME'] = '[email protected]'
app.config['MAIL_PASSWORD'] = '******'
app.config['FLASKY_MAIL_SUBJECT_PREFIX'] = '[Flasky]' # email subject prefix
#app.config['FLASKY_MAIL_SENDER'] = '[email protected]'
#app.config['FLASKY_ADMIN'] = '[email protected]'

mail = Mail(app)

"""
app.config['MAIL_USERNAME'] = os.environ.get('MAIL_USERNAME')
app.config['MAIL_PASSWORD'] = os.environ.get('MAIL_PASSWORD')
app.config['FLASKY_MAIL_SUBJECT_PREFIX'] = '[Flasky]'
app.config['FLASKY_MAIL_SENDER'] = 'Flasky Admin <[email protected]>'
app.config['FLASKY_ADMIN'] = os.environ.get('FLASKY_ADMIN')
"""

@app.route("/")
def index():
    # Message(subject, sender, recipients)
    msg = Message("Hello", sender=
n(debug=True) """ msg = Message("Hello", sender="[email protected]", recipients=["[email protected]"]) msg.body = "testing" msg.html = "<b>testing</b>" mail.send(msg) if __name__ == '__main__': mail.send(msg) pip install --no-deps lamson chardet flask-mail set [email protected] set MAIL_PASSWORD=****** set [email protected] >>> from flask.ext.mail import Message >>> from hello import mail >>> msg = Message('test subject', sender='[email protected]',recipients=['[email protected]']) >>> msg.body = 'text body' >>> msg.html = '<b>HTML</b> body' >>> with app.app_context(): ... mail.send(msg) """
"[email protected]", recipients=["[email protected]"]) msg.body = "testing" msg.html = "<b>testing</b>" mail.send(msg) return '<h1>Hello World!</h1>' if __name__ == '__main__': app.ru
pattern.py
import dateutil.parser
import datetime
import logging
import re

from kestrel.utils import dedup_dicts
from kestrel.semantics import get_entity_table
from kestrel.syntax.paramstix import parse_extended_stix_pattern
from kestrel.exceptions import (
    InvalidAttribute,
    UnsupportedStixSyntax,
    KestrelInternalError,
)
from firepit.exceptions import InvalidAttr

_logger = logging.getLogger(__name__)


def or_patterns(patterns):
    bodies = []
    time_range = []
    for pattern in patterns:
        if pattern:
            pieces = pattern.split()
            if len(pieces) > 4 and pieces[-4] == "START" and pieces[-2] == "STOP":
                time_range.append((pieces[-3], pieces[-1]))
                bodies.append("(" + " ".join(pieces[:-4]) + ")")
            else:
                bodies.append(pattern)
    if bodies:
        if time_range:
            start = min([t[0] for t in time_range])
            end = max([t[1] for t in time_range])
            final_pattern = (
                "(" + " OR ".join(bodies) + ")" + " START " + start + " STOP " + end
            )
        else:
            final_pattern = " OR ".join(bodies)
        _logger.debug(f"or pattern merged: {final_pattern}")
    else:
        final_pattern = None
        _logger.info("all None patterns input into or_patterns()")
    return final_pattern


def build_pattern(
    raw_pattern_body, time_range, start_offset, end_offset, symtable, store
):
    """Dereference variables in a STIX pattern and output the unfolded pattern."""
    references = parse_extended_stix_pattern(raw_pattern_body)
    pattern_body = raw_pattern_body
    _logger.debug(f"building pattern for: {raw_pattern_body}")

    if references:
        _logger.debug(f"references found: {list(references.keys())}")
        var_attr_to_vals_str = _dereference_multiple_variables(
            store, symtable, references
        )
        for var_attr, vals_str in var_attr_to_vals_str.items():
            pattern_body = _replace_ref_with_op(pattern_body, var_attr, vals_str)
        _logger.debug(f'pattern body dereferred: "{pattern_body}"')

    if pattern_body and not time_range:
        try:
            # use an explicit loop (not a comprehension) so `var_name`
            # is still bound if _get_variable_time_range() raises
            ref_var_time_ranges = []
            for var_name in references.keys():
                ref_var_time_ranges.append(
                    _get_variable_time_range(store, symtable, var_name)
                )
            start = min([t[0] for t in ref_var_time_ranges])
            end = max([t[1] for t in ref_var_time_ranges])
            start_adj = start + datetime.timedelta(seconds=start_offset)
            end_adj = end + datetime.timedelta(seconds=end_offset)
            start_stix = start_adj.strftime("%Y-%m-%dT%H:%M:%S.000Z")
            stop_stix = end_adj.strftime("%Y-%m-%dT%H:%M:%S.000Z")
            time_range = (start_stix, stop_stix)
            _logger.debug(f"pattern time range computed: {time_range}")
        except InvalidAttribute:
            time_range = None
            _logger.warning(
                f"pattern time range searching failed on variable {var_name}"
            )

    if pattern_body:
        if time_range:
            pattern = (
                f"({pattern_body}) START t'{time_range[0]}' STOP t'{time_range[1]}'"
            )
        else:
            pattern = f"{pattern_body}"
        _logger.debug(f'final pattern assembled: "{pattern}"')
    else:
        pattern = None
        _logger.info("empty pattern assembled")
    return pattern


def build_pattern_from_ids(return_type, ids):
    if ids:
        return "[" + return_type + ":id IN (" + ", ".join(map(_type_value, ids)) + ")]"
    else:
        return None


def _dereference_multiple_variables(store, symtable, references):
    return {
        var + "." + attr: "(" + ", ".join(map(_type_value, vals)) + ")"
        for var, attrs in references.items()
        for attr, vals in _dereference_variable(store, symtable, var, attrs).items()
    }


def _dereference_variable(store, symtable, var_name, attributes):
    attr_line = ",".join(attributes)
    _logger.debug(f'deref "{var_name}" with attributes "{attr_line}"')
    var_entity_table = get_entity_table(var_name, symtable)
    try:
        store_return = store.lookup(var_entity_table, attr_line)
    except InvalidAttr as e:
        _logger.warning(f"cannot deref {attr_line}. 
Invalid attribute in firepit.") raise InvalidAttribute(e.message) attr_to_values = {k: [] for k in attributes} for row in store_return: for k, v in row.items(): if v and v not in attr_to_values[k]:
for k, v in attr_to_values.items(): if not v: raise InvalidAttribute(var_name + "." + k) _logger.debug(f"deref results: {str(attr_to_values)}") return attr_to_values def _get_variable_time_range(store, symtable, var_name): """ Returns: start (datetime.datetime): the time any entities is observed first. end (datetime.datetime): the time any entities is observed last. """ time_attr_line = ",".join(["first_observed", "last_observed"]) var_entity_table = get_entity_table(var_name, symtable) try: store_return = store.lookup(var_entity_table, time_attr_line) except InvalidAttr as e: raise InvalidAttribute(e.message) life_span = dedup_dicts(store_return) start = min([dateutil.parser.isoparse(e["first_observed"]) for e in life_span]) end = max([dateutil.parser.isoparse(e["last_observed"]) for e in life_span]) return start, end def _type_value(value): if isinstance(value, str): return f"'{value}'" elif isinstance(value, int): return str(value) elif isinstance(value, float): # pandas dataframe and sqlite may save integers as floats return str(round(value)) else: return str(value) def _replace_ref_with_op(pattern, var_attr, vals_str): # avoid adhesive parans/ops that prevent correct splitting pattern = re.sub(r"([=><\[\]])", r" \1 ", pattern) pieces = pattern.split() try: ref_index = pieces.index(var_attr) except ValueError: err_msg = f'cannot find "{var_attr}" when assembling pattern "{pattern}"' _logger.error(err_msg) raise KestrelInternalError(err_msg) if pieces[ref_index - 1] == "=": pieces[ref_index - 1] = "IN" pieces[ref_index] = vals_str else: raise UnsupportedStixSyntax( 'only "=" is supported before referred variable in parameterized STIX' ) return " ".join(pieces)
attr_to_values[k].append(v)
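# Worked sketch of or_patterns() above: pattern bodies are OR-ed together
# and the START/STOP windows widen to their union (timestamps illustrative).
def _or_patterns_example():
    p1 = ("[ipv4-addr:value = '1.2.3.4'] "
          "START t'2021-01-01T00:00:00Z' STOP t'2021-01-02T00:00:00Z'")
    p2 = ("[ipv4-addr:value = '5.6.7.8'] "
          "START t'2021-01-03T00:00:00Z' STOP t'2021-01-04T00:00:00Z'")
    merged = or_patterns([p1, p2])
    # merged == "(([ipv4-addr:value = '1.2.3.4']) OR "
    #           "([ipv4-addr:value = '5.6.7.8'])) "
    #           "START t'2021-01-01T00:00:00Z' STOP t'2021-01-04T00:00:00Z'"
    return merged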
sys_validation_workflow.rs
//! The workflow and queue consumer for sys validation #![allow(deprecated)] use super::*; use crate::conductor::api::CellConductorApiT; use crate::core::queue_consumer::OneshotWriter; use crate::core::queue_consumer::TriggerSender; use crate::core::queue_consumer::WorkComplete; use crate::core::sys_validate::*; use crate::core::validation::*; use error::WorkflowError; use error::WorkflowResult; use fallible_iterator::FallibleIterator; use holo_hash::DhtOpHash; use holochain_cascade::Cascade; use holochain_cascade::DbPair; use holochain_cascade::DbPairMut; use holochain_lmdb::buffer::BufferedStore; use holochain_lmdb::buffer::KvBufFresh; use holochain_lmdb::db::INTEGRATION_LIMBO; use holochain_lmdb::fresh_reader; use holochain_lmdb::prelude::*; use holochain_p2p::HolochainP2pCell; use holochain_p2p::HolochainP2pCellT; use holochain_state::prelude::*; use holochain_types::prelude::*; use holochain_zome_types::Entry; use holochain_zome_types::ValidationStatus; use std::collections::BinaryHeap; use std::convert::TryFrom; use std::convert::TryInto; use tracing::*; use produce_dht_ops_workflow::dht_op_light::light_to_op; use types::Outcome; pub mod types; #[cfg(test)] mod chain_test; #[cfg(test)] mod test_ideas; #[cfg(test)] mod tests; #[instrument(skip( workspace, writer, trigger_app_validation, sys_validation_trigger, network, conductor_api ))] pub async fn sys_validation_workflow( mut workspace: SysValidationWorkspace, writer: OneshotWriter, trigger_app_validation: &mut TriggerSender, sys_validation_trigger: TriggerSender, network: HolochainP2pCell, conductor_api: impl CellConductorApiT, ) -> WorkflowResult<WorkComplete> { let complete = sys_validation_workflow_inner( &mut workspace, network, conductor_api, sys_validation_trigger, ) .await?; // --- END OF WORKFLOW, BEGIN FINISHER BOILERPLATE --- // commit the workspace writer.with_writer(|writer| Ok(workspace.flush_to_txn_ref(writer)?))?; // trigger other workflows trigger_app_validation.trigger(); Ok(complete) } async fn sys_validation_workflow_inner( workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, conductor_api: impl CellConductorApiT, sys_validation_trigger: TriggerSender, ) -> WorkflowResult<WorkComplete> { let env = workspace.validation_limbo.env().clone(); // Drain all the ops let sorted_ops: BinaryHeap<OrderedOp<ValidationLimboValue>> = fresh_reader!(env, |r| { let validation_limbo = &mut workspace.validation_limbo; let element_pending = &workspace.element_pending; let sorted_ops: Result<BinaryHeap<OrderedOp<ValidationLimboValue>>, WorkflowError> = validation_limbo .drain_iter_filter(&r, |(_, vlv)| { match vlv.status { // We only want pending or awaiting sys dependency ops ValidationLimboStatus::Pending | ValidationLimboStatus::AwaitingSysDeps(_) => Ok(true), ValidationLimboStatus::SysValidated | ValidationLimboStatus::AwaitingAppDeps(_) => Ok(false), } })? .map_err(WorkflowError::from) .map(|vlv| { // Sort the ops into a min-heap let op = light_to_op(vlv.op.clone(), element_pending)?; let hash = DhtOpHash::with_data_sync(&op); let order = DhtOpOrder::from(&op); let v = OrderedOp { order, hash, op, value: vlv, }; Ok(v) }) .iterator() .collect(); sorted_ops })?; // Process each op for so in sorted_ops.into_sorted_vec() { let OrderedOp { hash: op_hash, op, value: mut vlv, .. } = so; // Create an incoming ops sender for any dependencies we find // that we are meant to be holding but aren't. // If we are not holding them they will be added to our incoming ops. 
let incoming_dht_ops_sender = IncomingDhtOpSender::new(workspace.env.clone().into(), sys_validation_trigger.clone()); let outcome = validate_op( &op, workspace, network.clone(), &conductor_api, Some(incoming_dht_ops_sender), ) .await?; match outcome { Outcome::Accepted => { vlv.status = ValidationLimboStatus::SysValidated; workspace.put_val_limbo(op_hash, vlv)?; } Outcome::SkipAppValidation => { let iv = IntegrationLimboValue { op: vlv.op, validation_status: ValidationStatus::Valid, }; workspace.put_int_limbo(op_hash, iv)?; } Outcome::AwaitingOpDep(missing_dep) => { // TODO: Try and get this dependency to add to limbo // // I actually can't see how we can do this because there's no // way to get an DhtOpHash without either having the op or the full // header. We have neither that's why where here. // // We need to be holding the dependency because // we were meant to get a StoreElement or StoreEntry or // RegisterAgentActivity or RegisterAddLink. vlv.status = ValidationLimboStatus::AwaitingSysDeps(missing_dep); workspace.put_val_limbo(op_hash, vlv)?; } Outcome::MissingDhtDep => { vlv.status = ValidationLimboStatus::Pending; workspace.put_val_limbo(op_hash, vlv)?; } Outcome::Rejected => { let iv = IntegrationLimboValue { op: vlv.op, validation_status: ValidationStatus::Rejected, }; workspace.put_int_limbo(op_hash, iv)?; } } } Ok(WorkComplete::Complete) } async fn validate_op( op: &DhtOp, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, conductor_api: &impl CellConductorApiT, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> WorkflowResult<Outcome> { match validate_op_inner( op, workspace, network, conductor_api, incoming_dht_ops_sender, ) .await { Ok(_) => match op { // TODO: Check strict mode where store element // is also run through app validation DhtOp::RegisterAgentActivity(_, _) => Ok(Outcome::SkipAppValidation), _ => Ok(Outcome::Accepted), }, // Handle the errors that result in pending or awaiting deps Err(SysValidationError::ValidationOutcome(e)) => { warn!( agent = %which_agent(conductor_api.cell_id().agent_pubkey()), msg = "DhtOp has failed system validation", ?op, error = ?e, error_msg = %e ); Ok(handle_failed(e)) } Err(e) => Err(e.into()), } } /// For now errors result in an outcome but in the future /// we might find it useful to include the reason something /// was rejected etc. /// This is why the errors contain data but is currently unread. fn handle_failed(error: ValidationOutcome) -> Outcome
async fn validate_op_inner( op: &DhtOp, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, conductor_api: &impl CellConductorApiT, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> SysValidationResult<()> { match op { DhtOp::StoreElement(_, header, entry) => { store_element(header, workspace, network.clone()).await?; if let Some(entry) = entry { store_entry( (header) .try_into() .map_err(|_| ValidationOutcome::NotNewEntry(header.clone()))?, entry.as_ref(), conductor_api, workspace, network, ) .await?; } Ok(()) } DhtOp::StoreEntry(_, header, entry) => { store_entry( (header).into(), entry.as_ref(), conductor_api, workspace, network.clone(), ) .await?; let header = header.clone().into(); store_element(&header, workspace, network).await?; Ok(()) } DhtOp::RegisterAgentActivity(_, header) => { register_agent_activity(header, workspace, network.clone(), incoming_dht_ops_sender) .await?; store_element(header, workspace, network).await?; Ok(()) } DhtOp::RegisterUpdatedContent(_, header, entry) => { register_updated_content(header, workspace, network.clone(), incoming_dht_ops_sender) .await?; if let Some(entry) = entry { store_entry( NewEntryHeaderRef::Update(header), entry.as_ref(), conductor_api, workspace, network.clone(), ) .await?; } Ok(()) } DhtOp::RegisterUpdatedElement(_, header, entry) => { register_updated_element(header, workspace, network.clone(), incoming_dht_ops_sender) .await?; if let Some(entry) = entry { store_entry( NewEntryHeaderRef::Update(header), entry.as_ref(), conductor_api, workspace, network.clone(), ) .await?; } Ok(()) } DhtOp::RegisterDeletedBy(_, header) => { register_deleted_by(header, workspace, network, incoming_dht_ops_sender).await?; Ok(()) } DhtOp::RegisterDeletedEntryHeader(_, header) => { register_deleted_entry_header(header, workspace, network, incoming_dht_ops_sender) .await?; Ok(()) } DhtOp::RegisterAddLink(_, header) => { register_add_link(header, workspace, network, incoming_dht_ops_sender).await?; Ok(()) } DhtOp::RegisterRemoveLink(_, header) => { register_delete_link(header, workspace, network, incoming_dht_ops_sender).await?; Ok(()) } } } #[instrument(skip(element, call_zome_workspace, network, conductor_api))] /// Direct system validation call that takes /// an Element instead of an op. /// Does not require holding dependencies. /// Will not await dependencies and instead returns /// that outcome immediately. 
pub async fn sys_validate_element( element: &Element, call_zome_workspace: &mut CallZomeWorkspace, network: HolochainP2pCell, conductor_api: &impl CellConductorApiT, ) -> SysValidationOutcome<()> { trace!(?element); // Create a SysValidationWorkspace with the scratches from the CallZomeWorkspace let mut workspace = SysValidationWorkspace::try_from(&*call_zome_workspace)?; let result = match sys_validate_element_inner(element, &mut workspace, network, conductor_api).await { // Validation succeeded Ok(_) => Ok(()), // Validation failed so exit with that outcome Err(SysValidationError::ValidationOutcome(validation_outcome)) => { error!(msg = "Direct validation failed", ?element); validation_outcome.into_outcome() } // An error occurred so return it Err(e) => Err(OutcomeOrError::Err(e)), }; // Set the call zome workspace to the updated // cache from the sys validation workspace call_zome_workspace.meta_cache = workspace.meta_cache; call_zome_workspace.element_cache = workspace.element_cache; result } async fn sys_validate_element_inner( element: &Element, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, conductor_api: &impl CellConductorApiT, ) -> SysValidationResult<()> { let signature = element.signature(); let header = element.header(); let entry = element.entry().as_option(); let incoming_dht_ops_sender = None; if !counterfeit_check(signature, header).await? { return Err(ValidationOutcome::Counterfeit(signature.clone(), header.clone()).into()); } store_element(header, workspace, network.clone()).await?; if let Some((entry, EntryVisibility::Public)) = &entry.and_then(|e| header.entry_type().map(|et| (e, et.visibility()))) { store_entry( (header) .try_into() .map_err(|_| ValidationOutcome::NotNewEntry(header.clone()))?, entry, conductor_api, workspace, network.clone(), ) .await?; } match header { Header::Update(header) => { register_updated_content(header, workspace, network, incoming_dht_ops_sender).await?; } Header::Delete(header) => { register_deleted_entry_header(header, workspace, network, incoming_dht_ops_sender) .await?; } Header::CreateLink(header) => { register_add_link(header, workspace, network, incoming_dht_ops_sender).await?; } Header::DeleteLink(header) => { register_delete_link(header, workspace, network, incoming_dht_ops_sender).await?; } _ => {} } Ok(()) } /// Check if the op has valid signature and author. /// Ops that fail this check should be dropped. pub async fn counterfeit_check( signature: &Signature, header: &Header, ) -> SysValidationResult<bool> { Ok(verify_header_signature(&signature, &header).await? && author_key_is_valid(header.author()).await?) 
} async fn register_agent_activity( header: &Header, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> SysValidationResult<()> { // Get data ready to validate let prev_header_hash = header.prev_header(); // Checks check_prev_header(&header)?; check_valid_if_dna(&header, &workspace.meta_vault).await?; if let Some(prev_header_hash) = prev_header_hash { check_and_hold_register_agent_activity( prev_header_hash, workspace, network, incoming_dht_ops_sender, |_| Ok(()), ) .await?; } check_chain_rollback(&header, &workspace).await?; Ok(()) } async fn store_element( header: &Header, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, ) -> SysValidationResult<()> { // Get data ready to validate let prev_header_hash = header.prev_header(); // Checks check_prev_header(header)?; if let Some(prev_header_hash) = prev_header_hash { let mut cascade = workspace.full_cascade(network); let prev_header = cascade .retrieve_header(prev_header_hash.clone(), Default::default()) .await? .ok_or_else(|| ValidationOutcome::DepMissingFromDht(prev_header_hash.clone().into()))?; check_prev_timestamp(&header, prev_header.header())?; check_prev_seq(&header, prev_header.header())?; } Ok(()) } async fn store_entry( header: NewEntryHeaderRef<'_>, entry: &Entry, conductor_api: &impl CellConductorApiT, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, ) -> SysValidationResult<()> { // Get data ready to validate let entry_type = header.entry_type(); let entry_hash = header.entry_hash(); // Checks check_entry_type(entry_type, entry)?; if let EntryType::App(app_entry_type) = entry_type { let entry_def = check_app_entry_type(app_entry_type, conductor_api).await?; check_not_private(&entry_def)?; } check_entry_hash(entry_hash, entry).await?; check_entry_size(entry)?; // Additional checks if this is an Update if let NewEntryHeaderRef::Update(entry_update) = header { let original_header_address = &entry_update.original_header_address; let mut cascade = workspace.full_cascade(network); let original_header = cascade .retrieve_header(original_header_address.clone(), Default::default()) .await? 
.ok_or_else(|| { ValidationOutcome::DepMissingFromDht(original_header_address.clone().into()) })?; update_check(entry_update, original_header.header())?; } Ok(()) } async fn register_updated_content( entry_update: &Update, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> SysValidationResult<()> { // Get data ready to validate let original_header_address = &entry_update.original_header_address; let dependency_check = |original_element: &Element| update_check(entry_update, original_element.header()); check_and_hold_store_entry( original_header_address, workspace, network, incoming_dht_ops_sender, dependency_check, ) .await?; Ok(()) } async fn register_updated_element( entry_update: &Update, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> SysValidationResult<()> { // Get data ready to validate let original_header_address = &entry_update.original_header_address; let dependency_check = |original_element: &Element| update_check(entry_update, original_element.header()); check_and_hold_store_element( original_header_address, workspace, network, incoming_dht_ops_sender, dependency_check, ) .await?; Ok(()) } async fn register_deleted_by( element_delete: &Delete, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> SysValidationResult<()> { // Get data ready to validate let removed_header_address = &element_delete.deletes_address; // Checks let dependency_check = |removed_header: &Element| check_new_entry_header(removed_header.header()); check_and_hold_store_element( removed_header_address, workspace, network, incoming_dht_ops_sender, dependency_check, ) .await?; Ok(()) } async fn register_deleted_entry_header( element_delete: &Delete, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> SysValidationResult<()> { // Get data ready to validate let removed_header_address = &element_delete.deletes_address; // Checks let dependency_check = |removed_header: &Element| check_new_entry_header(removed_header.header()); check_and_hold_store_entry( removed_header_address, workspace, network, incoming_dht_ops_sender, dependency_check, ) .await?; Ok(()) } async fn register_add_link( link_add: &CreateLink, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> SysValidationResult<()> { // Get data ready to validate let base_entry_address = &link_add.base_address; let target_entry_address = &link_add.target_address; // Checks check_and_hold_any_store_entry( base_entry_address, workspace, network.clone(), incoming_dht_ops_sender, |_| Ok(()), ) .await?; let mut cascade = workspace.full_cascade(network); cascade .retrieve_entry(target_entry_address.clone(), Default::default()) .await? 
.ok_or_else(|| ValidationOutcome::DepMissingFromDht(target_entry_address.clone().into()))?; check_tag_size(&link_add.tag)?; Ok(()) } async fn register_delete_link( link_remove: &DeleteLink, workspace: &mut SysValidationWorkspace, network: HolochainP2pCell, incoming_dht_ops_sender: Option<IncomingDhtOpSender>, ) -> SysValidationResult<()> { // Get data ready to validate let link_add_address = &link_remove.link_add_address; // Checks check_and_hold_register_add_link( link_add_address, workspace, network, incoming_dht_ops_sender, |_| Ok(()), ) .await?; Ok(()) } fn update_check(entry_update: &Update, original_header: &Header) -> SysValidationResult<()> { check_new_entry_header(original_header)?; let original_header: NewEntryHeaderRef = original_header .try_into() .expect("This can't fail due to the above check_new_entry_header"); check_update_reference(entry_update, &original_header)?; Ok(()) } pub struct SysValidationWorkspace { pub integration_limbo: IntegrationLimboStore, pub validation_limbo: ValidationLimboStore, /// Integrated data pub element_vault: ElementBuf, pub meta_vault: MetadataBuf, /// Data pending validation pub element_pending: ElementBuf<PendingPrefix>, pub meta_pending: MetadataBuf<PendingPrefix>, /// Read only rejected store for finding dependency data pub element_rejected: ElementBuf<RejectedPrefix>, pub meta_rejected: MetadataBuf<RejectedPrefix>, // Read only authored store for finding dependency data pub element_authored: ElementBuf<AuthoredPrefix>, pub meta_authored: MetadataBuf<AuthoredPrefix>, /// Cached data pub element_cache: ElementBuf, pub meta_cache: MetadataBuf, pub env: EnvironmentRead, } impl<'a> SysValidationWorkspace { pub fn cascade<Network: HolochainP2pCellT + Clone + Send + 'static>( &'a mut self, network: Network, ) -> Cascade<'a, Network> { Cascade::new( self.validation_limbo.env().clone(), &self.element_authored, &self.meta_authored, &self.element_vault, &self.meta_vault, &self.element_rejected, &self.meta_rejected, &mut self.element_cache, &mut self.meta_cache, network, ) } } impl SysValidationWorkspace { pub fn new(env: EnvironmentRead) -> WorkspaceResult<Self> { let db = env.get_db(&*INTEGRATION_LIMBO)?; let integration_limbo = KvBufFresh::new(env.clone(), db); let validation_limbo = ValidationLimboStore::new(env.clone())?; let element_vault = ElementBuf::vault(env.clone(), false)?; let meta_vault = MetadataBuf::vault(env.clone())?; let element_cache = ElementBuf::cache(env.clone())?; let meta_cache = MetadataBuf::cache(env.clone())?; let element_pending = ElementBuf::pending(env.clone())?; let meta_pending = MetadataBuf::pending(env.clone())?; // READ ONLY let element_authored = ElementBuf::authored(env.clone(), false)?; let meta_authored = MetadataBuf::authored(env.clone())?; let element_rejected = ElementBuf::rejected(env.clone())?; let meta_rejected = MetadataBuf::rejected(env.clone())?; Ok(Self { integration_limbo, validation_limbo, element_vault, meta_vault, element_pending, meta_pending, element_rejected, meta_rejected, element_authored, meta_authored, element_cache, meta_cache, env, }) } fn put_val_limbo( &mut self, hash: DhtOpHash, mut vlv: ValidationLimboValue, ) -> WorkflowResult<()> { vlv.last_try = Some(timestamp::now()); vlv.num_tries += 1; self.validation_limbo.put(hash, vlv)?; Ok(()) } #[tracing::instrument(skip(self, hash))] fn put_int_limbo(&mut self, hash: DhtOpHash, iv: IntegrationLimboValue) -> WorkflowResult<()> { self.integration_limbo.put(hash, iv)?; Ok(()) } pub fn network_only_cascade<Network: HolochainP2pCellT + Clone + 
Send + 'static>( &mut self, network: Network, ) -> Cascade<'_, Network> { let cache_data = DbPairMut { element: &mut self.element_cache, meta: &mut self.meta_cache, }; Cascade::empty() .with_network(network) .with_cache(cache_data) } /// Create a cascade with local data only pub fn local_cascade(&mut self) -> Cascade<'_> { let integrated_data = DbPair { element: &self.element_vault, meta: &self.meta_vault, }; let authored_data = DbPair { element: &self.element_authored, meta: &self.meta_authored, }; let pending_data = DbPair { element: &self.element_pending, meta: &self.meta_pending, }; let rejected_data = DbPair { element: &self.element_rejected, meta: &self.meta_rejected, }; let cache_data = DbPairMut { element: &mut self.element_cache, meta: &mut self.meta_cache, }; Cascade::empty() .with_integrated(integrated_data) .with_authored(authored_data) .with_pending(pending_data) .with_cache(cache_data) .with_rejected(rejected_data) } /// Get a cascade over all local databases and the network pub fn full_cascade<Network: HolochainP2pCellT + Clone>( &mut self, network: Network, ) -> Cascade<'_, Network> { self.local_cascade().with_network(network) } } impl Workspace for SysValidationWorkspace { fn flush_to_txn_ref(&mut self, writer: &mut Writer) -> WorkspaceResult<()> { self.validation_limbo.0.flush_to_txn_ref(writer)?; self.integration_limbo.flush_to_txn_ref(writer)?; // Flush for cascade self.element_cache.flush_to_txn_ref(writer)?; self.meta_cache.flush_to_txn_ref(writer)?; self.element_pending.flush_to_txn_ref(writer)?; self.meta_pending.flush_to_txn_ref(writer)?; Ok(()) } } /// Create a new SysValidationWorkspace with the scratches from the CallZomeWorkspace impl TryFrom<&CallZomeWorkspace> for SysValidationWorkspace { type Error = WorkspaceError; fn try_from(call_zome: &CallZomeWorkspace) -> Result<Self, Self::Error> { let CallZomeWorkspace { source_chain, meta_authored, element_integrated, meta_integrated, element_rejected, meta_rejected, element_cache, meta_cache, } = call_zome; let mut sys_val = Self::new(call_zome.env().clone())?; sys_val.element_authored = source_chain.elements().into(); sys_val.meta_authored = meta_authored.into(); sys_val.element_vault = element_integrated.into(); sys_val.meta_vault = meta_integrated.into(); sys_val.element_rejected = element_rejected.into(); sys_val.meta_rejected = meta_rejected.into(); sys_val.element_cache = element_cache.into(); sys_val.meta_cache = meta_cache.into(); Ok(sys_val) } }
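As an aside on the cascades built by local_cascade and full_cascade above: each retrieval consults the integrated, authored, pending, and rejected stores before the cache, and a full cascade additionally falls back to the network. A minimal Python sketch of that layered lookup, with purely illustrative names (make_cascade and the dict-backed stores are not the real API):

# Illustrative chained-store lookup, modeled on the cascade above: query
# each local layer in turn, fall back to the network last, and populate
# the cache with network hits.
def make_cascade(local_stores, cache, network=None):
    def retrieve(key):
        for store in (*local_stores, cache):  # integrated, authored, pending, rejected, then cache
            if key in store:
                return store[key]
        if network is not None:
            value = network.get(key)  # hypothetical network fetch
            if value is not None:
                cache[key] = value    # future lookups hit the cache layer
            return value
        return None
    return retrieve

# Usage: a "full cascade" is the local stores plus a network source.
vault, authored, pending, rejected, cache = {}, {"h1": "element-1"}, {}, {}, {}
full = make_cascade([vault, authored, pending, rejected], cache, network={"h2": "element-2"})
assert full("h1") == "element-1"   # found locally
assert full("h2") == "element-2"   # fetched from the network and cached
assert "h2" in cache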
/// For now errors result in an outcome, but in the future
/// we might find it useful to include the reason something
/// was rejected, etc.
/// This is why the errors contain data, though it is currently unread.
fn handle_failed(error: ValidationOutcome) -> Outcome {
    use Outcome::*;
    match error {
        ValidationOutcome::Counterfeit(_, _) => {
            unreachable!("Counterfeit ops are dropped before sys validation")
        }
        ValidationOutcome::DepMissingFromDht(_) => MissingDhtDep,
        ValidationOutcome::EntryDefId(_) => Rejected,
        ValidationOutcome::EntryHash => Rejected,
        ValidationOutcome::EntryTooLarge(_, _) => Rejected,
        ValidationOutcome::EntryType => Rejected,
        ValidationOutcome::EntryVisibility(_) => Rejected,
        ValidationOutcome::TagTooLarge(_, _) => Rejected,
        ValidationOutcome::NotCreateLink(_) => Rejected,
        ValidationOutcome::NotNewEntry(_) => Rejected,
        ValidationOutcome::NotHoldingDep(dep) => AwaitingOpDep(dep),
        ValidationOutcome::PrevHeaderError(PrevHeaderError::MissingMeta(dep)) => {
            AwaitingOpDep(dep.into())
        }
        ValidationOutcome::PrevHeaderError(_) => Rejected,
        ValidationOutcome::PrivateEntry => Rejected,
        ValidationOutcome::UpdateTypeMismatch(_, _) => Rejected,
        ValidationOutcome::VerifySignature(_, _) => Rejected,
        ValidationOutcome::ZomeId(_) => Rejected,
    }
}
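Taken together, handle_failed and the match in the workflow loop above form a small state machine: validation errors map to outcomes, and outcomes map to limbo transitions. Below is a minimal Python sketch of that routing; every name in it (Outcome, LimboValue, route_outcome) is invented for illustration, so this is a model of the control flow, not Holochain code.

# Minimal model of the outcome routing above. Ops either stay in
# validation limbo with an updated status, or move to integration limbo
# with a final validation status.
from dataclasses import dataclass
from enum import Enum, auto

class Outcome(Enum):
    ACCEPTED = auto()             # passed sys validation, app validation next
    SKIP_APP_VALIDATION = auto()  # passed and can integrate directly
    AWAITING_OP_DEP = auto()      # dependency known but not yet held
    MISSING_DHT_DEP = auto()      # dependency not found on the DHT yet
    REJECTED = auto()             # failed validation permanently

@dataclass
class LimboValue:
    status: object
    num_tries: int = 0

def route_outcome(outcome, vlv, val_limbo, int_limbo, op_hash, missing_dep=None):
    if outcome is Outcome.ACCEPTED:
        vlv.status = "SysValidated"
        val_limbo[op_hash] = vlv
    elif outcome is Outcome.SKIP_APP_VALIDATION:
        int_limbo[op_hash] = ("Valid", vlv)
    elif outcome is Outcome.AWAITING_OP_DEP:
        vlv.status = ("AwaitingSysDeps", missing_dep)
        val_limbo[op_hash] = vlv
    elif outcome is Outcome.MISSING_DHT_DEP:
        vlv.status = "Pending"  # will be retried on a later workflow run
        val_limbo[op_hash] = vlv
    elif outcome is Outcome.REJECTED:
        int_limbo[op_hash] = ("Rejected", vlv)

val_limbo, int_limbo = {}, {}
route_outcome(Outcome.SKIP_APP_VALIDATION, LimboValue("Pending"), val_limbo, int_limbo, "op1")
assert int_limbo["op1"][0] == "Valid"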
package.py
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Seqan(CMakePackage): """SeqAn is an open source C++ library of efficient algorithms and data
    structures for the analysis of sequences with the focus on biological data.
    Our library applies a unique generic design that guarantees high
    performance, generality, extensibility, and integration with other
    libraries. SeqAn is easy to use and simplifies the development of new
    software tools with a minimal loss of performance."""

    homepage = "https://www.seqan.de"
    url = "https://github.com/seqan/seqan/archive/seqan-v2.4.0.tar.gz"

    version('2.4.0', sha256='d7084d17729214003e84818e0280a16f223c8f1c6a30eeef040c27e0c0047bd7')

    depends_on('[email protected]:', type='build')
    depends_on('[email protected]:', type='build')
    depends_on('py-nose', type='build')
    depends_on('py-sphinx', type='build')
    depends_on('boost', type=('build', 'link'))
    depends_on('zlib', type=('build', 'link'))
    depends_on('bzip2', type=('build', 'link'))

    conflicts('%intel@:16.0.4')
    conflicts('%gcc@:4.9.4')
    conflicts('%llvm@:3.5.1')
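For readers who have not written Spack recipes before, the package above is typical of the pattern: versions pin source tarballs by checksum, depends_on distinguishes build-only tools from link-time libraries, and conflicts excludes compilers known not to work. Here is a minimal sketch of the same shape; the package name, URL, and checksum are hypothetical placeholders, and like any Spack recipe it only runs inside a Spack installation.

# Hypothetical minimal CMake-based Spack recipe, mirroring the Seqan
# package above. All names, URLs, and the checksum are placeholders.
from spack import *


class Examplelib(CMakePackage):
    """A hypothetical C++ library used to illustrate the directives
    Spack recipes are built from."""

    homepage = "https://example.org/examplelib"
    url = "https://example.org/examplelib-1.0.0.tar.gz"

    # Each version pins a source tarball by checksum.
    version('1.0.0', sha256='0' * 64)

    # Build-time-only tools vs. link-time libraries are distinguished
    # by the `type` argument, exactly as in the Seqan recipe.
    depends_on('[email protected]:', type='build')
    depends_on('zlib', type=('build', 'link'))

    # Compilers known not to work are excluded up front.
    conflicts('%gcc@:4.9.4')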
btp_top_level_import285_all_of.py
# coding: utf-8 """ Onshape REST API The Onshape REST API consumed by all clients. # noqa: E501 The version of the OpenAPI document: 1.113 Contact: [email protected] Generated by: https://openapi-generator.tech """ from __future__ import absolute_import import re # noqa: F401 import sys # noqa: F401 import six # noqa: F401 import nulltype # noqa: F401 from onshape_client.oas.model_utils import ( # noqa: F401 ModelComposed, ModelNormal, ModelSimple, date, datetime, file_type, int, none_type, str, validate_get_composed_info, ) try: from onshape_client.oas.models import btp_identifier8 except ImportError: btp_identifier8 = sys.modules["onshape_client.oas.models.btp_identifier8"] try: from onshape_client.oas.models import btp_module_id235 except ImportError: btp_module_id235 = sys.modules["onshape_client.oas.models.btp_module_id235"] try: from onshape_client.oas.models import btp_space10 except ImportError: btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"] class BTPTopLevelImport285AllOf(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = {} validations = {} additional_properties_type = None @staticmethod def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "bt_type": (str,),  # noqa: E501
            "combined_namespace_path_and_version": (str,),  # noqa: E501
            "import_microversion": (str,),  # noqa: E501
            "module_id": (btp_module_id235.BTPModuleId235,),  # noqa: E501
            "namespace": ([btp_identifier8.BTPIdentifier8],),  # noqa: E501
            "namespace_string": (str,),  # noqa: E501
            "space_before_import": (btp_space10.BTPSpace10,),  # noqa: E501
        }

    @staticmethod
    def discriminator():
        return None

    attribute_map = {
        "bt_type": "btType",  # noqa: E501
        "combined_namespace_path_and_version": "combinedNamespacePathAndVersion",  # noqa: E501
        "import_microversion": "importMicroversion",  # noqa: E501
        "module_id": "moduleId",  # noqa: E501
        "namespace": "namespace",  # noqa: E501
        "namespace_string": "namespaceString",  # noqa: E501
        "space_before_import": "spaceBeforeImport",  # noqa: E501
    }

    @staticmethod
    def _composed_schemas():
        return None

    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
        ]
    )

    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """btp_top_level_import285_all_of.BTPTopLevelImport285AllOf - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _from_server (bool): True if the data is from the server
                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            bt_type (str): [optional]  # noqa: E501
            combined_namespace_path_and_version (str): [optional]  # noqa: E501
            import_microversion (str): [optional]  # noqa: E501
            module_id (btp_module_id235.BTPModuleId235): [optional]  # noqa: E501
            namespace ([btp_identifier8.BTPIdentifier8]): [optional]  # noqa: E501
            namespace_string (str): [optional]  # noqa: E501
            space_before_import (btp_space10.BTPSpace10): [optional]  # noqa: E501
        """

        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration

        for var_name, var_value in six.iteritems(kwargs):
            if (
                var_name not in self.attribute_map
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and self.additional_properties_type is None
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)
values.py
# flake8: noqa

from cereal import car
from selfdrive.car import dbc_dict
from selfdrive.config import Conversions as CV

Ecu = car.CarParams.Ecu
MIN_ACC_SPEED = 19. * CV.MPH_TO_MS
PEDAL_HYST_GAP = 3. * CV.MPH_TO_MS
PEDAL_SCALE = 3.0

class CarControllerParams:
  ACCEL_HYST_GAP = 0.06  # don't change accel command for small oscillations within this value
  ACCEL_MAX = 1.5  # m/s2, lower than allowed 2.0 m/s2 for tuning reasons
  ACCEL_MIN = -3.5  # m/s2

  STEER_MAX = 1500
  STEER_DELTA_UP = 10       # 1.5s time to peak torque
  STEER_DELTA_DOWN = 25     # always lower than 45 otherwise the Rav4 faults (Prius seems ok with 50)
  STEER_ERROR_MAX = 350     # max delta between torque cmd and torque motor

class CAR
: PRIUS = "TOYOTA PRIUS 2017" PRIUS_TSS2 = "TOYOTA PRIUS TSS2 2021" RAV4H = "TOYOTA RAV4 HYBRID 2017" RAV4 = "TOYOTA RAV4 2017" COROLLA = "TOYOTA COROLLA 2017" LEXUS_RX = "LEXUS RX 2016" LEXUS_RXH = "LEXUS RX HYBRID 2017" LEXUS_RX_TSS2 = "LEXUS RX 2020" LEXUS_RXH_TSS2 = "LEXUS RX HYBRID 2020" CHR = "TOYOTA C-HR 2018" CHRH = "TOYOTA C-HR HYBRID 2018" CAMRY = "TOYOTA CAMRY 2018" CAMRYH = "TOYOTA CAMRY HYBRID 2018" CAMRY_TSS2 = "TOYOTA CAMRY 2021" # TSS 2.5 CAMRYH_TSS2 = "TOYOTA CAMRY HYBRID 2021" HIGHLANDER = "TOYOTA HIGHLANDER 2017" HIGHLANDER_TSS2 = "TOYOTA HIGHLANDER 2020" HIGHLANDERH = "TOYOTA HIGHLANDER HYBRID 2018" HIGHLANDERH_TSS2 = "TOYOTA HIGHLANDER HYBRID 2020" AVALON = "TOYOTA AVALON 2016" AVALON_2019 = "TOYOTA AVALON 2019" AVALONH_2019 = "TOYOTA AVALON HYBRID 2019" RAV4_TSS2 = "TOYOTA RAV4 2019" COROLLA_TSS2 = "TOYOTA COROLLA TSS2 2019" # LSS2 Lexus UX Hybrid is same as a TSS2 Corolla Hybrid COROLLAH_TSS2 = "TOYOTA COROLLA HYBRID TSS2 2019" LEXUS_ES_TSS2 = "LEXUS ES 2019" LEXUS_ESH_TSS2 = "LEXUS ES HYBRID 2019" LEXUS_ESH = "LEXUS ES HYBRID 2018" SIENNA = "TOYOTA SIENNA 2018" LEXUS_IS = "LEXUS IS 2018" LEXUS_CTH = "LEXUS CT HYBRID 2018" RAV4H_TSS2 = "TOYOTA RAV4 HYBRID 2019" LEXUS_NXH = "LEXUS NX HYBRID 2018" LEXUS_NX = "LEXUS NX 2018" LEXUS_NX_TSS2 = "LEXUS NX 2020" MIRAI = "TOYOTA MIRAI 2021" # TSS 2.5 ALPHARD_TSS2 = "TOYOTA ALPHARD 2020" # (addr, cars, bus, 1/freq*100, vl) STATIC_DSU_MSGS = [ (0x128, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.AVALON), 1, 3, b'\xf4\x01\x90\x83\x00\x37'), (0x128, (CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH), 1, 3, b'\x03\x00\x20\x00\x00\x52'), (0x141, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX), 1, 2, b'\x00\x00\x00\x46'), (0x160, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX), 1, 7, b'\x00\x00\x08\x12\x01\x31\x9c\x51'), (0x161, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.AVALON, CAR.LEXUS_RX), 1, 7, b'\x00\x1e\x00\x00\x00\x80\x07'), (0X161, (CAR.HIGHLANDERH, CAR.HIGHLANDER, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH), 1, 7, b'\x00\x1e\x00\xd4\x00\x00\x5b'), (0x283, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX), 0, 3, b'\x00\x00\x00\x00\x00\x00\x8c'), (0x2E6, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH), 0, 3, b'\xff\xf8\x00\x08\x7f\xe0\x00\x4e'), (0x2E7, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH), 0, 3, b'\xa8\x9c\x31\x9c\x00\x00\x00\x02'), (0x33E, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH), 0, 20, b'\x0f\xff\x26\x40\x00\x1f\x00'), (0x344, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX), 0, 5, b'\x00\x00\x01\x00\x00\x00\x00\x50'), (0x365, (CAR.PRIUS, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.HIGHLANDERH), 0, 20, b'\x00\x00\x00\x80\x03\x00\x08'), (0x365, (CAR.RAV4, CAR.RAV4H, CAR.COROLLA, CAR.HIGHLANDER, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX), 0, 20, b'\x00\x00\x00\x80\xfc\x00\x08'), (0x366, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, 
CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.HIGHLANDERH), 0, 20, b'\x00\x00\x4d\x82\x40\x02\x00'), (0x366, (CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDER, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX), 0, 20, b'\x00\x72\x07\xff\x09\xfe\x00'), (0x470, (CAR.PRIUS, CAR.LEXUS_RXH), 1, 100, b'\x00\x00\x02\x7a'), (0x470, (CAR.HIGHLANDER, CAR.HIGHLANDERH, CAR.RAV4H, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH), 1, 100, b'\x00\x00\x01\x79'), (0x4CB, (CAR.PRIUS, CAR.RAV4H, CAR.LEXUS_RXH, CAR.LEXUS_NXH, CAR.LEXUS_NX, CAR.RAV4, CAR.COROLLA, CAR.HIGHLANDERH, CAR.HIGHLANDER, CAR.AVALON, CAR.SIENNA, CAR.LEXUS_CTH, CAR.LEXUS_ESH, CAR.LEXUS_RX), 0, 100, b'\x0c\x00\x00\x00\x00\x00\x00\x00'), ] FINGERPRINTS = { CAR.RAV4: [{ 36: 8, 37: 8, 170: 8, 180: 8, 186: 4, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 512: 6, 513: 6, 547: 8, 548: 8, 552: 4, 562: 4, 608: 8, 610: 5, 643: 7, 705: 8, 725: 2, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 897: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 918: 7, 921: 8, 933: 8, 944: 8, 945: 8, 951: 8, 955: 4, 956: 8, 979: 2, 998: 5, 999: 7, 1000: 8, 1001: 8, 1005: 2, 1008: 2, 1014: 8, 1017: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1227: 8, 1228: 8, 1235: 8, 1237: 8, 1263: 8, 1264: 8, 1279: 8, 1408: 8, 1409: 8, 1410: 8, 1552: 8, 1553: 8, 1554: 8, 1555: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1596: 8, 1597: 8, 1600: 8, 1656: 8, 1664: 8, 1728: 8, 1745: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8, 2015: 8, 2024: 8 }], CAR.RAV4H: [{ 36: 8, 37: 8, 170: 8, 180: 8, 186: 4, 296: 8, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 547: 8, 548: 8, 550: 8, 552: 4, 560: 7, 562: 4, 581: 5, 608: 8, 610: 5, 643: 7, 705: 8, 713: 8, 725: 2, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 897: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 918: 7, 921: 8, 933: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 3, 955: 8, 956: 8, 979: 2, 998: 5, 999: 7, 1000: 8, 1001: 8, 1005: 2, 1008: 2, 1014: 8, 1017: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1184: 8, 1185: 8, 1186: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1212: 8, 1227: 8, 1228: 8, 1232: 8, 1235: 8, 1237: 8, 1263: 8, 1264: 8, 1279: 8, 1408: 8, 1409: 8, 1410: 8, 1552: 8, 1553: 8, 1554: 8, 1555: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1596: 8, 1597: 8, 1600: 8, 1656: 8, 1664: 8, 1728: 8, 1745: 8, 1779: 8, 1872: 8, 1880: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, # Chinese RAV4 { 36: 8, 37: 8, 170: 8, 180: 8, 186: 4, 355: 5, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 512: 6, 513: 6, 547: 8, 548: 8, 552: 4, 562: 4, 608: 8, 610: 5, 643: 7, 705: 8, 725: 2, 740: 5, 742: 8, 743: 8, 767: 4, 800: 8, 830: 7, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 897: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 921: 8, 922: 8, 933: 8, 944: 8, 945: 8, 951: 8, 955: 8, 956: 8, 979: 2, 998: 5, 999: 7, 1000: 8, 1001: 8, 1008: 2, 1017: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1207: 8, 1227: 8, 1235: 8, 1263: 8, 1279: 8, 1552: 8, 1553: 8, 1554: 8, 
1555: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1596: 8, 1597: 8, 1600: 8, 1664: 8, 1728: 8, 1745: 8, 1779: 8 }], CAR.PRIUS: [{ 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 512: 6, 513: 6, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 614: 8, 643: 7, 658: 8, 713: 8, 740: 5, 742: 8, 743: 8, 767: 4, 800: 8, 810: 2, 814: 8, 824: 2, 829: 2, 830: 7, 835: 8, 836: 8, 845: 5, 863: 8, 869: 7, 870: 7, 871: 2, 898: 8, 900: 6, 902: 6, 905: 8, 913: 8, 918: 8, 921: 8, 933: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 974: 8, 975: 5, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1005: 2, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1076: 8, 1077: 8, 1082: 8, 1083: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1175: 8, 1227: 8, 1228: 8, 1235: 8, 1237: 8, 1264: 8, 1279: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1595: 8, 1777: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, #2019 LE { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 614: 8, 643: 7, 658: 8, 713: 8, 740: 5, 742: 8, 743: 8, 767: 4, 800: 8, 810: 2, 814: 8, 829: 2, 830: 7, 835: 8, 836: 8, 863: 8, 865: 8, 869: 7, 870: 7, 871: 2, 896: 8, 898: 8, 900: 6, 902: 6, 905: 8, 918: 8, 921: 8, 933: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 975: 5, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1076: 8, 1077: 8, 1082: 8, 1083: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1175: 8, 1227: 8, 1228: 8, 1235: 8, 1237: 8, 1279: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1592: 8, 1595: 8, 1777: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, # 2020 Prius Prime LE { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 643: 7, 713: 8, 740: 5, 742: 8, 743: 8, 764: 8, 767: 4, 800: 8, 810: 2, 824: 8, 829: 2, 830: 7, 835: 8, 836: 8, 863: 8, 865: 8, 869: 7, 870: 7, 871: 2, 896: 8, 898: 8, 900: 6, 902: 6, 905: 8, 913: 8, 921: 8, 933: 8, 934: 8, 935: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 974: 8, 975: 5, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1076: 8, 1077: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1227: 8, 1235: 8, 1279: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1592: 8, 1595: 8, 1649: 8, 1777: 8, 1779: 8, 1786: 8, 1787: 8, 1788: 8, 1789: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, #2020 Prius Prime Limited { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 614: 8, 643: 7, 658: 8, 713: 8, 740: 5, 742: 8, 743: 8, 767: 4, 800: 8, 810: 2, 814: 8, 824: 2, 829: 2, 830: 7, 835: 8, 836: 8, 863: 8, 865: 8, 869: 7, 870: 7, 871: 2, 896: 8, 898: 8, 900: 6, 902: 6, 905: 8, 913: 8, 918: 8, 921: 8, 933: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 974: 8, 975: 5, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 
1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1076: 8, 1077: 8, 1082: 8, 1083: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1175: 8, 1227: 8, 1228: 8, 1235: 8, 1237: 8, 1279: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1592: 8, 1595: 8, 1649: 8, 1777: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8, 2015: 8, 2024: 8, 2026: 8, 2027: 8, 2029: 8, 2030: 8, 2031: 8 }, #2020 Central Europe Prime { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 643: 7, 713: 8, 740: 5, 742: 8, 743: 8, 764: 8, 767: 4, 800: 8, 810: 2, 818: 8, 824: 8, 829: 2, 830: 7, 835: 8, 836: 8, 845: 5, 863: 8, 865: 8, 869: 7, 870: 7, 871: 2, 889: 8, 896: 8, 898: 8, 900: 6, 902: 6, 905: 8, 913: 8, 921: 8, 933: 8, 934: 8, 935: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 8, 974: 8, 975: 5, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1076: 8, 1077: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1227: 8, 1235: 8, 1237: 8, 1264: 8, 1279: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1592: 8, 1595: 8, 1777: 8, 1779: 8, 1786: 8, 1787: 8, 1788: 8, 1789: 8 }, #2017 German Prius { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 614: 8, 643: 7, 658: 8, 713: 8, 740: 5, 742: 8, 743: 8, 767: 4, 800: 8, 810: 2, 814: 8, 829: 2, 830: 7, 835: 8, 836: 8, 845: 5, 863: 8, 869: 7, 870: 7, 871: 2, 898: 8, 900: 6, 902: 6, 905: 8, 913: 8, 918: 8, 921: 8, 933: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 975: 5, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1005: 2, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1077: 8, 1082: 8, 1083: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1175: 8, 1227: 8, 1228: 8, 1235: 8, 1237: 8, 1264: 8, 1279: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1595: 8, 1777: 8, 1779: 8, 1792: 8, 1767: 4, 1863: 8, 1904: 8, 1912: 8, 1984: 8, 1988: 8, 1990: 8, 1992: 8, 1996: 8, 1998: 8, 2002: 8, 2010: 8, 2015: 8, 2016: 8, 2018: 8, 2024: 8, 2026: 8, 2030: 8 }], #Corolla w/ added Pedal Support (512L and 513L) CAR.COROLLA: [{ 36: 8, 37: 8, 170: 8, 180: 8, 186: 4, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 512: 6, 513: 6, 547: 8, 548: 8, 552: 4, 608: 8, 610: 5, 643: 7, 705: 8, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 897: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 2, 921: 8, 933: 8, 944: 8, 945: 8, 951: 8, 955: 4, 956: 8, 979: 2, 992: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1017: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1196: 8, 1227: 8, 1235: 8, 1279: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1596: 8, 1597: 8, 1600: 8, 1664: 8, 1728: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8, 2016: 8, 2017: 8, 2018: 8, 2019: 8, 2020: 8, 2021: 8, 2022: 8, 2023: 8, 2024: 8 }], CAR.CAMRY: [ #XLE and LE { 36: 8, 37: 8, 119: 6, 170: 8, 180: 8, 186: 4, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 550: 8, 552: 4, 562: 6, 608: 8, 610: 8, 643: 
7, 658: 8, 705: 8, 728: 8, 740: 5, 761: 8, 764: 8, 767: 4, 800: 8, 810: 2, 812: 8, 814: 8, 818: 8, 822: 8, 824: 8, 830: 7, 835: 8, 836: 8, 869: 7, 870: 7, 871: 2, 888: 8, 889: 8, 891: 8, 898: 8, 900: 6, 902: 6, 905: 8, 918: 8, 921: 8, 933: 8, 934: 8, 935: 8, 944: 8, 945: 8, 951: 8, 955: 8, 956: 8, 976: 1, 983: 8, 984: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1011: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1059: 1, 1076: 8, 1077: 8, 1082: 8, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1228: 8, 1235: 8, 1237: 8, 1263: 8, 1264: 8, 1279: 8, 1412: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1595: 8, 1745: 8, 1779: 8, 1786: 8, 1787: 8, 1788: 8, 1789: 8, 1808: 8, 1816: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, #XSE and SE # TODO: get proper fingerprint in stock mode { 36: 8, 37: 8, 114: 5, 119: 6, 120: 4, 170: 8, 180: 8, 186: 4, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 550: 8, 552: 4, 562: 6, 608: 8, 610: 8, 643: 7, 658: 8, 705: 8, 728: 8, 740: 5, 761: 8, 764: 8, 767: 4, 800: 8, 810: 2, 812: 8, 814: 8, 818: 8, 822: 8, 824: 8, 830: 7, 835: 8, 836: 8, 869: 7, 870: 7, 888: 8, 889: 8, 891: 8, 898: 8, 900: 6, 902: 6, 905: 8, 918: 8, 921: 8, 933: 8, 934: 8, 935: 8, 944: 8, 945: 8, 951: 8, 955: 8, 956: 8, 976: 1, 983: 8, 984: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1011: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1056: 8, 1059: 1, 1076: 8, 1077: 8, 1082: 8, 1114: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1228: 8, 1237: 8, 1263: 8, 1264: 8, 1279: 8, 1412: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1595: 8, 1745: 8, 1779: 8, 1786: 8, 1787: 8, 1788: 8, 1789: 8, 1808: 8, 1816: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, { # 2019 XSE 36: 8, 37: 8, 170: 8, 180: 8, 186: 4, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 550: 8, 552: 4, 562: 6, 608: 8, 610: 8, 643: 7, 658: 8, 705: 8, 728: 8, 740: 5, 761: 8, 764: 8, 767: 4, 800: 8, 810: 2, 812: 8, 814: 8, 818: 8, 822: 8, 824: 8, 830: 7, 835: 8, 836: 8, 865: 8, 869: 7, 870: 7, 871: 2, 888: 8, 889: 8, 891: 8, 896: 8, 898: 8, 900: 6, 902: 6, 905: 8, 918: 8, 921: 8, 933: 8, 934: 8, 935: 8, 942: 8, 944: 8, 945: 8, 951: 8, 955: 8, 956: 8, 976: 1, 983: 8, 984: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1011: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1059: 1, 1076: 8, 1077: 8, 1082: 8, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1228: 8, 1235: 8, 1237: 8, 1263: 8, 1264: 8, 1279: 8, 1412: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1592: 8, 1594: 8, 1595: 8, 1649: 8, 1745: 8, 1779: 8, 1786: 8, 1787: 8, 1788: 8, 1789: 8, 1792: 8, 1767: 4, 1808: 8, 1816: 8, 1872: 8, 1880: 8, 1904: 8, 1912: 8, 1937: 8, 1945: 8, 1953: 8, 1961: 8, 1968: 8, 1976: 8, 1990: 8, 1998: 8, 2015: 8, 2016: 8, 2024: 8 }], CAR.CAMRYH: [ #SE, LE and LE with Blindspot Monitor { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 643: 7, 713: 8, 728: 8, 740: 5, 761: 8, 764: 8, 767: 4, 800: 8, 810: 2, 812: 8, 818: 8, 824: 8, 829: 2, 830: 7, 835: 8, 836: 8, 865: 8, 869: 7, 870: 7, 871: 2, 889: 8, 896: 8, 898: 8, 900: 6, 902: 6, 905: 8, 921: 8, 933: 8, 934: 8, 935: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 975: 5, 983: 8, 984: 8, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1011: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 
1, 1071: 8, 1076: 8, 1077: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1235: 8, 1237: 8, 1264: 8, 1279: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1592: 8, 1594: 8, 1595: 8, 1649: 8, 1745: 8, 1779: 8, 1786: 8, 1787: 8, 1788: 8, 1789: 8, 1808: 8, 1810: 8, 1816: 8, 1818: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, #SL { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 643: 7, 713: 8, 728: 8, 740: 5, 761: 8, 764: 8, 767: 4, 800: 8, 810: 2, 812: 8, 818: 8, 824: 8, 829: 2, 830: 7, 835: 8, 836: 8, 869: 7, 870: 7, 871: 2, 888: 8, 889: 8, 898: 8, 900: 6, 902: 6, 905: 8, 913: 8, 918: 8, 921: 8, 933: 8, 934: 8, 935: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 975: 5, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1076: 8, 1077: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1228: 8, 1235: 8, 1237: 8, 1264: 8, 1279: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1595: 8, 1745: 8, 1779: 8, 1786: 8, 1787: 8, 1788: 8, 1789: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, #XLE { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 643: 7, 658: 8, 713: 8, 728: 8, 740: 5, 761: 8, 764: 8, 767: 4, 800: 8, 810: 2, 812: 8, 814: 8, 818: 8, 824: 8, 829: 2, 830: 7, 835: 8, 836: 8, 869: 7, 870: 7, 871: 2, 888: 8, 889: 8, 898: 8, 900: 6, 902: 6, 905: 8, 918: 8, 921: 8, 933: 8, 934: 8, 935: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 975: 5, 983: 8, 984: 8, 993: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1011: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1076: 8, 1077: 8, 1082: 8, 1084: 8, 1085: 8, 1086: 8, 1114: 8, 1132: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1228: 8, 1235: 8, 1237: 8, 1264: 8, 1279: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1595: 8, 1745: 8, 1779: 8, 1786: 8, 1787: 8, 1788: 8, 1789: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }], CAR.HIGHLANDER: [{ 36: 8, 37: 8, 114: 5, 119: 6, 120: 4, 170: 8, 180: 8, 186: 4, 238: 4, 355: 5, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 545: 5, 550: 8, 552: 4, 608: 8, 610: 5, 643: 7, 705: 8, 725: 2, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 921: 8, 922: 8, 933: 8, 944: 8, 945: 8, 951: 8, 955: 8, 956: 8, 979: 2, 998: 5, 999: 7, 1000: 8, 1001: 8, 1008: 2, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1182: 8, 1183: 8, 1189: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1206: 8, 1207: 8, 1212: 8, 1227: 8, 1235: 8, 1237: 8, 1279: 8, 1408: 8, 1409: 8, 1410: 8, 1552: 8, 1553: 8, 1554: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1599: 8, 1656: 8, 1666: 8, 1667: 8, 1728: 8, 1745: 8, 1779: 8, 1872: 8, 1880: 8, 1904: 8, 1912: 8, 1984: 8, 1988: 8, 1992: 8, 1996: 8, 1990: 8, 1998: 8 }, # 2019 Highlander XLE { 36: 8, 37: 8, 114: 5, 119: 6, 120: 4, 
170: 8, 180: 8, 186: 4, 238: 4, 355: 5, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 545: 5, 550: 8, 552: 4, 608: 8, 610: 5, 643: 7, 705: 8, 725: 2, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 921: 8, 922: 8, 933: 8, 944: 8, 945: 8, 951: 8, 955: 8, 956: 8, 979: 2, 992: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1008: 2, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1076: 8, 1077: 8, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1182: 8, 1183: 8, 1189: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1206: 8, 1207: 8, 1212: 8, 1227: 8, 1235: 8, 1237: 8, 1279: 8, 1408: 8, 1409: 8, 1410: 8, 1552: 8, 1553: 8, 1554: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1599: 8, 1656: 8, 1728: 8, 1745: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, # 2017 Highlander Limited { 36: 8, 37: 8, 114: 5, 119: 6, 120: 4, 170: 8, 180: 8, 186: 4, 238: 4, 355: 5, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 545: 5, 550: 8, 552: 4, 608: 8, 610: 5, 643: 7, 705: 8, 725: 2, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 918: 7, 921: 8, 922: 8, 933: 8, 944: 8, 945: 8, 951: 8, 955: 8, 956: 8, 979: 2, 998: 5, 999: 7, 1000: 8, 1001: 8, 1005: 2, 1008: 2, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1182: 8, 1183: 8, 1189: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1206: 8, 1207: 8, 1212: 8, 1227: 8, 1235: 8, 1237: 8, 1264: 8, 1279: 8, 1408: 8, 1409: 8, 1410: 8, 1552: 8, 1553: 8, 1554: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1599: 8, 1656: 8, 1728: 8, 1745: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, # 2018 Highlander Limited Platinum { 36: 8, 37: 8, 114: 5, 119: 6, 120: 4, 170: 8, 180: 8, 186: 4, 238: 4, 355: 5, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 545: 5, 550: 8, 552: 4, 608: 8, 610: 5, 643: 7, 705: 8, 725: 2, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 918: 7, 921: 8, 922: 8, 933: 8, 944: 8, 945: 8, 951: 8, 955: 8, 956: 8, 979: 2, 998: 5, 999: 7, 1000: 8, 1001: 8, 1008: 2, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1182: 8, 1183: 8, 1189: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1206: 8, 1207: 8, 1212: 8, 1227: 8, 1235: 8, 1237: 8, 1263: 8, 1279: 8, 1408: 8, 1409: 8, 1410: 8, 1552: 8, 1553: 8, 1554: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1585: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1599: 8, 1656: 8, 1728: 8, 1745: 8, 1779: 8, 1872: 8, 1880: 8, 1904: 8, 1912: 8, 1988: 8, 1990: 8, 1996: 8, 1998: 8, 2015: 8, 2016: 8, 2024: 8 }], CAR.HIGHLANDERH: [{ 36: 8, 37: 8, 170: 8, 180: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 581: 5, 608: 8, 610: 5, 643: 7, 713: 8, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 897: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 918: 7, 921: 8, 933: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 3, 
955: 8, 956: 8, 979: 2, 998: 5, 999: 7, 1000: 8, 1001: 8, 1005: 2, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1059: 1, 1112: 8, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1184: 8, 1185: 8, 1186: 8, 1189: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1206: 8, 1212: 8, 1227: 8, 1232: 8, 1235: 8, 1237: 8, 1263: 8, 1264: 8, 1279: 8, 1552: 8, 1553: 8, 1554: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1599: 8, 1656: 8, 1728: 8, 1745: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, { # 2019 Highlander Hybrid Limited Platinum 36: 8, 37: 8, 170: 8, 180: 8, 296: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 581: 5, 608: 8, 610: 5, 643: 7, 713: 8, 740: 5, 767: 4, 800: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 897: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 3, 918: 7, 921: 8, 933: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 3, 955: 8, 956: 8, 979: 2, 992: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1043: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1076: 8, 1077: 8, 1112: 8, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1184: 8, 1185: 8, 1186: 8, 1189: 8, 1190: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1206: 8, 1212: 8, 1227: 8, 1232: 8, 1235: 8, 1237: 8, 1263: 8, 1279: 8, 1552: 8, 1553: 8, 1554: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1599: 8, 1656: 8, 1666: 8, 1667: 8, 1728: 8, 1745: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }], CAR.COROLLAH_TSS2: [ # 2019 Taiwan Altis Hybrid { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 401: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 643: 7, 713: 8, 728: 8, 740: 5, 742: 8, 743: 8, 761: 8, 765: 8, 767: 4, 800: 8, 810: 2, 829: 2, 830: 7, 835: 8, 836: 8, 865: 8, 869: 7, 870: 7, 871: 2, 877: 8, 881: 8, 885: 8, 896: 8, 898: 8, 918: 7, 921: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 975: 5, 987: 8, 993: 8, 1002: 8, 1014: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1082: 8, 1112: 8, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1172: 8, 1235: 8, 1237: 8, 1279: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1592: 8, 1594: 8, 1595: 8, 1745: 8, 1775: 8, 1779: 8 }, # 2019 Chinese Levin Hybrid { 36: 8, 37: 8, 166: 8, 170: 8, 180: 8, 295: 8, 296: 8, 401: 8, 426: 6, 452: 8, 466: 8, 467: 8, 550: 8, 552: 4, 560: 7, 562: 6, 581: 5, 608: 8, 610: 8, 643: 7, 713: 8, 728: 8, 740: 5, 742: 8, 743: 8, 761: 8, 765: 8, 767: 4, 800: 8, 810: 2, 812: 8, 829: 2, 830: 7, 835: 8, 836: 8, 865: 8, 869: 7, 870: 7, 871: 2, 877: 8, 881: 8, 885: 8, 896: 8, 898: 8, 921: 8, 944: 8, 945: 8, 950: 8, 951: 8, 953: 8, 955: 8, 956: 8, 971: 7, 975: 5, 993: 8, 1002: 8, 1017: 8, 1020: 8, 1041: 8, 1042: 8, 1044: 8, 1056: 8, 1057: 8, 1059: 1, 1071: 8, 1114: 8, 1161: 8, 1162: 8, 1163: 8, 1172: 8, 1235: 8, 1279: 8, 1541: 8, 1552: 8, 1553: 8, 1556: 8, 1557: 8, 1568: 8, 1570: 8, 1571: 8, 1572: 8, 1592: 8, 1594: 8, 1595: 8, 1600: 8, 1649: 8, 1745: 8, 1775: 8, 1779: 8 } ], CAR.SIENNA: [ { 36: 8, 37: 8, 114: 5, 119: 6, 120: 4, 170: 8, 180: 8, 186: 4, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 545: 5, 548: 8, 550: 8, 552: 4, 562: 4, 608: 8, 610: 5, 643: 7, 705: 8, 725: 2, 740: 5, 764: 8, 
767: 4, 800: 8, 824: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 888: 8, 896: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 1, 918: 7, 921: 8, 933: 8, 944: 6, 945: 8, 951: 8, 955: 8, 956: 8, 979: 2, 992: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1008: 2, 1014: 8, 1017: 8, 1041: 8, 1042: 8, 1043: 8, 1056: 8, 1059: 1, 1076: 8, 1077: 8, 1114: 8, 1160: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1182: 8, 1183: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1200: 8, 1201: 8, 1202: 8, 1203: 8, 1212: 8, 1227: 8, 1228: 8, 1235: 8, 1237: 8, 1279: 8, 1552: 8, 1553: 8, 1555: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1656: 8, 1664: 8, 1666: 8, 1667: 8, 1728: 8, 1745: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }, # XLE AWD 2018 { 36: 8, 37: 8, 114: 5, 119: 6, 120: 4, 170: 8, 180: 8, 186: 4, 238: 4, 426: 6, 452: 8, 464: 8, 466: 8, 467: 8, 544: 4, 545: 5, 548: 8, 550: 8, 552: 4, 562: 4, 608: 8, 610: 5, 643: 7, 705: 8, 725: 2, 740: 5, 764: 8, 767: 4, 800: 8, 824: 8, 835: 8, 836: 8, 849: 4, 869: 7, 870: 7, 871: 2, 896: 8, 900: 6, 902: 6, 905: 8, 911: 8, 916: 1, 921: 8, 933: 8, 944: 6, 945: 8, 951: 8, 955: 8, 956: 8, 979: 2, 992: 8, 998: 5, 999: 7, 1000: 8, 1001: 8, 1002: 8, 1008: 2, 1014: 8, 1017: 8, 1041: 8, 1042: 8, 1043: 8, 1056: 8, 1059: 1, 1076: 8, 1077: 8, 1114: 8, 1160: 8, 1161: 8, 1162: 8, 1163: 8, 1164: 8, 1165: 8, 1166: 8, 1167: 8, 1176: 8, 1177: 8, 1178: 8, 1179: 8, 1180: 8, 1181: 8, 1182: 8, 1183: 8, 1191: 8, 1192: 8, 1196: 8, 1197: 8, 1198: 8, 1199: 8, 1200: 8, 1201: 8, 1202: 8, 1203: 8, 1212: 8, 1227: 8, 1235: 8, 1237: 8, 1279: 8, 1552: 8, 1553: 8, 1555: 8, 1556: 8, 1557: 8, 1561: 8, 1562: 8, 1568: 8, 1569: 8, 1570: 8, 1571: 8, 1572: 8, 1584: 8, 1589: 8, 1592: 8, 1593: 8, 1595: 8, 1656: 8, 1664: 8, 1666: 8, 1667: 8, 1728: 8, 1745: 8, 1779: 8, 1904: 8, 1912: 8, 1990: 8, 1998: 8 }], } FW_VERSIONS = { CAR.AVALON: { (Ecu.esp, 0x7b0, None): [ b'F152607060\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881510701300\x00\x00\x00\x00', b'881510705100\x00\x00\x00\x00', b'881510705200\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B41051\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x7e0, None): [ b'\x0230721100\x00\x00\x00\x00\x00\x00\x00\x00A0C01000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230721200\x00\x00\x00\x00\x00\x00\x00\x00A0C01000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702000\x00\x00\x00\x00', b'8821F4702100\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0701100\x00\x00\x00\x00', b'8646F0703000\x00\x00\x00\x00', ], }, CAR.AVALON_2019: { (Ecu.esp, 0x7b0, None): [ b'F152607140\x00\x00\x00\x00\x00\x00', b'F152607171\x00\x00\x00\x00\x00\x00', b'F152607110\x00\x00\x00\x00\x00\x00', b'F152607180\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881510703200\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B41080\x00\x00\x00\x00\x00\x00', b'8965B07010\x00\x00\x00\x00\x00\x00', b'8965B41090\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x700, None): [ b'\x01896630735100\x00\x00\x00\x00', b'\x01896630725300\x00\x00\x00\x00', b'\x01896630738000\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0702100\x00\x00\x00\x00', ], }, CAR.AVALONH_2019: { (Ecu.esp, 0x7b0, None): [ b'F152641040\x00\x00\x00\x00\x00\x00', b'F152641061\x00\x00\x00\x00\x00\x00', b'F152641050\x00\x00\x00\x00\x00\x00', 
], (Ecu.dsu, 0x791, None): [ b'881510704200\x00\x00\x00\x00', b'881514107100\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B07010\x00\x00\x00\x00\x00\x00', b'8965B41090\x00\x00\x00\x00\x00\x00', b'8965B41070\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x700, None): [ b'\x02896630724000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x02896630737000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00', b'\x02896630728000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0702100\x00\x00\x00\x00', ], }, CAR.CAMRY: { (Ecu.engine, 0x700, None): [ b'\x018966306L3100\x00\x00\x00\x00', b'\x018966306L4200\x00\x00\x00\x00', b'\x018966306L5200\x00\x00\x00\x00', b'\x018966306P8000\x00\x00\x00\x00', b'\x018966306Q3100\x00\x00\x00\x00', b'\x018966306Q4000\x00\x00\x00\x00', b'\x018966306Q4100\x00\x00\x00\x00', b'\x018966306Q4200\x00\x00\x00\x00', b'\x018966333Q9200\x00\x00\x00\x00', b'\x018966333P3100\x00\x00\x00\x00', b'\x018966333P3200\x00\x00\x00\x00', b'\x018966333P4200\x00\x00\x00\x00', b'\x018966333P4300\x00\x00\x00\x00', b'\x018966333P4400\x00\x00\x00\x00', b'\x018966333P4500\x00\x00\x00\x00', b'\x018966333P4700\x00\x00\x00\x00', b'\x018966333Q6000\x00\x00\x00\x00', b'\x018966333Q6200\x00\x00\x00\x00', b'\x018966333Q6300\x00\x00\x00\x00', b'\x018966333W6000\x00\x00\x00\x00', ], (Ecu.engine, 0x7e0, None): [ b'\x02333P1100\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'8821F0601200 ', b'8821F0601300 ', b'8821F0602000 ', b'8821F0603300 ', b'8821F0604100 ', b'8821F0605200 ', b'8821F0607200 ', b'8821F0608000 ', b'8821F0608200 ', b'8821F0609100 ', ], (Ecu.esp, 0x7b0, None): [ b'F152606210\x00\x00\x00\x00\x00\x00', b'F152606230\x00\x00\x00\x00\x00\x00', b'F152606270\x00\x00\x00\x00\x00\x00', b'F152606290\x00\x00\x00\x00\x00\x00', b'F152606410\x00\x00\x00\x00\x00\x00', b'F152633540\x00\x00\x00\x00\x00\x00', b'F152633A10\x00\x00\x00\x00\x00\x00', b'F152633A20\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B33540\x00\x00\x00\x00\x00\x00', b'8965B33542\x00\x00\x00\x00\x00\x00', b'8965B33580\x00\x00\x00\x00\x00\x00', b'8965B33581\x00\x00\x00\x00\x00\x00', b'8965B33621\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ # Same as 0x791 b'8821F0601200 ', b'8821F0601300 ', b'8821F0602000 ', b'8821F0603300 ', b'8821F0604100 ', b'8821F0605200 ', b'8821F0607200 ', b'8821F0608000 ', b'8821F0608200 ', b'8821F0609100 ', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0601200 ', b'8646F0601300 ', b'8646F0601400 ', b'8646F0603400 ', b'8646F0604100 ', b'8646F0605000 ', b'8646F0606000 ', b'8646F0606100 ', b'8646F0607100 ', ], }, CAR.CAMRYH: { (Ecu.engine, 0x700, None): [ b'\x018966306Q6000\x00\x00\x00\x00', b'\x018966333N1100\x00\x00\x00\x00', b'\x018966333N4300\x00\x00\x00\x00', b'\x018966333X0000\x00\x00\x00\x00', b'\x018966333X4000\x00\x00\x00\x00', b'\x01896633T16000\x00\x00\x00\x00', b'\x028966306B2100\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966306B2300\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966306N8100\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966306N8200\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966306N8300\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966306N8400\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966306R5000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966306R5000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00', 
b'\x028966306R6000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966306R6000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00', b'\x028966306S0000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00', b'\x028966306S0100\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00', b'\x028966306S1100\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152633214\x00\x00\x00\x00\x00\x00', b'F152633660\x00\x00\x00\x00\x00\x00', b'F152633712\x00\x00\x00\x00\x00\x00', b'F152633713\x00\x00\x00\x00\x00\x00', b'F152633B51\x00\x00\x00\x00\x00\x00', b'F152633B60\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'8821F0601200 ', b'8821F0601300 ', b'8821F0603400 ', b'8821F0604000 ', b'8821F0604200 ', b'8821F0605200 ', b'8821F0606200 ', b'8821F0607200 ', b'8821F0608000 ', b'8821F0608200 ', b'8821F0609000 ', b'8821F0609100 ', ], (Ecu.eps, 0x7a1, None): [ b'8965B33540\x00\x00\x00\x00\x00\x00', b'8965B33542\x00\x00\x00\x00\x00\x00', b'8965B33550\x00\x00\x00\x00\x00\x00', b'8965B33551\x00\x00\x00\x00\x00\x00', b'8965B33580\x00\x00\x00\x00\x00\x00', b'8965B33581\x00\x00\x00\x00\x00\x00', b'8965B33611\x00\x00\x00\x00\x00\x00', b'8965B33621\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ # Same as 0x791 b'8821F0601200 ', b'8821F0601300 ', b'8821F0603400 ', b'8821F0604000 ', b'8821F0604200 ', b'8821F0605200 ', b'8821F0606200 ', b'8821F0607200 ', b'8821F0608000 ', b'8821F0608200 ', b'8821F0609000 ', b'8821F0609100 ', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0601200 ', b'8646F0601300 ', b'8646F0601400 ', b'8646F0603400 ', b'8646F0603500 ', b'8646F0604100 ', b'8646F0605000 ', b'8646F0606000 ', b'8646F0606100 ', b'8646F0607000 ', b'8646F0607100 ', ], }, CAR.CAMRY_TSS2: { (Ecu.eps, 0x7a1, None): [ b'8965B33630\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'\x01F152606370\x00\x00\x00\x00\x00\x00', b'\x01F152606390\x00\x00\x00\x00\x00\x00', b'\x01F152606400\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x700, None): [ b'\x018966306Q5000\x00\x00\x00\x00', b'\x018966306T3100\x00\x00\x00\x00', b'\x018966306T3200\x00\x00\x00\x00', b'\x018966306T4100\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 15): [ b'\x018821F6201200\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 109): [ b'\x028646F3305200\x00\x00\x00\x008646G5301200\x00\x00\x00\x00', b'\x028646F3305300\x00\x00\x00\x008646G5301200\x00\x00\x00\x00', ], }, CAR.CAMRYH_TSS2: { (Ecu.eps, 0x7a1, None): [ b'8965B33630\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152633D00\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x700, None): [ b'\x018966306Q6000\x00\x00\x00\x00', b'\x018966306Q7000\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 15): [ b'\x018821F6201200\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 109): [ b'\x028646F3305200\x00\x00\x00\x008646G5301200\x00\x00\x00\x00', b'\x028646F3305300\x00\x00\x00\x008646G5301200\x00\x00\x00\x00', ], }, CAR.CHR: { (Ecu.engine, 0x700, None): [ b'\x01896631017100\x00\x00\x00\x00', b'\x01896631017200\x00\x00\x00\x00', b'\x0189663F413100\x00\x00\x00\x00', b'\x0189663F414100\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'8821F0W01000 ', b'8821F0W01100 ', b'8821FF401600 ', b'8821FF404000 ', b'8821FF404100 ', b'8821FF405100 ', b'8821FF406000 ', b'8821FF407100 ', ], (Ecu.esp, 0x7b0, None): [ b'F152610020\x00\x00\x00\x00\x00\x00', b'F152610153\x00\x00\x00\x00\x00\x00', b'F152610210\x00\x00\x00\x00\x00\x00', b'F1526F4034\x00\x00\x00\x00\x00\x00', b'F1526F4044\x00\x00\x00\x00\x00\x00', b'F1526F4073\x00\x00\x00\x00\x00\x00', b'F1526F4121\x00\x00\x00\x00\x00\x00', b'F1526F4122\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 
0x7a1, None): [ b'8965B10011\x00\x00\x00\x00\x00\x00', b'8965B10040\x00\x00\x00\x00\x00\x00', b'8965B10070\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x7e0, None): [ b'\x0331024000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00', b'\x0331024000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203302\x00\x00\x00\x00', b'\x0331036000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203302\x00\x00\x00\x00', b'\x033F401100\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203102\x00\x00\x00\x00', b'\x033F424000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F0W01000 ', b'8821FF401600 ', b'8821FF404000 ', b'8821FF404100 ', b'8821FF405100 ', b'8821FF406000 ', b'8821FF407100 ', b'8821F0W01100 ', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646FF401800 ', b'8646FF404000 ', b'8646FF406000 ', b'8646FF407000 ', ], }, CAR.CHRH: { (Ecu.engine, 0x700, None): [ b'\x02896631013200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x0289663F405000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x0289663F418000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x0289663F423000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x0289663F431000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x0189663F438000\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152610013\x00\x00\x00\x00\x00\x00', b'F152610014\x00\x00\x00\x00\x00\x00', b'F152610040\x00\x00\x00\x00\x00\x00', b'F152610190\x00\x00\x00\x00\x00\x00', b'F152610200\x00\x00\x00\x00\x00\x00', b'F152610230\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'8821F0W01000 ', b'8821FF402300 ', b'8821FF402400 ', b'8821FF404000 ', b'8821FF406000 ', b'8821FF407100 ', ], (Ecu.eps, 0x7a1, None): [ b'8965B10011\x00\x00\x00\x00\x00\x00', b'8965B10020\x00\x00\x00\x00\x00\x00', b'8965B10040\x00\x00\x00\x00\x00\x00', b'8965B10050\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F0W01000 ', b'8821FF402300 ', b'8821FF402400 ', b'8821FF404000 ', b'8821FF406000 ', b'8821FF407100 ', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646FF402100 ', b'8646FF404000 ', b'8646FF406000 ', b'8646FF407000 ', ], }, CAR.COROLLA: { (Ecu.engine, 0x7e0, None): [ b'\x0230ZC2000\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230ZC2100\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230ZC2200\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230ZC2300\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230ZC3000\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230ZC3100\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230ZC3200\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230ZC3300\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0330ZC1200\x00\x00\x00\x00\x00\x00\x00\x0050212000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881510201100\x00\x00\x00\x00', b'881510201200\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152602190\x00\x00\x00\x00\x00\x00', b'F152602191\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B02181\x00\x00\x00\x00\x00\x00', b'8965B02191\x00\x00\x00\x00\x00\x00', b'8965B48150\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ 
b'8821F4702100\x00\x00\x00\x00', b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0201101\x00\x00\x00\x00', b'8646F0201200\x00\x00\x00\x00', ], }, CAR.COROLLA_TSS2: { (Ecu.engine, 0x700, None): [ b'\x01896630ZG2000\x00\x00\x00\x00', b'\x01896630ZG5000\x00\x00\x00\x00', b'\x01896630ZG5100\x00\x00\x00\x00', b'\x01896630ZG5200\x00\x00\x00\x00', b'\x01896630ZG5300\x00\x00\x00\x00', b'\x01896630ZP2000\x00\x00\x00\x00', b'\x01896630ZQ5000\x00\x00\x00\x00', b'\x018966312L8000\x00\x00\x00\x00', b'\x018966312M0000\x00\x00\x00\x00', b'\x018966312M9000\x00\x00\x00\x00', b'\x018966312P9000\x00\x00\x00\x00', b'\x018966312P9100\x00\x00\x00\x00', b'\x018966312P9200\x00\x00\x00\x00', b'\x018966312P9300\x00\x00\x00\x00', b'\x018966312Q2300\x00\x00\x00\x00', b'\x018966312R0000\x00\x00\x00\x00', b'\x018966312R0100\x00\x00\x00\x00', b'\x018966312R1000\x00\x00\x00\x00', b'\x018966312R1100\x00\x00\x00\x00', b'\x018966312R3100\x00\x00\x00\x00', b'\x018966312S5000\x00\x00\x00\x00', b'\x018966312S7000\x00\x00\x00\x00', b'\x018966312W3000\x00\x00\x00\x00', b'\x018966312W9000\x00\x00\x00\x00', ], (Ecu.engine, 0x7e0, None): [ b'\x0230ZN4000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x03312M3000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203402\x00\x00\x00\x00', b'\x03312N6000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203202\x00\x00\x00\x00', b'\x03312N6000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203302\x00\x00\x00\x00', b'\x03312N6000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203402\x00\x00\x00\x00', b'\x03312N6100\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203302\x00\x00\x00\x00', b'\x03312N6100\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00895231203402\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'\x018965B12350\x00\x00\x00\x00\x00\x00', b'\x018965B12470\x00\x00\x00\x00\x00\x00', b'\x018965B12490\x00\x00\x00\x00\x00\x00', b'\x018965B12500\x00\x00\x00\x00\x00\x00', b'\x018965B12520\x00\x00\x00\x00\x00\x00', b'\x018965B12530\x00\x00\x00\x00\x00\x00', b'\x018965B1255000\x00\x00\x00\x00', b'8965B12361\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'\x01F152602280\x00\x00\x00\x00\x00\x00', b'\x01F152602560\x00\x00\x00\x00\x00\x00', b'\x01F152602590\x00\x00\x00\x00\x00\x00', b'\x01F152602650\x00\x00\x00\x00\x00\x00', b'\x01F152612641\x00\x00\x00\x00\x00\x00', b'\x01F152612651\x00\x00\x00\x00\x00\x00', b'\x01F152612B10\x00\x00\x00\x00\x00\x00', b'\x01F152612B51\x00\x00\x00\x00\x00\x00', b'\x01F152612B60\x00\x00\x00\x00\x00\x00', b'\x01F152612B61\x00\x00\x00\x00\x00\x00', b'\x01F152612B90\x00\x00\x00\x00\x00\x00', b'\x01F152612C00\x00\x00\x00\x00\x00\x00', b'F152602191\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301100\x00\x00\x00\x00', b'\x018821F3301200\x00\x00\x00\x00', b'\x018821F3301300\x00\x00\x00\x00', b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F12010D0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F1201100\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F1201200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F1201300\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F1201400\x00\x00\x00\x008646G2601500\x00\x00\x00\x00', b'\x028646F1202000\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F1202100\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', 
b'\x028646F1202200\x00\x00\x00\x008646G2601500\x00\x00\x00\x00', ], }, CAR.COROLLAH_TSS2: { (Ecu.engine, 0x700, None): [ b'\x01896630ZJ1000\x00\x00\x00\x00', b'\x01896630ZU8000\x00\x00\x00\x00', b'\x01896637621000\x00\x00\x00\x00', b'\x01896637624000\x00\x00\x00\x00', b'\x01896637626000\x00\x00\x00\x00', b'\x01896637648000\x00\x00\x00\x00', b'\x02896630ZJ5000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896630ZN8000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896630ZQ3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896630ZR2000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896630ZT8000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896630ZT9000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x028966312Q3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x028966312Q4000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x038966312L7000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF1205001\x00\x00\x00\x00', b'\x038966312N1000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF1203001\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B12361\x00\x00\x00\x00\x00\x00', b'8965B12451\x00\x00\x00\x00\x00\x00', b'8965B16011\x00\x00\x00\x00\x00\x00', b'8965B76012\x00\x00\x00\x00\x00\x00', b'8965B76050\x00\x00\x00\x00\x00\x00', b'\x018965B12350\x00\x00\x00\x00\x00\x00', b'\x018965B12470\x00\x00\x00\x00\x00\x00', b'\x018965B12490\x00\x00\x00\x00\x00\x00', b'\x018965B12500\x00\x00\x00\x00\x00\x00', b'\x018965B12520\x00\x00\x00\x00\x00\x00', b'\x018965B12530\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152612590\x00\x00\x00\x00\x00\x00', b'F152612691\x00\x00\x00\x00\x00\x00', b'F152612692\x00\x00\x00\x00\x00\x00', b'F152612700\x00\x00\x00\x00\x00\x00', b'F152612710\x00\x00\x00\x00\x00\x00', b'F152612790\x00\x00\x00\x00\x00\x00', b'F152612800\x00\x00\x00\x00\x00\x00', b'F152612820\x00\x00\x00\x00\x00\x00', b'F152612840\x00\x00\x00\x00\x00\x00', b'F152612A00\x00\x00\x00\x00\x00\x00', b'F152612A10\x00\x00\x00\x00\x00\x00', b'F152616011\x00\x00\x00\x00\x00\x00', b'F152642540\x00\x00\x00\x00\x00\x00', b'F152676293\x00\x00\x00\x00\x00\x00', b'F152676303\x00\x00\x00\x00\x00\x00', b'F152676304\x00\x00\x00\x00\x00\x00', b'F152612D00\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301100\x00\x00\x00\x00', b'\x018821F3301200\x00\x00\x00\x00', b'\x018821F3301300\x00\x00\x00\x00', b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F12010D0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F1201100\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F1201300\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F1201400\x00\x00\x00\x008646G2601500\x00\x00\x00\x00', b'\x028646F1202000\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F1202100\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F1601100\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F4203400\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F76020C0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F7603100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F7603200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', ], }, CAR.HIGHLANDER: { (Ecu.engine, 0x700, None): [ b'\x01896630E09000\x00\x00\x00\x00', b'\x01896630E43000\x00\x00\x00\x00', b'\x01896630E43100\x00\x00\x00\x00', b'\x01896630E43200\x00\x00\x00\x00', b'\x01896630E44200\x00\x00\x00\x00', b'\x01896630E45000\x00\x00\x00\x00', b'\x01896630E45100\x00\x00\x00\x00', b'\x01896630E45200\x00\x00\x00\x00', b'\x01896630E46200\x00\x00\x00\x00', 
b'\x01896630E74000\x00\x00\x00\x00', b'\x01896630E75000\x00\x00\x00\x00', b'\x01896630E76000\x00\x00\x00\x00', b'\x01896630E77000\x00\x00\x00\x00', b'\x01896630E83000\x00\x00\x00\x00', b'\x01896630E84000\x00\x00\x00\x00', b'\x01896630E85000\x00\x00\x00\x00', b'\x01896630E86000\x00\x00\x00\x00', b'\x01896630E88000\x00\x00\x00\x00', b'\x01896630EA0000\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B48140\x00\x00\x00\x00\x00\x00', b'8965B48150\x00\x00\x00\x00\x00\x00', b'8965B48210\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [b'F15260E011\x00\x00\x00\x00\x00\x00'], (Ecu.dsu, 0x791, None): [ b'881510E01100\x00\x00\x00\x00', b'881510E01200\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702100\x00\x00\x00\x00', b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0E01200\x00\x00\x00\x00', b'8646F0E01300\x00\x00\x00\x00', ], }, CAR.HIGHLANDERH: { (Ecu.eps, 0x7a1, None): [ b'8965B48160\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152648541\x00\x00\x00\x00\x00\x00', b'F152648542\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x7e0, None): [ b'\x0230E40000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230E40100\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230EA2000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0230EA2100\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702100\x00\x00\x00\x00', b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0E01200\x00\x00\x00\x00', b'8646F0E01300\x00\x00\x00\x00', ], }, CAR.HIGHLANDER_TSS2: { (Ecu.eps, 0x7a1, None): [ b'8965B48241\x00\x00\x00\x00\x00\x00', b'8965B48310\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'\x01F15260E051\x00\x00\x00\x00\x00\x00', b'\x01F15260E110\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x700, None): [ b'\x01896630E62100\x00\x00\x00\x00', b'\x01896630E62200\x00\x00\x00\x00', b'\x01896630E64100\x00\x00\x00\x00', b'\x01896630E64200\x00\x00\x00\x00', b'\x01896630EB1000\x00\x00\x00\x00', b'\x01896630EB1100\x00\x00\x00\x00', b'\x01896630EB1200\x00\x00\x00\x00', b'\x01896630EB2000\x00\x00\x00\x00', b'\x01896630EB2100\x00\x00\x00\x00', b'\x01896630EB2200\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301400\x00\x00\x00\x00', b'\x018821F6201200\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F0E02100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F4803000\x00\x00\x00\x008646G5301200\x00\x00\x00\x00', ], }, CAR.HIGHLANDERH_TSS2: { (Ecu.eps, 0x7a1, None): [ b'8965B48241\x00\x00\x00\x00\x00\x00', b'8965B48310\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'\x01F15264872300\x00\x00\x00\x00', b'\x01F15264872400\x00\x00\x00\x00', b'\x01F15264872500\x00\x00\x00\x00', b'\x01F152648C6300\x00\x00\x00\x00', ], (Ecu.engine, 0x700, None): [ b'\x01896630EA1000\000\000\000\000', b'\x01896630EA1000\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00', b'\x02896630E66000\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00', b'\x02896630EB3000\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00', b'\x02896630EB3100\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301400\x00\x00\x00\x00', b'\x018821F6201200\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F0E02100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F4803000\x00\x00\x00\x008646G5301200\x00\x00\x00\x00', ], }, CAR.LEXUS_IS: { (Ecu.engine, 0x700, None): [ 
b'\x018966353M7100\x00\x00\x00\x00', b'\x018966353Q2000\x00\x00\x00\x00', b'\x018966353Q2300\x00\x00\x00\x00', b'\x018966353R1100\x00\x00\x00\x00', b'\x018966353R7100\x00\x00\x00\x00', b'\x018966353R8100\x00\x00\x00\x00', b'\x018966353Q4000\x00\x00\x00\x00', ], (Ecu.engine, 0x7e0, None): [ b'\x0232480000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02353P7000\x00\x00\x00\x00\x00\x00\x00\x00530J5000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02353P9000\x00\x00\x00\x00\x00\x00\x00\x00553C1000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152653301\x00\x00\x00\x00\x00\x00', b'F152653310\x00\x00\x00\x00\x00\x00', b'F152653330\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881515306200\x00\x00\x00\x00', b'881515306400\x00\x00\x00\x00', b'881515306500\x00\x00\x00\x00', b'881515307400\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B53270\x00\x00\x00\x00\x00\x00', b'8965B53271\x00\x00\x00\x00\x00\x00', b'8965B53280\x00\x00\x00\x00\x00\x00', b'8965B53281\x00\x00\x00\x00\x00\x00', b'8965B53311\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702300\x00\x00\x00\x00', b'8821F4702100\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F5301300\x00\x00\x00\x00', b'8646F5301400\x00\x00\x00\x00', b'8646F5301200\x00\x00\x00\x00', ], }, CAR.PRIUS: { (Ecu.engine, 0x700, None): [ b'\x02896634761000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634761100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634761200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634762000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634763000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634763100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634765000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634765100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634769100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634769200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634770000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634774000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634774100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634774200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634782000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x02896634784000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x028966347A0000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x028966347A5000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x028966347A8000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x028966347B0000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x03896634759100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00', b'\x03896634759200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00', b'\x03896634759200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00', b'\x03896634759300\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00', b'\x03896634760000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701002\x00\x00\x00\x00', b'\x03896634760000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00', b'\x03896634760000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00', b'\x03896634760100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00', b'\x03896634760200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00', 
b'\x03896634760200\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00', b'\x03896634760300\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701004\x00\x00\x00\x00', b'\x03896634768000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4703001\x00\x00\x00\x00', b'\x03896634768000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4703002\x00\x00\x00\x00', b'\x03896634768100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4703002\x00\x00\x00\x00', b'\x03896634785000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4705001\x00\x00\x00\x00', b'\x03896634785000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710001\x00\x00\x00\x00', b'\x03896634786000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4705001\x00\x00\x00\x00', b'\x03896634786000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710001\x00\x00\x00\x00', b'\x03896634789000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4703002\x00\x00\x00\x00', b'\x038966347A3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4701003\x00\x00\x00\x00', b'\x038966347A3000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4707001\x00\x00\x00\x00', b'\x038966347B6000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710001\x00\x00\x00\x00', b'\x038966347B7000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710001\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B47021\x00\x00\x00\x00\x00\x00', b'8965B47022\x00\x00\x00\x00\x00\x00', b'8965B47023\x00\x00\x00\x00\x00\x00', b'8965B47050\x00\x00\x00\x00\x00\x00', b'8965B47060\x00\x00\x00\x00\x00\x00', # This is the EPS with good angle sensor ], (Ecu.esp, 0x7b0, None): [ b'F152647290\x00\x00\x00\x00\x00\x00', b'F152647300\x00\x00\x00\x00\x00\x00', b'F152647310\x00\x00\x00\x00\x00\x00', b'F152647414\x00\x00\x00\x00\x00\x00', b'F152647415\x00\x00\x00\x00\x00\x00', b'F152647416\x00\x00\x00\x00\x00\x00', b'F152647417\x00\x00\x00\x00\x00\x00', b'F152647470\x00\x00\x00\x00\x00\x00', b'F152647490\x00\x00\x00\x00\x00\x00', b'F152647683\x00\x00\x00\x00\x00\x00', b'F152647684\x00\x00\x00\x00\x00\x00', b'F152647862\x00\x00\x00\x00\x00\x00', b'F152647863\x00\x00\x00\x00\x00\x00', b'F152647864\x00\x00\x00\x00\x00\x00', b'F152647865\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881514702300\x00\x00\x00\x00', b'881514703100\x00\x00\x00\x00', b'881514704100\x00\x00\x00\x00', b'881514706000\x00\x00\x00\x00', b'881514706100\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702000\x00\x00\x00\x00', b'8821F4702100\x00\x00\x00\x00', b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F4701300\x00\x00\x00\x00', b'8646F4702001\x00\x00\x00\x00', b'8646F4702100\x00\x00\x00\x00', b'8646F4702200\x00\x00\x00\x00', b'8646F4705000\x00\x00\x00\x00', b'8646F4705200\x00\x00\x00\x00', ], }, CAR.RAV4: { (Ecu.engine, 0x7e0, None): [ b'\x02342Q1000\x00\x00\x00\x00\x00\x00\x00\x0054212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342Q1100\x00\x00\x00\x00\x00\x00\x00\x0054212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342Q1200\x00\x00\x00\x00\x00\x00\x00\x0054212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342Q1300\x00\x00\x00\x00\x00\x00\x00\x0054212000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342Q2000\x00\x00\x00\x00\x00\x00\x00\x0054213000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342Q2100\x00\x00\x00\x00\x00\x00\x00\x0054213000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342Q2200\x00\x00\x00\x00\x00\x00\x00\x0054213000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342Q4000\x00\x00\x00\x00\x00\x00\x00\x0054215000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ 
b'8965B42063\x00\x00\x00\x00\x00\x00', b'8965B42073\x00\x00\x00\x00\x00\x00', b'8965B42082\x00\x00\x00\x00\x00\x00', b'8965B42083\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F15260R102\x00\x00\x00\x00\x00\x00', b'F15260R103\x00\x00\x00\x00\x00\x00', b'F152642493\x00\x00\x00\x00\x00\x00', b'F152642492\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881514201200\x00\x00\x00\x00', b'881514201300\x00\x00\x00\x00', b'881514201400\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702000\x00\x00\x00\x00', b'8821F4702100\x00\x00\x00\x00', b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F4201200\x00\x00\x00\x00', b'8646F4202001\x00\x00\x00\x00', b'8646F4202100\x00\x00\x00\x00', b'8646F4204000\x00\x00\x00\x00', ], }, CAR.RAV4H: { (Ecu.engine, 0x7e0, None): [ b'\x02342N9000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342N9100\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02342P0000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B42102\x00\x00\x00\x00\x00\x00', b'8965B42103\x00\x00\x00\x00\x00\x00', b'8965B42112\x00\x00\x00\x00\x00\x00', b'8965B42162\x00\x00\x00\x00\x00\x00', b'8965B42163\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152642090\x00\x00\x00\x00\x00\x00', b'F152642110\x00\x00\x00\x00\x00\x00', b'F152642120\x00\x00\x00\x00\x00\x00', b'F152642400\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881514202200\x00\x00\x00\x00', b'881514202300\x00\x00\x00\x00', b'881514202400\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702000\x00\x00\x00\x00', b'8821F4702100\x00\x00\x00\x00', b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F4201100\x00\x00\x00\x00', b'8646F4201200\x00\x00\x00\x00', b'8646F4202001\x00\x00\x00\x00', b'8646F4202100\x00\x00\x00\x00', b'8646F4204000\x00\x00\x00\x00', ], }, CAR.RAV4_TSS2: { (Ecu.engine, 0x700, None): [ b'\x01896630R58000\x00\x00\x00\x00', b'\x01896630R58100\x00\x00\x00\x00', b'\x018966342E2000\x00\x00\x00\x00', b'\x018966342M8000\x00\x00\x00\x00', b'\x018966342S9000\x00\x00\x00\x00', b'\x018966342T1000\x00\x00\x00\x00', b'\x018966342T6000\x00\x00\x00\x00', b'\x018966342T9000\x00\x00\x00\x00', b'\x018966342U4000\x00\x00\x00\x00', b'\x018966342U4100\x00\x00\x00\x00', b'\x018966342V3000\x00\x00\x00\x00', b'\x018966342V3100\x00\x00\x00\x00', b'\x018966342V3200\x00\x00\x00\x00', b'\x01896634A05000\x00\x00\x00\x00', b'\x01896634A19000\x00\x00\x00\x00', b'\x01896634A19100\x00\x00\x00\x00', b'\x01896634A20000\x00\x00\x00\x00', b'\x01896634A20100\x00\x00\x00\x00', b'\x01896634A22000\x00\x00\x00\x00', b'\x01896634A22100\x00\x00\x00\x00', b'\x01896634A30000\x00\x00\x00\x00', b'\x01896634A44000\x00\x00\x00\x00', b'\x01896634A45000\x00\x00\x00\x00', b'\x01896634A46000\x00\x00\x00\x00', b'\x028966342M7000\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00', b'\x028966342T0000\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00', b'\x028966342V1000\x00\x00\x00\x00897CF1202001\x00\x00\x00\x00', b'\x028966342Y8000\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00', b'\x02896634A18000\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00', b'\x02896634A18100\x00\x00\x00\x00897CF1201001\x00\x00\x00\x00', b'\x02896634A43000\x00\x00\x00\x00897CF4201001\x00\x00\x00\x00', b'\x02896634A47000\x00\x00\x00\x00897CF4201001\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'\x01F15260R210\x00\x00\x00\x00\x00\x00', b'\x01F15260R220\x00\x00\x00\x00\x00\x00', 
b'\x01F15260R290\x00\x00\x00\x00\x00\x00', b'\x01F15260R300\x00\x00\x00\x00\x00\x00', b'\x01F152642551\x00\x00\x00\x00\x00\x00', b'\x01F152642561\x00\x00\x00\x00\x00\x00', b'\x01F152642700\x00\x00\x00\x00\x00\x00', b'\x01F152642701\x00\x00\x00\x00\x00\x00', b'\x01F152642710\x00\x00\x00\x00\x00\x00', b'\x01F152642711\x00\x00\x00\x00\x00\x00', b'\x01F152642750\x00\x00\x00\x00\x00\x00', b'\x01F152642751\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B42170\x00\x00\x00\x00\x00\x00', b'8965B42171\x00\x00\x00\x00\x00\x00', b'8965B42180\x00\x00\x00\x00\x00\x00', b'8965B42181\x00\x00\x00\x00\x00\x00', b'\x028965B0R01200\x00\x00\x00\x008965B0R02200\x00\x00\x00\x00', b'\x028965B0R01300\x00\x00\x00\x008965B0R02300\x00\x00\x00\x00', b'\x028965B0R01400\x00\x00\x00\x008965B0R02400\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301100\x00\x00\x00\x00', b'\x018821F3301200\x00\x00\x00\x00', b'\x018821F3301300\x00\x00\x00\x00', b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F4203200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F4203300\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F4203400\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F4203500\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F4203700\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F4203800\x00\x00\x00\x008646G2601500\x00\x00\x00\x00', ], }, CAR.RAV4H_TSS2: { (Ecu.engine, 0x700, None): [ b'\x01896634A15000\x00\x00\x00\x00', b'\x018966342M5000\x00\x00\x00\x00', b'\x018966342W8000\x00\x00\x00\x00', b'\x018966342X5000\x00\x00\x00\x00', b'\x018966342X6000\x00\x00\x00\x00', b'\x01896634A25000\x00\x00\x00\x00', b'\x018966342W5000\x00\x00\x00\x00', b'\x028966342W4001\x00\x00\x00\x00897CF1203001\x00\x00\x00\x00', b'\x02896634A13001\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00', b'\x02896634A13101\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00', b'\x02896634A14001\x00\x00\x00\x00897CF1203001\x00\x00\x00\x00', b'\x02896634A23000\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02896634A23001\x00\x00\x00\x00897CF1203001\x00\x00\x00\x00', b'\x02896634A14001\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00', b'\x02896634A14101\x00\x00\x00\x00897CF4801001\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152642291\x00\x00\x00\x00\x00\x00', b'F152642290\x00\x00\x00\x00\x00\x00', b'F152642322\x00\x00\x00\x00\x00\x00', b'F152642330\x00\x00\x00\x00\x00\x00', b'F152642331\x00\x00\x00\x00\x00\x00', b'F152642531\x00\x00\x00\x00\x00\x00', b'F152642532\x00\x00\x00\x00\x00\x00', b'F152642520\x00\x00\x00\x00\x00\x00', b'F152642521\x00\x00\x00\x00\x00\x00', b'F152642540\x00\x00\x00\x00\x00\x00', b'F152642541\x00\x00\x00\x00\x00\x00', b'F152642542\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B42170\x00\x00\x00\x00\x00\x00', b'8965B42171\x00\x00\x00\x00\x00\x00', b'8965B42180\x00\x00\x00\x00\x00\x00', b'8965B42181\x00\x00\x00\x00\x00\x00', b'\x028965B0R01200\x00\x00\x00\x008965B0R02200\x00\x00\x00\x00', b'\x028965B0R01300\x00\x00\x00\x008965B0R02300\x00\x00\x00\x00', b'\x028965B0R01400\x00\x00\x00\x008965B0R02400\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301100\x00\x00\x00\x00', b'\x018821F3301200\x00\x00\x00\x00', b'\x018821F3301300\x00\x00\x00\x00', b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F4203200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F4203300\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', 
b'\x028646F4203400\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F4203500\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F4203700\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F4203800\x00\x00\x00\x008646G2601500\x00\x00\x00\x00', ], }, CAR.LEXUS_ES_TSS2: { (Ecu.engine, 0x700, None): [ b'\x01896630EC9100\x00\x00\x00\x00', b'\x018966333T5000\x00\x00\x00\x00', b'\x018966333T5100\x00\x00\x00\x00', b'\x018966333X6000\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'\x01F152606281\x00\x00\x00\x00\x00\x00', b'\x01F152606340\x00\x00\x00\x00\x00\x00', b'\x01F15260E031\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B33252\x00\x00\x00\x00\x00\x00', b'8965B33590\x00\x00\x00\x00\x00\x00', b'8965B48271\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301100\x00\x00\x00\x00', b'\x018821F3301200\x00\x00\x00\x00', b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F33030D0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F3303200\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F3304100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F4810200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', ], }, CAR.SIENNA: { (Ecu.engine, 0x700, None): [ b'\x01896630832100\x00\x00\x00\x00', b'\x01896630832200\x00\x00\x00\x00', b'\x01896630838000\x00\x00\x00\x00', b'\x01896630838100\x00\x00\x00\x00', b'\x01896630842000\x00\x00\x00\x00', b'\x01896630843000\x00\x00\x00\x00', b'\x01896630851000\x00\x00\x00\x00', b'\x01896630851100\x00\x00\x00\x00', b'\x01896630852100\x00\x00\x00\x00', b'\x01896630859000\x00\x00\x00\x00', b'\x01896630860000\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B45070\x00\x00\x00\x00\x00\x00', b'8965B45080\x00\x00\x00\x00\x00\x00', b'8965B45082\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152608130\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881510801100\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702100\x00\x00\x00\x00', b'8821F4702200\x00\x00\x00\x00', b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F0801100\x00\x00\x00\x00', ], }, CAR.LEXUS_CTH: { (Ecu.dsu, 0x791, None): [ b'881517601100\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152676144\x00\x00\x00\x00\x00\x00', ], (Ecu.engine, 0x7e0, None): [ b'\x0237635000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F7601100\x00\x00\x00\x00', ], }, CAR.LEXUS_ESH_TSS2: { (Ecu.engine, 0x700, None): [ b'\x028966333S8000\x00\x00\x00\x00897CF3302002\x00\x00\x00\x00', b'\x028966333T0100\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00', b'\x028966333V4000\x00\x00\x00\x00897CF3305001\x00\x00\x00\x00', b'\x02896633T09000\x00\x00\x00\x00897CF3307001\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152633423\x00\x00\x00\x00\x00\x00', b'F152633680\x00\x00\x00\x00\x00\x00', b'F152633681\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B33252\x00\x00\x00\x00\x00\x00', b'8965B33590\x00\x00\x00\x00\x00\x00', b'8965B33690\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301100\x00\x00\x00\x00', b'\x018821F3301200\x00\x00\x00\x00', b'\x018821F3301300\x00\x00\x00\x00', b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F33030D0\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', b'\x028646F3303100\x00\x00\x00\x008646G26011A0\x00\x00\x00\x00', 
b'\x028646F3304100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F3304200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', ], }, CAR.LEXUS_ESH: { (Ecu.engine, 0x7e0, None): [ b'\x02333M4200\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152633171\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881513310400\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B33512\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4701100\x00\x00\x00\x00', b'8821F4701300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F3302001\x00\x00\x00\x00', b'8646F3302200\x00\x00\x00\x00', ], }, CAR.LEXUS_NX: { (Ecu.engine, 0x700, None): [ b'\x01896637851000\x00\x00\x00\x00', b'\x01896637852000\x00\x00\x00\x00', b'\x01896637854000\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152678130\x00\x00\x00\x00\x00\x00', b'F152678140\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881517803100\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B78060\x00\x00\x00\x00\x00\x00', b'8965B78080\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702100\x00\x00\x00\x00', b'8821F4702300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F7801100\x00\x00\x00\x00', ], }, CAR.LEXUS_NX_TSS2: { (Ecu.engine, 0x700, None): [ b'\x018966378B2100\x00\x00\x00\x00', b'\x018966378G3000\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'\x01F152678221\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B78120\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b"\x018821F3301400\x00\x00\x00\x00", ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F78030A0\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', b'\x028646F7803100\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', ], }, CAR.LEXUS_NXH: { (Ecu.engine, 0x7e0, None): [ b'\x0237882000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0237841000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0237886000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0237880000\x00\x00\x00\x00\x00\x00\x00\x00A4701000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152678160\x00\x00\x00\x00\x00\x00', b'F152678170\x00\x00\x00\x00\x00\x00', b'F152678171\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881517804300\x00\x00\x00\x00', b'881517804100\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B78060\x00\x00\x00\x00\x00\x00', b'8965B78080\x00\x00\x00\x00\x00\x00', b'8965B78100\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4702300\x00\x00\x00\x00', b'8821F4702100\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F7801300\x00\x00\x00\x00', b'8646F7801100\x00\x00\x00\x00', ], }, CAR.LEXUS_RX: { (Ecu.engine, 0x700, None): [ b'\x01896630E36200\x00\x00\x00\x00', b'\x01896630E36300\x00\x00\x00\x00', b'\x01896630E37200\x00\x00\x00\x00', b'\x01896630E37300\x00\x00\x00\x00', b'\x01896630E41000\x00\x00\x00\x00', b'\x01896630E41100\x00\x00\x00\x00', b'\x01896630E41200\x00\x00\x00\x00', b'\x01896630EA3100\x00\x00\x00\x00', b'\x01896630EA4100\x00\x00\x00\x00', b'\x01896630EA4300\x00\x00\x00\x00', b'\x01896630EA6300\x00\x00\x00\x00', b'\x018966348R1300\x00\x00\x00\x00', b'\x018966348R8500\x00\x00\x00\x00', b'\x018966348W1300\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152648472\x00\x00\x00\x00\x00\x00', b'F152648473\x00\x00\x00\x00\x00\x00', b'F152648492\x00\x00\x00\x00\x00\x00', b'F152648493\x00\x00\x00\x00\x00\x00', 
b'F152648474\x00\x00\x00\x00\x00\x00', b'F152648630\x00\x00\x00\x00\x00\x00', b'F152648494\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881514810300\x00\x00\x00\x00', b'881514810500\x00\x00\x00\x00', b'881514810700\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B0E011\x00\x00\x00\x00\x00\x00', b'8965B0E012\x00\x00\x00\x00\x00\x00', b'8965B48102\x00\x00\x00\x00\x00\x00', b'8965B48112\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4701000\x00\x00\x00\x00', b'8821F4701100\x00\x00\x00\x00', b'8821F4701200\x00\x00\x00\x00', b'8821F4701300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F4801100\x00\x00\x00\x00', b'8646F4801200\x00\x00\x00\x00', b'8646F4802001\x00\x00\x00\x00', b'8646F4802100\x00\x00\x00\x00', b'8646F4802200\x00\x00\x00\x00', b'8646F4809000\x00\x00\x00\x00', ], }, CAR.LEXUS_RXH: { (Ecu.engine, 0x7e0, None): [ b'\x02348J7000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02348N0000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02348Q4000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02348Q4100\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02348T1100\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02348T3000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02348V6000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x02348Z3000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152648361\x00\x00\x00\x00\x00\x00', b'F152648501\x00\x00\x00\x00\x00\x00', b'F152648502\x00\x00\x00\x00\x00\x00', b'F152648504\x00\x00\x00\x00\x00\x00', b'F152648740\x00\x00\x00\x00\x00\x00', b'F152648A30\x00\x00\x00\x00\x00\x00', ], (Ecu.dsu, 0x791, None): [ b'881514811300\x00\x00\x00\x00', b'881514811500\x00\x00\x00\x00', b'881514811700\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B0E011\x00\x00\x00\x00\x00\x00', b'8965B0E012\x00\x00\x00\x00\x00\x00', b'8965B48111\x00\x00\x00\x00\x00\x00', b'8965B48112\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'8821F4701000\x00\x00\x00\x00', b'8821F4701100\x00\x00\x00\x00', b'8821F4701200\x00\x00\x00\x00', b'8821F4701300\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'8646F4801200\x00\x00\x00\x00', b'8646F4802001\x00\x00\x00\x00', b'8646F4802100\x00\x00\x00\x00', b'8646F4802200\x00\x00\x00\x00', b'8646F4809000\x00\x00\x00\x00', ], }, CAR.LEXUS_RX_TSS2: { (Ecu.engine, 0x700, None): [ b'\x01896630EC9000\x00\x00\x00\x00', b'\x01896634D12000\x00\x00\x00\x00', b'\x01896630EB0000\x00\x00\x00\x00', b'\x01896630EA9000\x00\x00\x00\x00', b'\x01896630ED0000\x00\x00\x00\x00', b'\x018966348W9000\x00\x00\x00\x00', b'\x01896634D12100\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'\x01F152648801\x00\x00\x00\x00\x00\x00', b'\x01F15260E031\x00\x00\x00\x00\x00\x00', b'\x01F15260E041\x00\x00\x00\x00\x00\x00', b'\x01F152648781\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B48271\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301100\x00\x00\x00\x00', b'\x018821F3301300\x00\x00\x00\x00', b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F4810200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F4810100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', ], }, CAR.LEXUS_RXH_TSS2: { (Ecu.engine, 0x7e0, None): [ 
b'\x02348X8000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0234D14000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', b'\x0234D16000\x00\x00\x00\x00\x00\x00\x00\x00A4802000\x00\x00\x00\x00\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152648831\x00\x00\x00\x00\x00\x00', b'F152648D00\x00\x00\x00\x00\x00\x00', b'F152648D60\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B48271\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F4810200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F4810100\x00\x00\x00\x008646G2601200\x00\x00\x00\x00', ], }, CAR.PRIUS_TSS2: { (Ecu.engine, 0x700, None): [ b'\x028966347C8000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00', b'\x038966347C0000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710101\x00\x00\x00\x00', b'\x038966347C1000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4710101\x00\x00\x00\x00', b'\x038966347C5000\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4707101\x00\x00\x00\x00', b'\x038966347C5100\x00\x00\x00\x008966A4703000\x00\x00\x00\x00897CF4707101\x00\x00\x00\x00', ], (Ecu.esp, 0x7b0, None): [ b'F152647500\x00\x00\x00\x00\x00\x00', b'F152647510\x00\x00\x00\x00\x00\x00', b'F152647520\x00\x00\x00\x00\x00\x00', b'F152647521\x00\x00\x00\x00\x00\x00', ], (Ecu.eps, 0x7a1, None): [ b'8965B47070\x00\x00\x00\x00\x00\x00', ], (Ecu.fwdRadar, 0x750, 0xf): [ b'\x018821F3301400\x00\x00\x00\x00', ], (Ecu.fwdCamera, 0x750, 0x6d): [ b'\x028646F4707000\x00\x00\x00\x008646G2601400\x00\x00\x00\x00', b'\x028646F4710000\x00\x00\x00\x008646G2601500\x00\x00\x00\x00', ], }, CAR.MIRAI: { (Ecu.esp, 0x7D1, None): [b'\x01898A36203000\x00\x00\x00\x00',], (Ecu.esp, 0x7B0, None): [b'\x01F15266203200\x00\x00\x00\x00',], # a second ESP ECU (Ecu.eps, 0x7A1, None): [b'\x028965B6204100\x00\x00\x00\x008965B6203100\x00\x00\x00\x00',], (Ecu.fwdRadar, 0x750, 0xf): [b'\x018821F6201200\x00\x00\x00\x00',], (Ecu.fwdCamera, 0x750, 0x6d): [b'\x028646F6201400\x00\x00\x00\x008646G5301200\x00\x00\x00\x00',], }, CAR.ALPHARD_TSS2: { (Ecu.engine, 0x7e0, None): [b'\x0235883000\x00\x00\x00\x00\x00\x00\x00\x00A0202000\x00\x00\x00\x00\x00\x00\x00\x00',], (Ecu.eps, 0x7a1, None): [b'8965B58040\x00\x00\x00\x00\x00\x00',], (Ecu.fwdRadar, 0x750, 0xf): [b'\x018821F3301400\x00\x00\x00\x00',], (Ecu.fwdCamera, 0x750, 0x6d): [b'\x028646F5803200\x00\x00\x00\x008646G2601400\x00\x00\x00\x00',], }, } STEER_THRESHOLD = 100 DBC = { CAR.RAV4H: dbc_dict('toyota_rav4_hybrid_2017_pt_generated', 'toyota_adas'), CAR.RAV4: dbc_dict('toyota_rav4_2017_pt_generated', 'toyota_adas'), CAR.PRIUS: dbc_dict('toyota_prius_2017_pt_generated', 'toyota_adas'), CAR.COROLLA: dbc_dict('toyota_corolla_2017_pt_generated', 'toyota_adas'), CAR.LEXUS_RX: dbc_dict('lexus_rx_350_2016_pt_generated', 'toyota_adas'), CAR.LEXUS_RXH: dbc_dict('lexus_rx_hybrid_2017_pt_generated', 'toyota_adas'), CAR.LEXUS_RX_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'), CAR.LEXUS_RXH_TSS2: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_tss2_adas'), CAR.CHR: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'), CAR.CHRH: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_adas'), CAR.CAMRY: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'), CAR.CAMRYH: dbc_dict('toyota_camry_hybrid_2018_pt_generated', 'toyota_adas'), CAR.CAMRY_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'), CAR.CAMRYH_TSS2: dbc_dict('toyota_nodsu_hybrid_pt_generated', 
                           'toyota_tss2_adas'),
  CAR.HIGHLANDER: dbc_dict('toyota_highlander_2017_pt_generated', 'toyota_adas'),
  CAR.HIGHLANDER_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
  CAR.HIGHLANDERH: dbc_dict('toyota_highlander_hybrid_2018_pt_generated', 'toyota_adas'),
  CAR.HIGHLANDERH_TSS2: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_tss2_adas'),
  CAR.AVALON: dbc_dict('toyota_avalon_2017_pt_generated', 'toyota_adas'),
  CAR.AVALON_2019: dbc_dict('toyota_nodsu_pt_generated', 'toyota_adas'),
  CAR.AVALONH_2019: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_adas'),
  CAR.RAV4_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
  CAR.COROLLA_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
  CAR.COROLLAH_TSS2: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_tss2_adas'),
  CAR.LEXUS_ES_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
  CAR.LEXUS_ESH_TSS2: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_tss2_adas'),
  CAR.LEXUS_ESH: dbc_dict('lexus_ct200h_2018_pt_generated', 'toyota_adas'),
  CAR.SIENNA: dbc_dict('toyota_sienna_xle_2018_pt_generated', 'toyota_adas'),
  CAR.LEXUS_IS: dbc_dict('lexus_is_2018_pt_generated', 'toyota_adas'),
  CAR.LEXUS_CTH: dbc_dict('lexus_ct200h_2018_pt_generated', 'toyota_adas'),
  CAR.RAV4H_TSS2: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_tss2_adas'),
  CAR.LEXUS_NXH: dbc_dict('lexus_nx300h_2018_pt_generated', 'toyota_adas'),
  CAR.LEXUS_NX: dbc_dict('lexus_nx300_2018_pt_generated', 'toyota_adas'),
  CAR.LEXUS_NX_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
  CAR.PRIUS_TSS2: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_tss2_adas'),
  CAR.MIRAI: dbc_dict('toyota_nodsu_hybrid_pt_generated', 'toyota_tss2_adas'),
  CAR.ALPHARD_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
}

# Toyota/Lexus Safety Sense 2.0 and 2.5
TSS2_CAR = set([CAR.RAV4_TSS2, CAR.COROLLA_TSS2, CAR.COROLLAH_TSS2, CAR.LEXUS_ES_TSS2, CAR.LEXUS_ESH_TSS2,
                CAR.RAV4H_TSS2, CAR.LEXUS_RX_TSS2, CAR.LEXUS_RXH_TSS2, CAR.HIGHLANDER_TSS2,
                CAR.HIGHLANDERH_TSS2, CAR.PRIUS_TSS2, CAR.CAMRY_TSS2, CAR.CAMRYH_TSS2, CAR.MIRAI,
                CAR.LEXUS_NX_TSS2, CAR.ALPHARD_TSS2])

NO_DSU_CAR = TSS2_CAR | set([CAR.CHR, CAR.CHRH, CAR.CAMRY, CAR.CAMRYH])

# no resume button press required
NO_STOP_TIMER_CAR = TSS2_CAR | set([CAR.RAV4H, CAR.HIGHLANDERH, CAR.HIGHLANDER, CAR.SIENNA, CAR.LEXUS_ESH])
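
# --- Illustrative sketch (editor's addition, not part of the original file) ---
# A minimal example of how the tables above are typically consumed, assuming
# dbc_dict() returns a mapping with 'pt' and 'radar' keys (inferred from how it
# is called above) and that `candidate` is one of the CAR.* identifiers used as
# keys. The function and dict keys below are hypothetical names for illustration.
def describe_candidate(candidate):
  dbc = DBC[candidate]
  return {
    'pt_dbc': dbc['pt'],        # powertrain DBC name (assumed key)
    'radar_dbc': dbc['radar'],  # ADAS/radar DBC name (assumed key)
    'is_tss2': candidate in TSS2_CAR,        # TSS 2.0/2.5 platform
    'has_dsu': candidate not in NO_DSU_CAR,  # separate DSU ECU present
    'needs_resume_press': candidate not in NO_STOP_TIMER_CAR,
    # FW_VERSIONS is keyed per car by (Ecu, CAN address, sub-address); each
    # value lists the firmware version strings observed for that ECU.
    'n_fw_ecus': len(FW_VERSIONS.get(candidate, {})),
  }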
CAR
passwordService.js
const userService = require("../services/userService");
const encryptionService = require("../services/encryptionService");

exports.decryptPasswords = async (passwords, user_id) => {
  const key = await userService.getUserPrivateKey(user_id);
  return passwords.map((password) => {
    password.password = encryptionService.decrypt(password.password, key);
    return password;
  });
};
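
// --- Illustrative usage sketch (editor's addition, not part of the original module) ---
// decryptPasswords resolves the caller's private key once, then decrypts each
// row's `password` field in place (note: it mutates the row objects it is given).
// The repository helper and exact row shape below are assumptions for
// illustration only, not part of the original repository.
//
//   const passwordService = require("../services/passwordService");
//
//   async function listPasswords(req, res) {
//     const rows = await passwordRepository.findByUserId(req.user.id); // hypothetical helper
//     const decrypted = await passwordService.decryptPasswords(rows, req.user.id);
//     res.json(decrypted);
//   }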
objlookup.go
//+build ignore

package main

// This file is the input to TestObjValueLookup in source_test.go,
// which ensures that each occurrence of an ident defining or
// referring to a func, var or const object can be mapped to its
// corresponding SSA Value.
//
// For every reference to a var object, we use annotations in comments
// to denote both the expected SSA Value kind, and whether to expect
// its value (x) or its address (&x).
//
// For const and func objects, the results don't vary by reference and
// are always values not addresses, so no annotations are needed. The
// declaration is enough.

import "fmt"
import "os"

type J int

func (*J) method() {}

const globalConst = 0

var globalVar int // &globalVar::Global

func globalFunc()
type I interface {
	interfaceMethod()
}

type S struct {
	x int // x::nil
}

func main() {
	print(globalVar) // globalVar::UnOp
	globalVar = 1    // globalVar::Const

	var v0 int = 1 // v0::Const (simple local value spec)
	if v0 > 0 {    // v0::Const
		v0 = 2 // v0::Const
	}
	print(v0) // v0::Phi

	// v1 is captured and thus implicitly address-taken.
	var v1 int = 1  // v1::Const
	v1 = 2          // v1::Const
	fmt.Println(v1) // v1::UnOp (load)

	f := func(param int) { // f::MakeClosure param::Parameter
		if y := 1; y > 0 { // y::Const
			print(v1, param) // v1::UnOp (load) param::Parameter
		}
		param = 2      // param::Const
		println(param) // param::Const
	}

	f(0) // f::MakeClosure

	var v2 int // v2::Const (implicitly zero-initialized local value spec)
	print(v2)  // v2::Const

	m := make(map[string]int) // m::MakeMap

	// Local value spec with multi-valued RHS:
	var v3, v4 = m[""] // v3::Extract v4::Extract m::MakeMap
	print(v3)          // v3::Extract
	print(v4)          // v4::Extract

	v3++    // v3::BinOp (assign with op)
	v3 += 2 // v3::BinOp (assign with op)

	v5, v6 := false, "" // v5::Const v6::Const (defining assignment)
	print(v5)           // v5::Const
	print(v6)           // v6::Const

	var v7 S    // &v7::Alloc
	v7.x = 1    // &v7::Alloc x::Const
	print(v7.x) // v7::UnOp x::Field

	var v8 [1]int // &v8::Alloc
	v8[0] = 0     // &v8::Alloc
	print(v8[:])  // &v8::Alloc
	_ = v8[0]     // v8::UnOp (load from Alloc)
	_ = v8[:][0]  // &v8::Alloc
	v8ptr := &v8  // v8ptr::Alloc &v8::Alloc
	_ = v8ptr[0]  // v8ptr::Alloc
	_ = *v8ptr    // v8ptr::Alloc

	v8a := make([]int, 1) // v8a::MakeSlice
	v8a[0] = 0            // v8a::MakeSlice
	print(v8a[:])         // v8a::MakeSlice

	v9 := S{} // &v9::Alloc

	v10 := &v9 // v10::Alloc &v9::Alloc
	_ = v10    // v10::Alloc

	var v11 *J = nil // v11::Const
	v11.method()     // v11::Const

	var v12 J    // &v12::Alloc
	v12.method() // &v12::Alloc (implicitly address-taken)

	// NB, in the following, 'method' resolves to the *types.Func
	// of (*J).method, so it doesn't help us locate the specific
	// ssa.Values here: a bound-method closure and a promotion
	// wrapper.
	_ = v11.method // v11::Const
	_ = (*struct{ J }).method

	// These vars are optimised away.
	if false {
		v13 := 0     // v13::nil
		println(v13) // v13::nil
	}

	switch x := 1; x { // x::Const
	case v0: // v0::Phi
	}

	for k, v := range m { // k::Extract v::Extract m::MakeMap
		_ = k // k::Extract
		v++   // v::BinOp
	}

	if y := 0; y > 1 { // y::Const y::Const
	}

	var i interface{}      // i::Const (nil interface)
	i = 1                  // i::MakeInterface
	switch i := i.(type) { // i::MakeInterface i::MakeInterface
	case int:
		println(i) // i::Extract
	}

	ch := make(chan int) // ch::MakeChan
	select {
	case x := <-ch: // x::UnOp (receive) ch::MakeChan
		_ = x // x::UnOp
	}

	// .Op is an inter-package FieldVal-selection.
	var err os.PathError // &err::Alloc
	_ = err.Op           // err::UnOp Op::Field
	_ = &err.Op          // &err::Alloc &Op::FieldAddr

	// Exercise corner-cases of lvalues vs rvalues.
	// (Guessing IsAddr from the 'pointerness' won't cut it here.)
	type N *N
	var n N    // n::Const
	n1 := n    // n1::Const n::Const
	n2 := &n1  // n2::Alloc &n1::Alloc
	n3 := *n2  // n3::UnOp n2::Alloc
	n4 := **n3 // n4::UnOp n3::UnOp
	_ = n4     // n4::UnOp
}
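
// --- Illustrative sketch (editor's addition, kept commented out so the file
// above still compiles unchanged) ---
// The annotations above follow a small grammar: each whitespace-separated
// field in a trailing comment is "name::Kind", where a leading '&' on the
// name means the test expects the variable's address rather than its value.
// A parser in that spirit might look like the following; parseAnnotations is
// a hypothetical helper, not the actual harness in source_test.go.
// (Requires: import "strings"; strings.Cut needs Go 1.18+.)
//
//	func parseAnnotations(comment string) map[string]string {
//		want := make(map[string]string) // name (possibly &-prefixed) -> expected SSA value kind
//		for _, field := range strings.Fields(comment) {
//			if name, kind, ok := strings.Cut(field, "::"); ok {
//				want[name] = kind
//			}
//		}
//		return want
//	}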
{}