file_name | prefix | suffix | middle
---|---|---|---|
logger.ts | import { existsSync, appendFileSync, mkdirSync } from 'fs'
import { resolve } from 'path'
import bot from './bot'
const ADMIN_ID = process.env.ADMIN_ID
let errorsToReport = <Array<string>>[]
async function sendLogToMessage (): Promise<void> {
if (errorsToReport.length >= 1) {
const chunks = []
for (let numb = -1; errorsToReport.length > 0;) { | chunks[++numb] = errPart
}
for (const chunk of chunks)
await bot.api.sendMessage(ADMIN_ID, chunk).catch(err => saveLogAsFile(String(err)))
}
}
function saveLogAsFile (log: string): void {
const logsFolder = resolve('logs')
// create a logs directory if it doesn't exist
!existsSync(logsFolder) && mkdirSync(logsFolder)
const filename = new Date().toLocaleDateString().replace(/\./g, '-')
appendFileSync(logsFolder + `/${filename}.log`, log)
}
/**
* Error handler
* @param {Error} err - destructured to its `stack` string
* @param {any} [ctx] - optional context, currently unused
* @param {boolean} [saveAsFile=false] - true: save the log as a file without sending it to the admin PM; false: leave the choice to the handler
*/
export default ({ stack }: Error, ctx?: any, saveAsFile: boolean = false): void => {
if (!stack) return;
// add datetime and new line
const errLog = `[${new Date().toLocaleString()}] ${stack}\r\n`;
// if saveAsFile is truthy (or there is no admin to message), save the log as a file instead of sending it as a message
if ((typeof saveAsFile === 'boolean' && saveAsFile) || !ADMIN_ID) return saveLogAsFile(errLog)
errorsToReport.push(errLog)
}
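// Example usage (a sketch; the grammY-style `bot.catch` hook is an assumption, not shown in this file):
// import handleError from './logger'
// bot.catch(({ error }) => handleError(error as Error)) // queue for the admin PM digest
// handleError(new Error('boom'), undefined, true) // force-save to logs/<date>.log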
ADMIN_ID && setInterval(sendLogToMessage, +process.env.ERROR_MS_INTERVAL || 600000) | const errPart = errorsToReport.shift()
if (chunks[numb] && (chunks[numb].length + errPart.length <= 4000)) {
chunks[numb] += "=".repeat(24) + '\r\n' + errPart
} else |
TestServerUpgrade.py | '''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import StringIO
import sys
from ambari_commons.exceptions import FatalException
from unittest import TestCase
from mock.mock import patch, MagicMock
from ambari_server.serverUpgrade import set_current, SetCurrentVersionOptions, upgrade_stack
import ambari_server
class TestServerUpgrade(TestCase):
| @patch("ambari_server.serverUpgrade.is_server_runing")
@patch('ambari_server.serverUpgrade.SetCurrentVersionOptions.no_finalize_options_set')
@patch('ambari_server.serverUpgrade.get_validated_string_input')
@patch('ambari_server.serverUpgrade.get_ambari_properties')
@patch('ambari_server.serverUtils.get_ambari_server_api_base')
@patch('ambari_commons.logging_utils.get_verbose')
@patch('urllib2.urlopen')
def test_set_current(self, urlopen_mock, get_verbose_mock, get_ambari_server_api_base_mock,
get_ambari_properties_mock, get_validated_string_input_mock,
no_finalize_options_set_mock, is_server_runing_mock):
options = MagicMock()
options.cluster_name = 'cc'
options.desired_repo_version = 'HDP-2.2.2.0-2561'
options.force_repo_version = None
# Case when server is not running
is_server_runing_mock.return_value = False, None
try:
set_current(options)
self.fail("Server is not running - should error out")
except FatalException:
pass # expected
is_server_runing_mock.return_value = True, 11111
# Test insufficient options case
no_finalize_options_set_mock.return_value = True
try:
set_current(options)
self.fail("Should error out")
except FatalException:
pass # expected
no_finalize_options_set_mock.return_value = False
# Test normal flow
get_validated_string_input_mock.return_value = 'dummy_string'
p = get_ambari_properties_mock.return_value
p.get_property.side_effect = ["8080", "false"]
get_ambari_server_api_base_mock.return_value = 'http://127.0.0.1:8080/api/v1/'
get_verbose_mock.return_value = False
set_current(options)
self.assertTrue(urlopen_mock.called)
request = urlopen_mock.call_args_list[0][0][0]
self.assertEquals(request._Request__original, 'http://127.0.0.1:8080/api/v1/clusters/cc/stack_versions')
self.assertEquals(request.data, '{"ClusterStackVersions": {"state": "CURRENT", "repository_version": "HDP-2.2.2.0-2561", "force": false}}')
self.assertEquals(request.origin_req_host, '127.0.0.1')
self.assertEquals(request.headers, {'X-requested-by': 'ambari', 'Authorization': 'Basic ZHVtbXlfc3RyaW5nOmR1bW15X3N0cmluZw=='})
@patch("ambari_server.serverUpgrade.is_server_runing")
@patch('ambari_server.serverUpgrade.SetCurrentVersionOptions.no_finalize_options_set')
@patch('ambari_server.serverUpgrade.get_validated_string_input')
@patch('ambari_server.serverUpgrade.get_ambari_properties')
@patch('ambari_server.serverUtils.get_ambari_server_api_base')
@patch('ambari_commons.logging_utils.get_verbose')
@patch('urllib2.urlopen')
def test_set_current_with_force(self, urlopen_mock, get_verbose_mock, get_ambari_server_api_base_mock,
get_ambari_properties_mock, get_validated_string_input_mock,
no_finalize_options_set_mock, is_server_runing_mock):
options = MagicMock()
options.cluster_name = 'cc'
options.desired_repo_version = 'HDP-2.2.2.0-2561'
options.force_repo_version = True
# Case when server is not running
is_server_runing_mock.return_value = False, None
try:
set_current(options)
self.fail("Server is not running - should error out")
except FatalException:
pass # expected
is_server_runing_mock.return_value = True, 11111
# Test insufficient options case
no_finalize_options_set_mock.return_value = True
try:
set_current(options)
self.fail("Should error out")
except FatalException:
pass # expected
no_finalize_options_set_mock.return_value = False
# Test normal flow
get_validated_string_input_mock.return_value = 'dummy_string'
p = get_ambari_properties_mock.return_value
p.get_property.side_effect = ["8080", "false"]
get_ambari_server_api_base_mock.return_value = 'http://127.0.0.1:8080/api/v1/'
get_verbose_mock.return_value = False
set_current(options)
self.assertTrue(urlopen_mock.called)
request = urlopen_mock.call_args_list[0][0][0]
self.assertEquals(request._Request__original, 'http://127.0.0.1:8080/api/v1/clusters/cc/stack_versions')
self.assertEquals(request.data, '{"ClusterStackVersions": {"state": "CURRENT", "repository_version": "HDP-2.2.2.0-2561", "force": true}}')
self.assertEquals(request.origin_req_host, '127.0.0.1')
self.assertEquals(request.headers, {'X-requested-by': 'ambari', 'Authorization': 'Basic ZHVtbXlfc3RyaW5nOmR1bW15X3N0cmluZw=='})
@patch("ambari_server.serverUpgrade.run_os_command")
@patch("ambari_server.serverUpgrade.get_java_exe_path")
@patch("ambari_server.serverConfiguration.get_ambari_properties")
@patch("ambari_server.serverUpgrade.get_ambari_properties")
@patch("ambari_server.serverUpgrade.check_database_name_property")
@patch("ambari_server.serverUpgrade.is_root")
def test_upgrade_stack(self, is_root_mock, c_d_n_p_mock, up_g_a_p_mock, server_g_a_p_mock, java_path_mock, run_os_mock):
run_os_mock.return_value = 0, "", ""
java_path_mock.return_value = ""
is_root_mock.return_value = True
def do_nothing():
pass
c_d_n_p_mock.side_effect = do_nothing
p = ambari_server.properties.Properties()
p._props = {
ambari_server.serverConfiguration.JDBC_DATABASE_PROPERTY: "mysql",
ambari_server.serverConfiguration.JDBC_DATABASE_NAME_PROPERTY: "ambari"
}
up_g_a_p_mock.side_effect = [p, p]
server_g_a_p_mock.side_effect = [p]
args = ["upgrade_stack", "HDP-2.3"]
upgrade_stack(args)
self.assertTrue(run_os_mock.called)
command = run_os_mock.call_args_list[0][0][0]
self.assertTrue("StackUpgradeHelper" in command and "HDP" in command and "2.3" in command)
def testCurrentVersionOptions(self):
# Negative test cases
options = MagicMock()
options.cluster_name = None
options.desired_repo_version = 'HDP-2.2.2.0-2561'
cvo = SetCurrentVersionOptions(options)
self.assertTrue(cvo.no_finalize_options_set())
options = MagicMock()
options.cluster_name = 'cc'
options.desired_repo_version = None
cvo = SetCurrentVersionOptions(options)
self.assertTrue(cvo.no_finalize_options_set())
# Positive test case
options = MagicMock()
options.cluster_name = 'cc'
options.desired_repo_version = 'HDP-2.2.2.0-2561'
cvo = SetCurrentVersionOptions(options)
self.assertFalse(cvo.no_finalize_options_set()) |
|
index.js | import React, { useState } from 'react';
import InputMask from 'react-input-mask';
import { Link, useHistory } from 'react-router-dom';
import { FiArrowLeft } from 'react-icons/fi'
import api from '../../services/api';
import Logo from '../../assets/logo.svg';
import './styles.css';
export default function Register(){
const [name, setName] = useState('');
const [email, setEmail] = useState('');
const [whatsapp, setWhatsapp] = useState('');
const [city, setCity] = useState('');
const [uf, setUf] = useState('');
const history = useHistory();
async function handleRegister(e){
e.preventDefault();
const data = {
name,
email,
whatsapp,
city,
uf
};
try{
const response = await api.post('ongs', data);
alert(`Seu ID de acesso: ${response.data.id}`);
history.push('/');
}catch(err){
alert('Erro no cadastro, tente novamente.');
}
}
return (
<div className="register-container">
<div className="content">
<section>
<img src={Logo} alt="Be The Hero"/>
<h1>Cadastro</h1>
<p>Faça seu cadastro, entre na plataforma e ajude pessoas a encontrarem os casos da sua ONG.</p>
<Link className="back-link" to="/">
<FiArrowLeft size={16} color="#E02041" />
Já tenho cadastro
</Link >
</section>
<form onSubmit={handleRegister}>
<input | value={name}
onChange={e => setName(e.target.value)}
/>
<input type="email"
placeholder="E-mail"
value={email}
onChange={e => setEmail(e.target.value)}
/>
<InputMask
type="text"
placeholder="WhatsApp"
mask="+5\5 (99) 99999-9999"
maskChar={null}
maxLength="20"
value={whatsapp}
onChange={e => setWhatsapp(e.target.value)}
/>
<div className="input-group">
<input
placeholder="Cidade"
value={city}
onChange={e => setCity(e.target.value)}
/>
<input placeholder="UF"
maxLength="2"
style={{ width: 80 }}
value={uf}
onChange={e => setUf(e.target.value)}
/>
</div>
<button className="button" type="submit">Cadastrar</button>
</form>
</div>
</div>
);
} | placeholder="Nome da ONG" |
log_container.go | package logs
import (
"errors"
"fmt"
"sync"
"time"
)
const (
elapsedTime = 100 * time.Millisecond
)
type LogEntry struct {
Time time.Time
data []byte
workload string
}
func NewLogEntry(data []byte, workload string) *LogEntry {
return &LogEntry{
data: data,
Time: time.Now(),
workload: workload,
}
}
func (l *LogEntry) Size() int {
return len(l.data)
}
func (l *LogEntry) String() string {
if l.data == nil {
return ""
}
return string(l.data)
}
func (l *LogEntry) GetWorkload() string {
return l.workload |
type FIFOLog struct {
maxlen int
data []*LogEntry
lock sync.Mutex
currentSize int
}
// NewFIFOLog returns a new FIFOLog struct
func NewFIFOLog(maxSize int) *FIFOLog {
return &FIFOLog{
data: []*LogEntry{},
maxlen: maxSize,
}
}
// CurrentSize returns the current total size of the buffered entries.
func (f *FIFOLog) CurrentSize() int {
f.lock.Lock()
defer f.lock.Unlock()
return f.currentSize
}
// Write fills the current buffer with the next LogEntry
func (f *FIFOLog) Write(entry *LogEntry) error {
if entry == nil {
return errors.New("nil log entry")
}
f.lock.Lock()
defer f.lock.Unlock()
// This is a "soft" limit: the buffer may exceed f.maxlen by at most one
// entry, because writes are rejected only after the limit has been passed.
if f.currentSize > f.maxlen {
return fmt.Errorf("buffer is already full")
}
f.data = append(f.data, entry)
f.currentSize = f.currentSize + entry.Size()
return nil
}
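// Example usage (a sketch exercising only the API defined in this file):
//	f := NewFIFOLog(1024)
//	_ = f.Write(NewLogEntry([]byte("hello"), "web"))
//	if e, _ := f.ReadLine(); e != nil {
//		fmt.Println(e.String(), "from", e.GetWorkload())
//	}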
// ReadLine reads the oldest entry and deletes it from the current buffer.
func (f *FIFOLog) ReadLine() (*LogEntry, error) {
f.lock.Lock()
defer f.lock.Unlock()
if f.currentSize <= 0 {
return nil, nil
}
res := f.data[0]
f.data = f.data[1:]
f.currentSize = f.currentSize - res.Size()
return res, nil
} | } |
main.go | package main
import (
"fmt"
"net/http"
"os"
)
func main() {
resp, err := http.Get("https://google.com")
if err != nil {
fmt.Println("Error:", err)
os.Exit(1)
}
// Read the entire body; a single fixed-size Read could return partial data.
defer resp.Body.Close()
bs, err := io.ReadAll(resp.Body)
if err != nil {
fmt.Println("Error:", err)
os.Exit(1)
}
| fmt.Println(string(bs))
} |
|
rule_37.py | def findDecision(obj): #obj[0]: Passanger, obj[1]: Coupon, obj[2]: Education, obj[3]: Occupation, obj[4]: Restaurant20to50, obj[5]: Distance
# {"feature": "Occupation", "instances": 23, "metric_value": 0.9656, "depth": 1}
if obj[3]<=21:
# {"feature": "Passanger", "instances": 21, "metric_value": 0.9183, "depth": 2}
if obj[0]>0:
# {"feature": "Coupon", "instances": 20, "metric_value": 0.8813, "depth": 3} | # {"feature": "Restaurant20to50", "instances": 6, "metric_value": 0.65, "depth": 5}
if obj[4]>0.0:
return 'True'
elif obj[4]<=0.0:
return 'False'
else: return 'False'
elif obj[2]>2:
# {"feature": "Restaurant20to50", "instances": 5, "metric_value": 0.7219, "depth": 5}
if obj[4]<=2.0:
return 'False'
elif obj[4]>2.0:
# {"feature": "Distance", "instances": 2, "metric_value": 1.0, "depth": 6}
if obj[5]<=1:
return 'False'
elif obj[5]>1:
return 'True'
else: return 'True'
else: return 'False'
else: return 'False'
elif obj[1]>3:
# {"feature": "Distance", "instances": 9, "metric_value": 0.5033, "depth": 4}
if obj[5]>1:
return 'True'
elif obj[5]<=1:
# {"feature": "Education", "instances": 3, "metric_value": 0.9183, "depth": 5}
if obj[2]<=0:
# {"feature": "Restaurant20to50", "instances": 2, "metric_value": 1.0, "depth": 6}
if obj[4]>1.0:
return 'True'
elif obj[4]<=1.0:
return 'False'
else: return 'False'
elif obj[2]>0:
return 'True'
else: return 'True'
else: return 'True'
else: return 'True'
elif obj[0]<=0:
return 'False'
else: return 'False'
elif obj[3]>21:
return 'False'
else: return 'False' | if obj[1]<=3:
# {"feature": "Education", "instances": 11, "metric_value": 0.994, "depth": 4}
if obj[2]<=2: |
lib.rs | #![deny(rust_2018_idioms, warnings)]
/// Emits `ossl110` and `ossl111` cfgs based on the version of openssl.
pub fn define_version_number_cfg() {
let openssl_version = std::env::var("DEP_OPENSSL_VERSION_NUMBER").expect("DEP_OPENSSL_VERSION_NUMBER must have been set by openssl-sys");
let openssl_version = u64::from_str_radix(&openssl_version, 16).expect("DEP_OPENSSL_VERSION_NUMBER must have been set to a valid integer");
#[allow(clippy::inconsistent_digit_grouping)]
{
if openssl_version >= 0x01_01_00_00_0 {
println!("cargo:rustc-cfg=ossl110");
}
if openssl_version >= 0x01_01_01_00_0 {
println!("cargo:rustc-cfg=ossl111");
}
}
}
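// Example build.rs usage (a sketch; the `openssl_build` crate name is assumed here, while
// `cc::Build::file`/`compile` are standard cc-crate calls):
// fn main() {
//     openssl_build::define_version_number_cfg();
//     let mut build = openssl_build::get_c_compiler();
//     build.file("src/shim.c").compile("shim");
// }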
/// Create an instance of `cc::Build` set up to compile against openssl.
pub fn get_c_compiler() -> cc::Build | {
let openssl_include_path = std::env::var_os("DEP_OPENSSL_INCLUDE").expect("DEP_OPENSSL_INCLUDE must have been set by openssl-sys");
let mut build = cc::Build::new();
build.include(openssl_include_path);
build.warnings_into_errors(true);
build
} |
|
qmake.py | from conans.model import Generator
from conans.paths import BUILD_INFO_QMAKE
class DepsCppQmake(object):
def __init__(self, cpp_info):
def multiline(field):
return " \\\n ".join('"%s"' % p.replace("\\", "/") for p in field)
self.include_paths = multiline(cpp_info.include_paths)
self.lib_paths = " \\\n ".join('-L"%s"' % p.replace("\\", "/")
for p in cpp_info.lib_paths)
self.bin_paths = multiline(cpp_info.bin_paths)
self.res_paths = multiline(cpp_info.res_paths)
self.build_paths = multiline(cpp_info.build_paths)
self.libs = " ".join('-l%s' % l for l in cpp_info.libs)
self.defines = " \\\n ".join('"%s"' % d for d in cpp_info.defines)
self.cppflags = " ".join(cpp_info.cppflags)
self.cflags = " ".join(cpp_info.cflags)
self.sharedlinkflags = " ".join(cpp_info.sharedlinkflags)
self.exelinkflags = " ".join(cpp_info.exelinkflags)
self.rootpath = '%s' % cpp_info.rootpath.replace("\\", "/")
class QmakeGenerator(Generator):
@property
def | (self):
return BUILD_INFO_QMAKE
@property
def content(self):
deps = DepsCppQmake(self.deps_build_info)
template = ('CONAN_INCLUDEPATH{dep_name}{build_type} += {deps.include_paths}\n'
'CONAN_LIBS{dep_name}{build_type} += {deps.libs}\n'
'CONAN_LIBDIRS{dep_name}{build_type} += {deps.lib_paths}\n'
'CONAN_BINDIRS{dep_name}{build_type} += {deps.bin_paths}\n'
'CONAN_RESDIRS{dep_name}{build_type} += {deps.res_paths}\n'
'CONAN_BUILDDIRS{dep_name}{build_type} += {deps.build_paths}\n'
'CONAN_DEFINES{dep_name}{build_type} += {deps.defines}\n'
'CONAN_QMAKE_CXXFLAGS{dep_name}{build_type} += {deps.cppflags}\n'
'CONAN_QMAKE_CFLAGS{dep_name}{build_type} += {deps.cflags}\n'
'CONAN_QMAKE_LFLAGS{dep_name}{build_type} += {deps.sharedlinkflags}\n'
'CONAN_QMAKE_LFLAGS{dep_name}{build_type} += {deps.exelinkflags}\n')
sections = []
template_all = template
all_flags = template_all.format(dep_name="", deps=deps, build_type="")
sections.append(all_flags)
for config, cpp_info in self.deps_build_info.configs.items():
deps = DepsCppQmake(cpp_info)
dep_flags = template_all.format(dep_name="", deps=deps,
build_type="_" + str(config).upper())
sections.append(dep_flags)
template_deps = template + 'CONAN{dep_name}_ROOT{build_type} = "{deps.rootpath}"\n'
for dep_name, dep_cpp_info in self.deps_build_info.dependencies:
deps = DepsCppQmake(dep_cpp_info)
dep_flags = template_deps.format(dep_name="_" + dep_name.upper(), deps=deps,
build_type="")
sections.append(dep_flags)
for config, cpp_info in dep_cpp_info.configs.items():
deps = DepsCppQmake(cpp_info)
dep_flags = template_deps.format(dep_name="_" + dep_name.upper(), deps=deps,
build_type="_" + str(config).upper())
sections.append(dep_flags)
output = "\n".join(sections)
output += ("""\nCONFIG(conan_basic_setup) {
INCLUDEPATH += $$CONAN_INCLUDEPATH
LIBS += $$CONAN_LIBS
LIBS += $$CONAN_LIBDIRS
BINDIRS += $$CONAN_BINDIRS
DEFINES += $$CONAN_DEFINES
CONFIG(release, debug|release) {
message("Release config")
INCLUDEPATH += $$CONAN_INCLUDEPATH_RELEASE
LIBS += $$CONAN_LIBS_RELEASE
LIBS += $$CONAN_LIBDIRS_RELEASE
BINDIRS += $$CONAN_BINDIRS_RELEASE
DEFINES += $$CONAN_DEFINES_RELEASE
} else {
message("Debug config")
INCLUDEPATH += $$CONAN_INCLUDEPATH_DEBUG
LIBS += $$CONAN_LIBS_DEBUG
LIBS += $$CONAN_LIBDIRS_DEBUG
BINDIRS += $$CONAN_BINDIRS_DEBUG
DEFINES += $$CONAN_DEFINES_DEBUG
}
QMAKE_CXXFLAGS += $$CONAN_QMAKE_CXXFLAGS
QMAKE_CFLAGS += $$CONAN_QMAKE_CFLAGS
QMAKE_LFLAGS += $$CONAN_QMAKE_LFLAGS
QMAKE_CXXFLAGS_DEBUG += $$CONAN_QMAKE_CXXFLAGS_DEBUG
QMAKE_CFLAGS_DEBUG += $$CONAN_QMAKE_CFLAGS_DEBUG
QMAKE_LFLAGS_DEBUG += $$CONAN_QMAKE_LFLAGS_DEBUG
QMAKE_CXXFLAGS_RELEASE += $$CONAN_QMAKE_CXXFLAGS_RELEASE
QMAKE_CFLAGS_RELEASE += $$CONAN_QMAKE_CFLAGS_RELEASE
QMAKE_LFLAGS_RELEASE += $$CONAN_QMAKE_LFLAGS_RELEASE
}""")
return output
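# Example of a generated section (a sketch for a hypothetical "zlib" dependency; paths elided):
#   CONAN_INCLUDEPATH_ZLIB += "<rootpath>/include"
#   CONAN_LIBS_ZLIB += -lz
#   CONAN_ZLIB_ROOT = "<rootpath>"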
| filename |
admin.go | // Copyright 2014 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License. See the AUTHORS file
// for names of contributors.
//
// Author: Spencer Kimball ([email protected])
// Author: Bram Gruneir ([email protected])
package server
import (
// This is imported for its side-effect of registering expvar
// endpoints with the http.DefaultServeMux.
_ "expvar"
"fmt"
"io/ioutil"
"net/http"
// This is imported for its side-effect of registering pprof
// endpoints with the http.DefaultServeMux.
_ "net/http/pprof"
"net/url"
"strings"
"github.com/cockroachdb/cockroach/client"
"github.com/cockroachdb/cockroach/util"
)
const (
maxGetResults = 0 // TODO(spencer): maybe we need paged query support
// adminEndpoint is the prefix for RESTful endpoints used to
// provide an administrative interface to the cockroach cluster.
adminEndpoint = "/_admin/"
// debugEndpoint is the prefix of golang's standard debug functionality
// for access to exported vars and pprof tools.
debugEndpoint = "/debug/"
// healthPath is the health endpoint.
healthPath = adminEndpoint + "health"
// quitPath is the quit endpoint.
quitPath = adminEndpoint + "quit"
// acctPathPrefix is the prefix for accounting configuration changes.
acctPathPrefix = adminEndpoint + "acct"
// permPathPrefix is the prefix for permission configuration changes.
permPathPrefix = adminEndpoint + "perms"
// zonePathPrefix is the prefix for zone configuration changes.
zonePathPrefix = adminEndpoint + "zones"
)
// An actionHandler is an interface which provides Get, Put & Delete
// to satisfy administrative REST APIs.
type actionHandler interface {
Put(path string, body []byte, r *http.Request) error
Get(path string, r *http.Request) (body []byte, contentType string, err error)
Delete(path string, r *http.Request) error
}
// An adminServer provides a RESTful HTTP API for administration of
// the cockroach cluster.
type adminServer struct {
db *client.KV // Key-value database client
stopper *util.Stopper // Used to shutdown the server
acct *acctHandler
perm *permHandler
zone *zoneHandler
}
// newAdminServer allocates and returns a new REST server for
// administrative APIs.
func newAdminServer(db *client.KV, stopper *util.Stopper) *adminServer {
return &adminServer{
db: db,
stopper: stopper,
acct: &acctHandler{db: db},
perm: &permHandler{db: db},
zone: &zoneHandler{db: db},
}
}
| // registerHandlers registers admin handlers with the supplied
// serve mux.
func (s *adminServer) registerHandlers(mux *http.ServeMux) {
// Register each admin endpoint; requests to /debug pass through to the
// default serve mux so we get exported variables and pprof tools.
mux.HandleFunc(acctPathPrefix, s.handleAcctAction)
mux.HandleFunc(acctPathPrefix+"/", s.handleAcctAction)
mux.HandleFunc(debugEndpoint, s.handleDebug)
mux.HandleFunc(healthPath, s.handleHealth)
mux.HandleFunc(quitPath, s.handleQuit)
mux.HandleFunc(permPathPrefix, s.handlePermAction)
mux.HandleFunc(permPathPrefix+"/", s.handlePermAction)
mux.HandleFunc(zonePathPrefix, s.handleZoneAction)
mux.HandleFunc(zonePathPrefix+"/", s.handleZoneAction)
}
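// Example requests served by the handlers above (a sketch derived from the path constants):
//	GET  /_admin/health      -> "ok"
//	GET  /_admin/quit        -> "ok", then a graceful shutdown
//	PUT  /_admin/zones/<key> -> zone config update via the zone actionHandler
//	GET  /debug/pprof/       -> pprof index via http.DefaultServeMux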
// handleHealth responds to health requests from monitoring services.
func (s *adminServer) handleHealth(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
fmt.Fprintln(w, "ok")
}
// handleQuit is the shutdown hook. The server is first placed into a
// draining mode, followed by exit.
func (s *adminServer) handleQuit(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
fmt.Fprintln(w, "ok")
go s.stopper.Stop()
}
// handleDebug passes requests with the debugPathPrefix onto the default
// serve mux, which is preconfigured (by import of expvar and net/http/pprof)
// to serve endpoints which access exported variables and pprof tools.
func (s *adminServer) handleDebug(w http.ResponseWriter, r *http.Request) {
handler, _ := http.DefaultServeMux.Handler(r)
handler.ServeHTTP(w, r)
}
// handleAcctAction handles actions for accounting configuration by method.
func (s *adminServer) handleAcctAction(w http.ResponseWriter, r *http.Request) {
s.handleRESTAction(s.acct, w, r, acctPathPrefix)
}
// handlePermAction handles actions for perm configuration by method.
func (s *adminServer) handlePermAction(w http.ResponseWriter, r *http.Request) {
s.handleRESTAction(s.perm, w, r, permPathPrefix)
}
// handleZoneAction handles actions for zone configuration by method.
func (s *adminServer) handleZoneAction(w http.ResponseWriter, r *http.Request) {
s.handleRESTAction(s.zone, w, r, zonePathPrefix)
}
// handleRESTAction handles RESTful admin actions.
func (s *adminServer) handleRESTAction(handler actionHandler, w http.ResponseWriter, r *http.Request, prefix string) {
switch r.Method {
case "GET":
s.handleGetAction(handler, w, r, prefix)
case "PUT", "POST":
s.handlePutAction(handler, w, r, prefix)
case "DELETE":
s.handleDeleteAction(handler, w, r, prefix)
default:
http.Error(w, "Bad Request", http.StatusBadRequest)
}
}
func unescapePath(path, prefix string) (string, error) {
result, err := url.QueryUnescape(strings.TrimPrefix(path, prefix))
if err != nil {
return "", err
}
return result, nil
}
func (s *adminServer) handlePutAction(handler actionHandler, w http.ResponseWriter, r *http.Request, prefix string) {
path, err := unescapePath(r.URL.Path, prefix)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
b, err := ioutil.ReadAll(r.Body)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
defer r.Body.Close()
if err = handler.Put(path, b, r); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusOK)
}
func (s *adminServer) handleGetAction(handler actionHandler, w http.ResponseWriter, r *http.Request, prefix string) {
path, err := unescapePath(r.URL.Path, prefix)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
b, contentType, err := handler.Get(path, r)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", contentType)
fmt.Fprintf(w, "%s", string(b))
}
func (s *adminServer) handleDeleteAction(handler actionHandler, w http.ResponseWriter, r *http.Request, prefix string) {
path, err := unescapePath(r.URL.Path, prefix)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
if err = handler.Delete(path, r); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusOK)
} | |
mainState.ts | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
'use strict';
import { nbformat } from '@jupyterlab/coreutils';
import * as monacoEditor from 'monaco-editor/esm/vs/editor/editor.api';
import * as path from 'path';
import { IDataScienceSettings } from '../../client/common/types';
import { CellMatcher } from '../../client/datascience/cellMatcher';
import { concatMultilineString, splitMultilineString } from '../../client/datascience/common';
import { Identifiers } from '../../client/datascience/constants';
import { CellState, ICell, IJupyterVariable, IMessageCell } from '../../client/datascience/types';
import { noop } from '../../test/core';
import { ICellViewModel } from './cell';
import { InputHistory } from './inputHistory';
export interface IMainState {
cellVMs: ICellViewModel[];
editCellVM: ICellViewModel | undefined;
busy: boolean;
skipNextScroll?: boolean;
undoStack: ICellViewModel[][];
redoStack: ICellViewModel[][];
submittedText: boolean;
history: InputHistory;
rootStyle?: string;
rootCss?: string;
theme?: string;
forceDark?: boolean;
monacoTheme?: string;
tokenizerLoaded?: boolean;
knownDark: boolean;
editorOptions?: monacoEditor.editor.IEditorOptions;
currentExecutionCount: number;
variablesVisible: boolean;
variables: IJupyterVariable[];
pendingVariableCount: number;
debugging: boolean;
dirty?: boolean;
selectedCell?: string;
focusedCell?: string;
enableGather: boolean;
isAtBottom: boolean;
}
// tslint:disable-next-line: no-multiline-string
const darkStyle = `
:root {
--code-comment-color: #6A9955;
--code-numeric-color: #b5cea8;
--code-string-color: #ce9178;
--code-variable-color: #9CDCFE;
--code-type-color: #4EC9B0;
--code-font-family: Consolas, 'Courier New', monospace;
--code-font-size: 14px;
}
`;
// This function generates test state when running under a browser instead of inside of VS Code.
export function generateTestState(inputBlockToggled: (id: string) => void, filePath: string = '', editable: boolean = false): IMainState {
return {
cellVMs: generateVMs(inputBlockToggled, filePath, editable),
editCellVM: createEditableCellVM(1),
busy: true,
skipNextScroll: false,
undoStack: [],
redoStack: [],
submittedText: false,
history: new InputHistory(),
rootStyle: darkStyle,
tokenizerLoaded: true,
editorOptions: {},
currentExecutionCount: 0,
knownDark: false,
variablesVisible: false,
variables: [
{
name: 'foo',
value: 'bar',
type: 'DataFrame',
size: 100,
supportsDataExplorer: true,
shape: '(100, 100)',
truncated: true,
count: 100
}
],
pendingVariableCount: 0,
debugging: false,
enableGather: true,
isAtBottom: true
};
}
export function | (id: string | undefined, executionCount: number | null): ICell {
return {
data:
{
cell_type: 'code', // We should eventually allow this to change to entering of markdown?
execution_count: executionCount,
metadata: {},
outputs: [],
source: ''
},
id: id ? id : Identifiers.EditCellId,
file: Identifiers.EmptyFileName,
line: 0,
state: CellState.finished,
type: 'execute'
};
}
export function createEditableCellVM(executionCount: number): ICellViewModel {
return {
cell: createEmptyCell(undefined, executionCount),
editable: true,
inputBlockOpen: true,
inputBlockShow: true,
inputBlockText: '',
inputBlockCollapseNeeded: false,
inputBlockToggled: noop
};
}
export function extractInputText(inputCell: ICell, settings: IDataScienceSettings | undefined): string {
const source = inputCell.data.cell_type === 'code' ? splitMultilineString(inputCell.data.source) : [];
const matcher = new CellMatcher(settings);
// Eliminate the #%% on the front if it has nothing else on the line
if (source.length > 0) {
const title = matcher.exec(source[0].trim());
if (title !== undefined && title.length <= 0) {
source.splice(0, 1);
}
// Eliminate the lines to hide if we're debugging
if (inputCell.extraLines) {
inputCell.extraLines.forEach(i => source.splice(i, 1));
inputCell.extraLines = undefined;
}
}
return concatMultilineString(source);
}
export function createCellVM(inputCell: ICell, settings: IDataScienceSettings | undefined, inputBlockToggled: (id: string) => void, editable: boolean): ICellViewModel {
let inputLinesCount = 0;
const inputText = inputCell.data.cell_type === 'code' ? extractInputText(inputCell, settings) : '';
if (inputText) {
inputLinesCount = inputText.split('\n').length;
}
return {
cell: inputCell,
editable,
inputBlockOpen: true,
inputBlockShow: true,
inputBlockText: inputText,
inputBlockCollapseNeeded: (inputLinesCount > 1),
inputBlockToggled: inputBlockToggled
};
}
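// Example (a sketch): building a view model from a freshly created empty cell.
// const cell = createEmptyCell('cell-1', null);
// const vm = createCellVM(cell, undefined, id => console.log('toggled', id), false);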
function generateVMs(inputBlockToggled: (id: string) => void, filePath: string, editable: boolean): ICellViewModel[] {
const cells = generateCells(filePath);
return cells.map((cell: ICell) => {
const vm = createCellVM(cell, undefined, inputBlockToggled, editable);
vm.useQuickEdit = true;
return vm;
});
}
function generateCells(filePath: string): ICell[] {
// Dupe a bunch times for perf reasons
let cellData: (nbformat.ICodeCell | nbformat.IMarkdownCell | nbformat.IRawCell | IMessageCell)[] = [];
for (let i = 0; i < 10; i += 1) {
cellData = [...cellData, ...generateCellData()];
}
return cellData.map((data: nbformat.ICodeCell | nbformat.IMarkdownCell | nbformat.IRawCell | IMessageCell, key: number) => {
return {
id: key.toString(),
file: path.join(filePath, 'foo.py'),
line: 1,
state: key === cellData.length - 1 ? CellState.executing : CellState.finished,
type: key === 3 ? 'preview' : 'execute',
data: data
};
});
}
//tslint:disable:max-func-body-length
function generateCellData(): (nbformat.ICodeCell | nbformat.IMarkdownCell | nbformat.IRawCell | IMessageCell)[] {
// Hopefully new entries here can just be copied out of a jupyter notebook (ipynb)
return [
{
// These are special. Sys_info is our own custom cell
cell_type: 'messages',
messages: [
'You have this python data:',
'c:\\data\\python.exe',
'3.9.9.9 The Uber Version',
'(5, 9, 9)',
'https:\\localhost\\token?=9343p0843084039483084308430984038403840938409384098304983094803948093848034809384'
],
source: [],
metadata: {}
},
{
cell_type: 'code',
execution_count: 467,
metadata: {
slideshow: {
slide_type: '-'
}
},
outputs: [
{
data: {
// tslint:disable-next-line: no-multiline-string
'text/html': [`
<div style="
overflow: auto;
">
<style scoped="">
.dataframe tbody tr th:only-of-type {
vertical-align: middle;
}
.dataframe tbody tr th {
vertical-align: top;
}
.dataframe thead th {
text-align: right;
}
</style>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>0</th>
<th>1</th>
<th>2</th>
<th>3</th>
<th>4</th>
<th>5</th>
<th>6</th>
<th>7</th>
<th>8</th>
<th>9</th>
<th>...</th>
<th>2990</th>
<th>2991</th>
<th>2992</th>
<th>2993</th>
<th>2994</th>
<th>2995</th>
<th>2996</th>
<th>2997</th>
<th>2998</th>
<th>2999</th>
</tr>
<tr>
<th>idx</th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
</tr>
</thead>
<tbody>
<tr>
<th>2007-01-31</th>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>...</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
<td>37.060604</td>
</tr>
<tr>
<th>2007-02-28</th>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>...</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
<td>20.603407</td>
</tr>
<tr>
<th>2007-03-31</th>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>...</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
<td>6.142031</td>
</tr>
<tr>
<th>2007-04-30</th>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>...</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
<td>6.931635</td>
</tr>
<tr>
<th>2007-05-31</th>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>...</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
<td>52.642243</td>
</tr>
</tbody>
</table>
<p>5 rows × 3000 columns</p>
</div>`
]
},
execution_count: 4,
metadata: {},
output_type: 'execute_result'
}
],
source: [
'myvar = \"\"\" # Lorem Ipsum\n',
'\n',
'Lorem ipsum dolor sit amet, consectetur adipiscing elit.\n',
'Nullam eget varius ligula, eget fermentum mauris.\n',
'Cras ultrices, enim sit amet iaculis ornare, nisl nibh aliquet elit, sed ultrices velit ipsum dignissim nisl.\n',
'Nunc quis orci ante. Vivamus vel blandit velit.\n","Sed mattis dui diam, et blandit augue mattis vestibulum.\n',
'Suspendisse ornare interdum velit. Suspendisse potenti.\n',
'Morbi molestie lacinia sapien nec porttitor. Nam at vestibulum nisi.\n',
'\"\"\" '
]
},
{
cell_type: 'markdown',
metadata: {},
source: [
'## Cell 3\n',
'Here\'s some markdown\n',
'- A List\n',
'- Of Items'
]
},
{
cell_type: 'code',
execution_count: 1,
metadata: {},
outputs: [
{
ename: 'NameError',
evalue: 'name "df" is not defined',
output_type: 'error',
traceback: [
'\u001b[1;31m---------------------------------------------------------------------------\u001b[0m',
'\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)',
'\u001b[1;32m<ipython-input-1-00cf07b74dcd>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mdf\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m',
'\u001b[1;31mNameError\u001b[0m: name "df" is not defined'
]
}
],
source: [
'df'
]
},
{
cell_type: 'code',
execution_count: 1,
metadata: {},
outputs: [
{
ename: 'NameError',
evalue: 'name "df" is not defined',
output_type: 'error',
traceback: [
'\u001b[1;31m---------------------------------------------------------------------------\u001b[0m',
'\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)',
'\u001b[1;32m<ipython-input-1-00cf07b74dcd>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0mdf\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m',
'\u001b[1;31mNameError\u001b[0m: name "df" is not defined'
]
}
],
source: [
'df'
]
}
];
}
| createEmptyCell |
gopdf.go | package gopdf
import (
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"strconv"
"time"
)
const subsetFont = "SubsetFont"
//GoPdf : A simple library for generating PDFs, written in Go
type GoPdf struct {
//page Margin
leftMargin float64
topMargin float64
pdfObjs []IObj
config Config
/*---indexes of important objs, cached to avoid looping during lookups---*/
//index of the pages obj
indexOfPagesObj int
//index of the first page obj
indexOfFirstPageObj int
//current position
curr Current
indexEncodingObjFonts []int
indexOfContent int
//index of the procset, of which there should be only one
indexOfProcSet int
//IsUnderline bool
// Buffer for io.Reader compliance
buf bytes.Buffer
//pdf PProtection
pdfProtection *PDFProtection
encryptionObjID int
//info
isUseInfo bool
info *PdfInfo
}
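//Minimal usage sketch (assumes Config and Rect follow the published gopdf API; the font path is hypothetical):
//	pdf := GoPdf{}
//	pdf.Start(Config{PageSize: Rect{W: 595.28, H: 841.89}}) // A4 in points
//	pdf.AddPage()
//	_ = pdf.AddTTFFont("loma", "./ttf/Loma.ttf")
//	_ = pdf.SetFont("loma", "", 14)
//	_ = pdf.Text("Hello")
//	pdf.WritePdf("hello.pdf")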
//SetLineWidth : set line width
func (gp *GoPdf) SetLineWidth(width float64) {
gp.curr.lineWidth = width
gp.getContent().AppendStreamSetLineWidth(width)
}
//SetLineType : set line type ("dashed" ,"dotted")
// Usage:
// pdf.SetLineType("dashed")
// pdf.Line(50, 200, 550, 200)
// pdf.SetLineType("dotted")
// pdf.Line(50, 400, 550, 400)
func (gp *GoPdf) SetLineType(linetype string) {
gp.getContent().AppendStreamSetLineType(linetype)
}
//Line : draw line
func (gp *GoPdf) Line(x1 float64, y1 float64, x2 float64, y2 float64) {
gp.getContent().AppendStreamLine(x1, y1, x2, y2)
}
//RectFromLowerLeft : draw rectangle from lower-left corner (x, y)
func (gp *GoPdf) RectFromLowerLeft(x float64, y float64, wdth float64, hght float64) {
gp.getContent().AppendStreamRectangle(x, y, wdth, hght, "")
}
//RectFromUpperLeft : draw rectangle from upper-left corner (x, y)
func (gp *GoPdf) RectFromUpperLeft(x float64, y float64, wdth float64, hght float64) {
gp.getContent().AppendStreamRectangle(x, y+hght, wdth, hght, "")
}
//RectFromLowerLeftWithStyle : draw rectangle from lower-left corner (x, y)
// - style: Style of rectangle (draw and/or fill: D, F, DF, FD)
// D or empty string: draw. This is the default value.
// F: fill
// DF or FD: draw and fill
func (gp *GoPdf) RectFromLowerLeftWithStyle(x float64, y float64, wdth float64, hght float64, style string) {
gp.getContent().AppendStreamRectangle(x, y, wdth, hght, style)
}
//RectFromUpperLeftWithStyle : draw rectangle from upper-left corner (x, y)
// - style: Style of rectangle (draw and/or fill: D, F, DF, FD)
// D or empty string: draw. This is the default value.
// F: fill
// DF or FD: draw and fill
func (gp *GoPdf) RectFromUpperLeftWithStyle(x float64, y float64, wdth float64, hght float64, style string) {
gp.getContent().AppendStreamRectangle(x, y+hght, wdth, hght, style)
}
//Oval : draw oval
func (gp *GoPdf) Oval(x1 float64, y1 float64, x2 float64, y2 float64) {
gp.getContent().AppendStreamOval(x1, y1, x2, y2)
}
//Br : new line
func (gp *GoPdf) Br(h float64) {
gp.curr.Y += h
gp.curr.X = gp.leftMargin
}
//SetGrayFill set the grayscale for the fill, takes a float64 between 0.0 and 1.0
func (gp *GoPdf) SetGrayFill(grayScale float64) {
gp.curr.grayFill = grayScale
gp.getContent().AppendStreamSetGrayFill(grayScale)
}
//SetGrayStroke set the grayscale for the stroke, takes a float64 between 0.0 and 1.0
func (gp *GoPdf) SetGrayStroke(grayScale float64) {
gp.curr.grayStroke = grayScale
gp.getContent().AppendStreamSetGrayStroke(grayScale)
}
//SetLeftMargin : set left margin
func (gp *GoPdf) SetLeftMargin(margin float64) {
gp.leftMargin = margin
}
//SetTopMargin : set top margin
func (gp *GoPdf) SetTopMargin(margin float64) {
gp.topMargin = margin
}
//SetX : set current position X
func (gp *GoPdf) SetX(x float64) {
gp.curr.setXCount++
gp.curr.X = x
}
//GetX : get current position X
func (gp *GoPdf) GetX() float64 {
return gp.curr.X
}
//SetY : set current position y
func (gp *GoPdf) SetY(y float64) {
gp.curr.Y = y
}
//GetY : get current position y
func (gp *GoPdf) GetY() float64 {
return gp.curr.Y
}
//Image : draw image
func (gp *GoPdf) Image(picPath string, x float64, y float64, rect *Rect) error {
//check
cacheImageIndex := -1
for _, imgcache := range gp.curr.ImgCaches {
if picPath == imgcache.Path {
cacheImageIndex = imgcache.Index
break
}
}
//create img object
imgobj := new(ImageObj)
imgobj.init(func() *GoPdf {
return gp
})
imgobj.setProtection(gp.protection())
var err error
err = imgobj.SetImagePath(picPath)
if err != nil {
return err
}
if rect == nil {
rect = imgobj.GetRect()
}
if cacheImageIndex == -1 { //new image
err := imgobj.parse()
if err != nil {
return err
}
index := gp.addObj(imgobj)
if gp.indexOfProcSet != -1 {
//embed the image
procset := gp.pdfObjs[gp.indexOfProcSet].(*ProcSetObj)
gp.getContent().AppendStreamImage(gp.curr.CountOfImg, x, y, rect)
procset.RealteXobjs = append(procset.RealteXobjs, RealteXobject{IndexOfObj: index})
//cache the image info
var imgcache ImageCache
imgcache.Index = gp.curr.CountOfImg
imgcache.Path = picPath
gp.curr.ImgCaches = append(gp.curr.ImgCaches, imgcache)
gp.curr.CountOfImg++
}
if imgobj.haveSMask() {
smaskObj, err := imgobj.createSMask()
if err != nil {
return err
}
imgobj.imginfo.smarkObjID = gp.addObj(smaskObj)
}
if imgobj.isColspaceIndexed() {
dRGB, err := imgobj.createDeviceRGB()
if err != nil {
return err
}
imgobj.imginfo.deviceRGBObjID = gp.addObj(dRGB)
}
} else { //same img
gp.getContent().AppendStreamImage(cacheImageIndex, x, y, rect)
}
return nil
}
//ImageByReader : draw image from an io.Reader
func (gp *GoPdf) ImageByReader(o string, r io.Reader, x float64, y float64, rect *Rect) error {
//check
cacheImageIndex := -1
for _, imgcache := range gp.curr.ImgCaches {
if o == imgcache.Path {
cacheImageIndex = imgcache.Index
break
}
}
//create img object
imgobj := new(ImageObj)
imgobj.init(func() *GoPdf {
return gp
})
imgobj.setProtection(gp.protection())
var err error
err = imgobj.SetImage(r)
if err != nil {
return err
}
if rect == nil {
rect = imgobj.GetRect()
}
if cacheImageIndex == -1 { //new image
err := imgobj.parse()
if err != nil {
return err
}
index := gp.addObj(imgobj)
if gp.indexOfProcSet != -1 {
//embed the image
procset := gp.pdfObjs[gp.indexOfProcSet].(*ProcSetObj)
gp.getContent().AppendStreamImage(gp.curr.CountOfImg, x, y, rect)
procset.RealteXobjs = append(procset.RealteXobjs, RealteXobject{IndexOfObj: index})
//cache the image info
var imgcache ImageCache
imgcache.Index = gp.curr.CountOfImg
imgcache.Path = o
gp.curr.ImgCaches = append(gp.curr.ImgCaches, imgcache)
gp.curr.CountOfImg++
}
if imgobj.haveSMask() {
smaskObj, err := imgobj.createSMask()
if err != nil {
return err
}
imgobj.imginfo.smarkObjID = gp.addObj(smaskObj)
}
if imgobj.isColspaceIndexed() {
dRGB, err := imgobj.createDeviceRGB()
if err != nil {
return err
}
imgobj.imginfo.deviceRGBObjID = gp.addObj(dRGB)
}
} else { //same img
gp.getContent().AppendStreamImage(cacheImageIndex, x, y, rect)
}
return nil
}
//AddPage : add new page
func (gp *GoPdf) AddPage() {
page := new(PageObj)
page.init(func() *GoPdf {
return gp
})
page.ResourcesRelate = strconv.Itoa(gp.indexOfProcSet+1) + " 0 R"
index := gp.addObj(page)
if gp.indexOfFirstPageObj == -1 {
gp.indexOfFirstPageObj = index
}
gp.curr.IndexOfPageObj = index
//reset
gp.indexOfContent = -1
gp.resetCurrXY()
}
//Start : init gopdf
func (gp *GoPdf) Start(config Config) {
gp.config = config
gp.init()
//create the basic objs
catalog := new(CatalogObj)
catalog.init(func() *GoPdf {
return gp
})
pages := new(PagesObj)
pages.init(func() *GoPdf {
return gp
})
gp.addObj(catalog)
gp.indexOfPagesObj = gp.addObj(pages)
//indexOfProcSet
procset := new(ProcSetObj)
procset.init(func() *GoPdf {
return gp
})
gp.indexOfProcSet = gp.addObj(procset)
if gp.isUseProtection() {
gp.pdfProtection = gp.createProtection()
}
}
//SetFont : set the current font; style supports "" or "U"
func (gp *GoPdf) SetFont(family string, style string, size int) error {
found := false
i := 0
max := len(gp.pdfObjs)
for i < max {
if gp.pdfObjs[i].getType() == subsetFont {
obj := gp.pdfObjs[i]
sub, ok := obj.(*SubsetFontObj)
if ok {
if sub.GetFamily() == family {
gp.curr.Font_Size = size
gp.curr.Font_Style = style
gp.curr.Font_FontCount = sub.CountOfFont
gp.curr.Font_ISubset = sub
found = true
break
}
}
}
i++
}
if !found {
return errors.New("not found font family")
}
return nil
}
//WritePdf : write pdf file
func (gp *GoPdf) WritePdf(pdfPath string) {
ioutil.WriteFile(pdfPath, gp.GetBytesPdf(), 0644)
}
func (gp *GoPdf) Read(p []byte) (int, error) {
if gp.buf.Len() == 0 && gp.buf.Cap() == 0 {
if err := gp.compilePdf(); err != nil {
return 0, err
}
}
return gp.buf.Read(p)
}
func (gp *GoPdf) Close() error {
gp.buf = bytes.Buffer{}
return nil
}
func (gp *GoPdf) compilePdf() error {
gp.prepare()
err := gp.Close()
if err != nil {
return err
}
max := len(gp.pdfObjs)
gp.buf.WriteString("%PDF-1.7\n\n")
linelens := make([]int, max)
i := 0
for i < max {
objID := i + 1
linelens[i] = gp.buf.Len()
pdfObj := gp.pdfObjs[i]
err = pdfObj.build(objID)
if err != nil {
return err
}
gp.buf.WriteString(strconv.Itoa(objID) + " 0 obj\n")
buffbyte := pdfObj.getObjBuff().Bytes()
gp.buf.Write(buffbyte)
gp.buf.WriteString("endobj\n\n")
i++
}
gp.xref(linelens, &gp.buf, &i)
return nil
}
//GetBytesPdfReturnErr : get bytes of pdf file
func (gp *GoPdf) GetBytesPdfReturnErr() ([]byte, error) {
err := gp.Close()
if err != nil {
return nil, err
}
err = gp.compilePdf()
return gp.buf.Bytes(), err
}
//GetBytesPdf : get bytes of pdf file
func (gp *GoPdf) GetBytesPdf() []byte {
b, err := gp.GetBytesPdfReturnErr()
if err != nil {
log.Fatalf("%s", err.Error())
}
return b
}
//Text : write text starting at the current x,y (the current y is the text baseline)
func (gp *GoPdf) Text(text string) error {
err := gp.curr.Font_ISubset.AddChars(text)
if err != nil {
return err
}
err = gp.getContent().AppendStreamText(text)
if err != nil {
return err
}
return nil
}
//CellWithOption creates a cell of text (the current x,y is the upper-left corner of the cell)
func (gp *GoPdf) CellWithOption(rectangle *Rect, text string, opt CellOption) error {
err := gp.curr.Font_ISubset.AddChars(text)
if err != nil {
return err
}
err = gp.getContent().AppendStreamSubsetFont(rectangle, text, opt)
if err != nil {
return err
}
return nil
}
//Cell : create a cell of text (the current x,y is the upper-left corner of the cell)
//Note that this currently has no effect on Rect.H. Fix later :-)
func (gp *GoPdf) Cell(rectangle *Rect, text string) error {
defaultopt := CellOption{
Align: Left | Top,
Border: 0,
Float: Right,
}
err := gp.curr.Font_ISubset.AddChars(text)
if err != nil {
return err
}
err = gp.getContent().AppendStreamSubsetFont(rectangle, text, defaultopt)
if err != nil {
return err
}
return nil
}
//AddTTFFontByReader add font file
func (gp *GoPdf) AddTTFFontByReader(family string, rd io.Reader) error {
return gp.AddTTFFontByReaderWithOption(family, rd, defaultTtfFontOption())
}
//AddTTFFontByReaderWithOption add font file
func (gp *GoPdf) AddTTFFontByReaderWithOption(family string, rd io.Reader, option TtfOption) error {
subsetFont := new(SubsetFontObj)
subsetFont.init(func() *GoPdf {
return gp
})
subsetFont.SetTtfFontOption(option)
subsetFont.SetFamily(family)
err := subsetFont.SetTTFByReader(rd)
if err != nil {
return err
}
unicodemap := new(UnicodeMap)
unicodemap.init(func() *GoPdf {
return gp
})
unicodemap.setProtection(gp.protection())
unicodemap.SetPtrToSubsetFontObj(subsetFont)
unicodeindex := gp.addObj(unicodemap)
pdfdic := new(PdfDictionaryObj)
pdfdic.init(func() *GoPdf {
return gp
})
pdfdic.setProtection(gp.protection())
pdfdic.SetPtrToSubsetFontObj(subsetFont)
pdfdicindex := gp.addObj(pdfdic)
subfontdesc := new(SubfontDescriptorObj)
subfontdesc.init(func() *GoPdf {
return gp
})
subfontdesc.SetPtrToSubsetFontObj(subsetFont)
subfontdesc.SetIndexObjPdfDictionary(pdfdicindex)
subfontdescindex := gp.addObj(subfontdesc)
cidfont := new(CIDFontObj)
cidfont.init(func() *GoPdf {
return gp
})
cidfont.SetPtrToSubsetFontObj(subsetFont)
cidfont.SetIndexObjSubfontDescriptor(subfontdescindex)
cidindex := gp.addObj(cidfont)
subsetFont.SetIndexObjCIDFont(cidindex)
subsetFont.SetIndexObjUnicodeMap(unicodeindex)
index := gp.addObj(subsetFont) //add last
if gp.indexOfProcSet != -1 {
procset := gp.pdfObjs[gp.indexOfProcSet].(*ProcSetObj)
if !procset.Realtes.IsContainsFamily(family) {
procset.Realtes = append(procset.Realtes, RelateFont{Family: family, IndexOfObj: index, CountOfFont: gp.curr.CountOfFont})
subsetFont.CountOfFont = gp.curr.CountOfFont
gp.curr.CountOfFont++
}
}
return nil
}
//AddTTFFontWithOption : add font file
func (gp *GoPdf) AddTTFFontWithOption(family string, ttfpath string, option TtfOption) error {
if _, err := os.Stat(ttfpath); os.IsNotExist(err) {
return err
}
data, err := ioutil.ReadFile(ttfpath)
if err != nil {
return err
}
rd := bytes.NewReader(data)
return gp.AddTTFFontByReaderWithOption(family, rd, option)
}
//AddTTFFont : add font file
func (gp *GoPdf) AddTTFFont(family string, ttfpath string) error {
return gp.AddTTFFontWithOption(family, ttfpath, defaultTtfFontOption())
}
//KernOverride override kern value
func (gp *GoPdf) KernOverride(family string, fn FuncKernOverride) error {
i := 0
max := len(gp.pdfObjs)
for i < max {
if gp.pdfObjs[i].getType() == subsetFont {
obj := gp.pdfObjs[i]
sub, ok := obj.(*SubsetFontObj)
if ok {
if sub.GetFamily() == family {
sub.funcKernOverride = fn
return nil
}
}
}
i++
}
return errors.New("font family not found")
}
//SetTextColor : function sets the text color
func (gp *GoPdf) SetTextColor(r uint8, g uint8, b uint8) {
rgb := Rgb{
r: r,
g: g,
b: b,
}
gp.curr.setTextColor(rgb)
}
//SetStrokeColor set the color for the stroke
func (gp *GoPdf) SetStrokeColor(r uint8, g uint8, b uint8) {
gp.getContent().AppendStreamSetColorStroke(r, g, b)
}
//SetFillColor set the color for the stroke
func (gp *GoPdf) SetFillColor(r uint8, g uint8, b uint8) {
gp.getContent().AppendStreamSetColorFill(r, g, b)
}
//MeasureTextWidth : measure Width of text (use current font)
func (gp *GoPdf) MeasureTextWidth(text string) (float64, error) {
err := gp.curr.Font_ISubset.AddChars(text) //AddChars for create CharacterToGlyphIndex
if err != nil {
return 0, err
}
_, _, textWidthPdfUnit, err := createContent(gp.curr.Font_ISubset, text, gp.curr.Font_Size, nil, nil)
if err != nil {
return 0, err
}
return textWidthPdfUnit, nil
}
//Curve Draws a Bézier curve (the Bézier curve is tangent to the line between the control points at either end of the curve)
// Parameters:
// - x0, y0: Start point
// - x1, y1: Control point 1
// - x2, y2: Control point 2
// - x3, y3: End point
// - style: Style of rectangle (draw and/or fill: D, F, DF, FD)
func (gp *GoPdf) Curve(x0 float64, y0 float64, x1 float64, y1 float64, x2 float64, y2 float64, x3 float64, y3 float64, style string) {
gp.getContent().AppendStreamCurve(x0, y0, x1, y1, x2, y2, x3, y3, style)
}
/*
//SetProtection set permissions as well as user and owner passwords
func (gp *GoPdf) SetProtection(permissions int, userPass []byte, ownerPass []byte) {
gp.pdfProtection = new(PDFProtection)
gp.pdfProtection.setProtection(permissions, userPass, ownerPass)
}*/
//SetInfo set Document Information Dictionary
func (gp *GoPdf) SetInfo(info PdfInfo) {
gp.info = &info
gp.isUseInfo = true
}
/*---private---*/
//init
func (gp *GoPdf) init() {
//default
gp.leftMargin = 10.0
gp.topMargin = 10.0
//init curr
gp.resetCurrXY()
gp.curr.IndexOfPageObj = -1
gp.curr.CountOfFont = 0
gp.curr.CountOfL = 0
gp.curr.CountOfImg = 0 //img
gp.curr.ImgCaches = *new([]ImageCache)
//init index
gp.indexOfPagesObj = -1
gp.indexOfFirstPageObj = -1
gp.indexOfContent = -1
//No underline
//gp.IsUnderline = false
gp.curr.lineWidth = 1
}
func (gp *GoPdf) resetCurrXY() {
gp.curr.X = gp.leftMargin
gp.curr.Y = gp.topMargin
}
func (gp *GoPdf) isUseProtection() bool {
return gp.config.Protection.UseProtection
}
func (gp *GoPdf) createProtection() *PDFProtection {
var prot PDFProtection
prot.setProtection(
gp.config.Protection.Permissions,
gp.config.Protection.UserPass,
gp.config.Protection.OwnerPass,
)
return &prot
}
func (gp *GoPdf) protection() *PDFProtection {
return gp.pdfProtection
}
func (gp *GoPdf) prepare() {
if gp.isUseProtection() {
encObj := gp.pdfProtection.encryptionObj()
gp.addObj(encObj)
}
if gp.indexOfPagesObj != -1 {
indexCurrPage := -1
var pagesObj *PagesObj
pagesObj = gp.pdfObjs[gp.indexOfPagesObj].(*PagesObj)
i := 0 //gp.indexOfFirstPageObj
max := len(gp.pdfObjs)
for i < max {
objtype := gp.pdfObjs[i].getType()
//fmt.Printf(" objtype = %s , %d \n", objtype , i)
if objtype == "Page" {
pagesObj.Kids = fmt.Sprintf("%s %d 0 R ", pagesObj.Kids, i+1)
pagesObj.PageCount++
indexCurrPage = i
} else if objtype == "Content" {
if indexCurrPage != -1 {
gp.pdfObjs[indexCurrPage].(*PageObj).Contents = fmt.Sprintf("%s %d 0 R ", gp.pdfObjs[indexCurrPage].(*PageObj).Contents, i+1)
}
} else if objtype == "Font" {
tmpfont := gp.pdfObjs[i].(*FontObj)
j := 0
jmax := len(gp.indexEncodingObjFonts)
for j < jmax {
tmpencoding := gp.pdfObjs[gp.indexEncodingObjFonts[j]].(*EncodingObj).GetFont()
if tmpfont.Family == tmpencoding.GetFamily() { //fill in the embedded font's data
tmpfont.IsEmbedFont = true
tmpfont.SetIndexObjEncoding(gp.indexEncodingObjFonts[j] + 1)
tmpfont.SetIndexObjWidth(gp.indexEncodingObjFonts[j] + 2)
tmpfont.SetIndexObjFontDescriptor(gp.indexEncodingObjFonts[j] + 3)
break
}
j++
}
} else if objtype == "Encryption" {
gp.encryptionObjID = i + 1
}
i++
}
}
}
func (gp *GoPdf) xref(linelens []int, buff *bytes.Buffer, i *int) error {
xrefbyteoffset := buff.Len()
buff.WriteString("xref\n")
buff.WriteString("0 " + strconv.Itoa((*i)+1) + "\n")
buff.WriteString("0000000000 65535 f \n")
j := 0
max := len(linelens)
for j < max {
linelen := linelens[j]
buff.WriteString(gp.formatXrefline(linelen) + " 00000 n \n")
j++
}
buff.WriteString("trailer\n")
buff.WriteString("<<\n")
buff.WriteString("/Size " + strconv.Itoa(max+1) + "\n")
buff.WriteString("/Root 1 0 R\n")
if gp.isUseProtection() {
buff.WriteString(fmt.Sprintf("/Encrypt %d 0 R\n", gp.encryptionObjID))
buff.WriteString("/ID [()()]\n")
}
if gp.isUseInfo {
gp.bindInfo(buff)
}
buff.WriteString(">>\n")
buff.WriteString("startxref\n")
buff.WriteString(strconv.Itoa(xrefbyteoffset))
buff.WriteString("\n%%EOF\n")
(*i)++
return nil
}
func (gp *GoPdf) bindInfo(buff *bytes.Buffer) {
var zerotime time.Time
buff.WriteString("/Info <<\n")
if gp.info.Author != "" {
buff.WriteString(fmt.Sprintf("/Author <FEFF%s>\n", encodeUtf8(gp.info.Author)))
}
if gp.info.Title != "" {
buff.WriteString(fmt.Sprintf("/Title <FEFF%s>\n", encodeUtf8(gp.info.Title)))
}
if gp.info.Subject != "" {
buff.WriteString(fmt.Sprintf("/Subject <FEFF%s>\n", encodeUtf8(gp.info.Subject)))
}
if gp.info.Creator != "" {
buff.WriteString(fmt.Sprintf("/Creator <FEFF%s>\n", encodeUtf8(gp.info.Creator)))
}
if gp.info.Producer != "" {
buff.WriteString(fmt.Sprintf("/Producer <FEFF%s>\n", encodeUtf8(gp.info.Producer)))
}
if !zerotime.Equal(gp.info.CreationDate) {
buff.WriteString(fmt.Sprintf("/CreationDate(D:%s)>>\n", infodate(gp.info.CreationDate)))
}
buff.WriteString(" >>\n")
}
//pad the xref byte offset to 10 digits
func (gp *GoPdf) formatXrefline(n int) string {
str := strconv.Itoa(n)
for len(str) < 10 {
str = "0" + str
}
return str
}
func (gp *GoPdf) addObj(iobj IObj) int {
index := len(gp.pdfObjs)
gp.pdfObjs = append(gp.pdfObjs, iobj)
return index
}
func (gp *GoPdf) getContent() *ContentObj {
var content *ContentObj
if gp.indexOfContent <= -1 {
content = new(ContentObj)
content.init(func() *GoPdf {
return gp
})
gp.indexOfContent = gp.addObj(content)
} else {
content = gp.pdfObjs[gp.indexOfContent].(*ContentObj)
}
return content
}
func encodeUtf8(str string) string {
var buff bytes.Buffer
for _, r := range str {
c := fmt.Sprintf("%X", r)
for len(c) < 4 {
c = "0" + c
}
buff.WriteString(c)
}
return buff.String()
}
func infodate(t time.Time) string {
ft := t.Format("20060102150405-07'00'")
return ft
}
| ||
online_store_utils.py | from datetime import datetime
import boto3
from feast import utils
from feast.infra.online_stores.helpers import compute_entity_id
from feast.protos.feast.types.EntityKey_pb2 import EntityKey as EntityKeyProto
from feast.protos.feast.types.Value_pb2 import Value as ValueProto
def _create_n_customer_test_samples(n=10):
return [
(
EntityKeyProto(
join_keys=["customer"], entity_values=[ValueProto(string_val=str(i))]
),
{
"avg_orders_day": ValueProto(float_val=1.0),
"name": ValueProto(string_val="John"),
"age": ValueProto(int64_val=3),
},
datetime.utcnow(),
None,
)
for i in range(n)
]
def _create_test_table(project, tbl_name, region):
client = boto3.client("dynamodb", region_name=region)
client.create_table(
TableName=f"{project}.{tbl_name}",
KeySchema=[{"AttributeName": "entity_id", "KeyType": "HASH"}],
AttributeDefinitions=[{"AttributeName": "entity_id", "AttributeType": "S"}],
BillingMode="PAY_PER_REQUEST",
)
def _delete_test_table(project, tbl_name, region):
client = boto3.client("dynamodb", region_name=region)
client.delete_table(TableName=f"{project}.{tbl_name}")
def _insert_data_test_table(data, project, tbl_name, region):
| dynamodb_resource = boto3.resource("dynamodb", region_name=region)
table_instance = dynamodb_resource.Table(f"{project}.{tbl_name}")
# open one batch writer for the whole insert rather than one per item
with table_instance.batch_writer() as batch:
for entity_key, features, timestamp, created_ts in data:
entity_id = compute_entity_id(entity_key)
batch.put_item(
Item={
"entity_id": entity_id,
"event_ts": str(utils.make_tzaware(timestamp)),
"values": {k: v.SerializeToString() for k, v in features.items()},
}
) |
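# Illustrative round trip using the helpers above (a sketch; assumes AWS
# credentials or a moto-mocked DynamoDB endpoint, and placeholder names):
#
#   data = _create_n_customer_test_samples(n=5)
#   _create_test_table("test_project", "customers", "us-west-2")
#   _insert_data_test_table(data, "test_project", "customers", "us-west-2")
#   _delete_test_table("test_project", "customers", "us-west-2")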
|
owned.rs | // Copyright Amazon.com, Inc. or its affiliates.
//! Provides owned implementations of [`SymbolToken`], [`Element`] and its dependents.
//!
//! This API is simpler to manage with respect to borrowing lifetimes, but requires full
//! ownership of data to do so.
use super::{AnyInt, Element, ImportSource, Sequence, Struct, SymbolToken};
use crate::types::decimal::Decimal;
use crate::types::timestamp::Timestamp;
use crate::types::SymbolId;
use crate::value::Builder;
use crate::IonType;
use num_bigint::BigInt;
use std::collections::HashMap;
use std::iter::FromIterator;
use std::rc::Rc;
/// An owned implementation of [`ImportSource`].
#[derive(Debug, Clone)]
pub struct OwnedImportSource {
table: Rc<str>,
sid: SymbolId,
}
impl OwnedImportSource {
pub fn new<T: Into<Rc<str>>>(table: T, sid: SymbolId) -> Self {
Self {
table: table.into(),
sid,
}
}
}
impl PartialEq for OwnedImportSource {
fn eq(&self, other: &Self) -> bool {
self.table == other.table && self.sid == other.sid
}
}
impl Eq for OwnedImportSource {}
impl ImportSource for OwnedImportSource {
fn table(&self) -> &str {
&self.table
}
fn sid(&self) -> usize {
self.sid
}
}
/// An owned implementation of [`SymbolToken`].
#[derive(Debug, Clone)]
pub struct OwnedSymbolToken {
text: Option<Rc<str>>,
local_sid: Option<SymbolId>,
source: Option<OwnedImportSource>,
}
impl OwnedSymbolToken {
fn new(
text: Option<Rc<str>>,
local_sid: Option<SymbolId>,
source: Option<OwnedImportSource>,
) -> Self {
Self {
text,
local_sid,
source,
}
}
}
/// Constructs an [`OwnedSymbolToken`] with unknown text and a local ID.
/// This is a common case when parsing binary Ion (though it can occur in text as well).
#[inline]
pub fn local_sid_token(local_sid: SymbolId) -> OwnedSymbolToken {
OwnedSymbolToken::new(None, Some(local_sid), None)
}
/// Constructs an [`OwnedSymbolToken`] with just text.
/// This is a common case for text Ion and for synthesizing tokens.
#[inline]
pub fn text_token<T: Into<Rc<str>>>(text: T) -> OwnedSymbolToken {
OwnedSymbolToken::new(Some(text.into()), None, None)
}
impl PartialEq for OwnedSymbolToken {
fn eq(&self, other: &Self) -> bool {
if other.text.is_some() || self.text.is_some() {
// if either side has text, we only compare text
other.text == self.text
} else {
// no text--so the sources must be the same (all local symbols with no source are the same)
other.source == self.source
}
}
}
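// Illustrative consequences of the equality rules above (a sketch, not
// doc-tests): text always wins when present, and SID-only tokens with no
// import source all compare equal.
//
//   text_token("a") == text_token("a")           // true: same text
//   text_token("a") == local_sid_token(10)       // false: text vs. no text
//   local_sid_token(10) == local_sid_token(99)   // true: no text, no source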
impl Eq for OwnedSymbolToken {}
impl<T: Into<Rc<str>>> From<T> for OwnedSymbolToken {
/// Constructs an owned token that has only text.
fn from(text: T) -> Self {
text_token(text)
}
}
impl SymbolToken for OwnedSymbolToken {
type ImportSource = OwnedImportSource;
fn text(&self) -> Option<&str> {
self.text.as_ref().map(|s| s.as_ref())
}
fn local_sid(&self) -> Option<usize> {
self.local_sid
}
fn source(&self) -> Option<&Self::ImportSource> {
self.source.as_ref()
}
fn with_text(self, text: &'static str) -> Self {
OwnedSymbolToken::new(Some(Rc::from(text)), self.local_sid, self.source)
}
fn with_local_sid(self, local_sid: SymbolId) -> Self {
OwnedSymbolToken::new(self.text, Some(local_sid), self.source)
}
fn with_source(self, table: &'static str, sid: SymbolId) -> Self {
OwnedSymbolToken::new(
self.text,
self.local_sid,
Some(OwnedImportSource::new(table, sid)),
)
}
fn text_token(text: &'static str) -> Self {
OwnedSymbolToken::new(Some(Rc::from(text)), None, None)
}
fn local_sid_token(local_sid: usize) -> Self {
OwnedSymbolToken::new(None, Some(local_sid), None)
}
}
/// An owned implementation of [`Builder`].
impl Builder for OwnedElement {
type Element = OwnedElement;
type SymbolToken = OwnedSymbolToken;
type Sequence = OwnedSequence;
type Struct = OwnedStruct;
type ImportSource = OwnedImportSource;
fn new_null(e_type: IonType) -> Self::Element {
OwnedValue::Null(e_type).into()
}
fn new_bool(bool: bool) -> Self::Element {
OwnedValue::Boolean(bool).into()
}
fn new_string(str: &'static str) -> Self::Element {
OwnedValue::String(str.into()).into()
}
fn new_symbol(sym: Self::SymbolToken) -> Self::Element {
OwnedValue::Symbol(sym).into()
}
fn new_i64(int: i64) -> Self::Element {
OwnedValue::Integer(AnyInt::I64(int)).into()
}
fn new_big_int(big_int: BigInt) -> Self::Element {
OwnedValue::Integer(AnyInt::BigInt(big_int)).into()
}
fn new_decimal(decimal: Decimal) -> Self::Element {
OwnedValue::Decimal(decimal).into()
}
fn new_timestamp(timestamp: Timestamp) -> Self::Element {
OwnedValue::Timestamp(timestamp).into()
}
fn new_f64(float: f64) -> Self::Element {
OwnedValue::Float(float).into()
}
fn new_clob(bytes: &[u8]) -> Self::Element {
OwnedValue::Clob(bytes.into()).into()
}
fn new_blob(bytes: &[u8]) -> Self::Element {
OwnedValue::Blob(bytes.into()).into()
}
fn new_list<I: IntoIterator<Item = Self::Element>>(seq: I) -> Self::Element {
OwnedValue::List(seq.into_iter().collect()).into()
}
fn new_sexp<I: IntoIterator<Item = Self::Element>>(seq: I) -> Self::Element {
OwnedValue::SExpression(seq.into_iter().collect()).into()
}
fn new_struct<
K: Into<Self::SymbolToken>,
V: Into<Self::Element>,
I: IntoIterator<Item = (K, V)>,
>(
structure: I,
) -> Self::Element {
OwnedValue::Struct(structure.into_iter().collect()).into()
}
}
/// An owned implementation of [`Sequence`]
#[derive(Debug, Clone)]
pub struct OwnedSequence {
children: Vec<OwnedElement>,
}
impl OwnedSequence {
pub fn new(children: Vec<OwnedElement>) -> Self {
Self { children }
}
}
impl FromIterator<OwnedElement> for OwnedSequence {
/// Returns an owned sequence from the given iterator of elements.
fn from_iter<I: IntoIterator<Item = OwnedElement>>(iter: I) -> Self {
let mut children: Vec<OwnedElement> = Vec::new();
for elem in iter {
children.push(elem);
}
Self { children }
}
}
impl Sequence for OwnedSequence {
type Element = OwnedElement;
fn iter<'a>(&'a self) -> Box<dyn Iterator<Item = &'a Self::Element> + 'a> {
Box::new(self.children.iter())
}
fn get(&self, index: usize) -> Option<&Self::Element> {
self.children.get(index)
}
fn len(&self) -> usize {
self.children.len()
}
fn is_empty(&self) -> bool {
self.len() == 0
}
}
impl PartialEq for OwnedSequence {
fn eq(&self, other: &Self) -> bool {
self.children == other.children
}
}
impl Eq for OwnedSequence {}
/// An owned implementation of [`Struct`]
#[derive(Debug, Clone)]
pub struct OwnedStruct {
text_fields: HashMap<Rc<str>, Vec<(OwnedSymbolToken, OwnedElement)>>,
no_text_fields: Vec<(OwnedSymbolToken, OwnedElement)>,
}
impl OwnedStruct {
fn eq_text_fields(&self, other: &Self) -> bool {
// check if both the text_fields have same (field_name,value) pairs
self.text_fields.iter().all(|(key, value)| {
value.iter().all(|(_my_s, my_v)| {
other.get_all(key).any(|other_v| my_v == other_v)
}) && value.len() == other.get_all(key).count()
})
}
fn eq_no_text_fields(&self, other: &Self) -> bool {
// check if both the no_text_fields are same values
self.no_text_fields.iter().all(|(my_k, my_v)| {
other
.no_text_fields
.iter()
.any(|(other_k, other_v)| my_k == other_k && my_v == other_v)
})
}
}
impl<K, V> FromIterator<(K, V)> for OwnedStruct
where
K: Into<OwnedSymbolToken>,
V: Into<OwnedElement>,
{
/// Returns an owned struct from the given iterator of field names/values.
fn from_iter<I: IntoIterator<Item = (K, V)>>(iter: I) -> Self {
let mut text_fields: HashMap<Rc<str>, Vec<(OwnedSymbolToken, OwnedElement)>> =
HashMap::new();
let mut no_text_fields: Vec<(OwnedSymbolToken, OwnedElement)> = Vec::new();
for (k, v) in iter {
let key = k.into();
let val = v.into();
match key.text() {
Some(text) => {
let vals = text_fields.entry(text.into()).or_insert(Vec::new());
vals.push((key, val));
}
None => {
no_text_fields.push((key, val));
}
}
}
Self {
text_fields,
no_text_fields,
}
}
}
impl Struct for OwnedStruct {
type FieldName = OwnedSymbolToken;
type Element = OwnedElement;
fn iter<'a>(
&'a self,
) -> Box<dyn Iterator<Item = (&'a Self::FieldName, &'a Self::Element)> + 'a> {
// convert &(k, v) -> (&k, &v)
// flattens the text_fields HashMap and chains it with no_text_fields
// so the iterator yields every field
Box::new(
self.text_fields
.values()
.flatten()
.chain(self.no_text_fields.iter())
.map(|(k, v)| (k, v)),
)
}
fn get<T: AsRef<str>>(&self, field_name: T) -> Option<&Self::Element> {
self.text_fields
.get(field_name.as_ref())?
.last()
.map(|(_s, v)| v)
}
fn get_all<'a, T: AsRef<str>>(
&'a self,
field_name: T,
) -> Box<dyn Iterator<Item = &'a Self::Element> + 'a> {
Box::new(
self.text_fields
.get(field_name.as_ref())
.into_iter()
.flat_map(|v| v.iter())
.map(|(_s, v)| v),
)
}
}
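// Retrieval semantics sketch (based on the impl above): for a struct built
// from vec![("a", 1i64), ("a", 2i64)], `get("a")` returns the last value
// inserted under that name (2), while `get_all("a")` yields both values
// in insertion order.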
impl PartialEq for OwnedStruct {
fn eq(&self, other: &Self) -> bool {
// check if both text_fields and no_text_fields have same length
self.text_fields.len() == other.text_fields.len() && self.no_text_fields.len() == other.no_text_fields.len()
// check that text_fields and no_text_fields agree in both directions;
// a one-directional check is not symmetric once annotations are involved.
// Checking only self against other would give:
// { a:4, a:4 } vs. { a:4, a:a::4 } // returns true
// { a:4, a:a::4 } vs. { a:4, a:4 } // returns false
// so the comparison below runs in both orders.
&& self.eq_text_fields(other) && other.eq_text_fields(self)
&& self.eq_no_text_fields(other) && other.eq_no_text_fields(self)
}
}
impl Eq for OwnedStruct {}
/// Variants for all owned version _values_ within an [`Element`].
#[derive(Debug, Clone, PartialEq)]
pub enum OwnedValue {
Null(IonType),
Integer(AnyInt),
Float(f64),
Decimal(Decimal),
Timestamp(Timestamp),
String(String),
Symbol(OwnedSymbolToken),
Boolean(bool),
Blob(Vec<u8>),
Clob(Vec<u8>),
SExpression(OwnedSequence),
List(OwnedSequence),
Struct(OwnedStruct),
// TODO fill this in with the rest of the value types...
}
/// An owned implementation of [`Element`]
#[derive(Debug, Clone)]
pub struct OwnedElement {
annotations: Vec<OwnedSymbolToken>,
value: OwnedValue,
}
impl OwnedElement {
pub fn new(annotations: Vec<OwnedSymbolToken>, value: OwnedValue) -> Self {
Self { annotations, value }
}
}
impl PartialEq for OwnedElement {
fn eq(&self, other: &Self) -> bool {
self.value == other.value && self.annotations == other.annotations
}
}
impl Eq for OwnedElement {}
impl From<OwnedValue> for OwnedElement {
fn from(val: OwnedValue) -> Self {
Self::new(vec![], val)
}
}
impl From<IonType> for OwnedElement {
fn from(ion_type: IonType) -> Self {
OwnedValue::Null(ion_type).into()
}
}
impl From<i64> for OwnedElement {
fn from(i64_val: i64) -> Self {
OwnedValue::Integer(AnyInt::I64(i64_val)).into()
}
}
impl From<BigInt> for OwnedElement {
fn from(big_int_val: BigInt) -> Self {
OwnedValue::Integer(AnyInt::BigInt(big_int_val)).into()
}
}
impl From<f64> for OwnedElement {
fn from(f64_val: f64) -> Self {
OwnedValue::Float(f64_val).into()
}
}
impl From<Decimal> for OwnedElement {
fn from(decimal_val: Decimal) -> Self {
OwnedValue::Decimal(decimal_val).into()
}
}
impl From<Timestamp> for OwnedElement {
fn from(timestamp_val: Timestamp) -> Self {
OwnedValue::Timestamp(timestamp_val).into()
}
}
impl From<bool> for OwnedElement {
fn from(bool_val: bool) -> Self {
OwnedValue::Boolean(bool_val).into()
}
}
impl From<String> for OwnedElement {
fn from(string_val: String) -> Self {
OwnedValue::String(string_val).into()
}
}
impl From<OwnedSymbolToken> for OwnedElement {
fn from(sym_val: OwnedSymbolToken) -> Self {
OwnedValue::Symbol(sym_val).into()
}
}
impl From<OwnedStruct> for OwnedElement {
fn from(struct_val: OwnedStruct) -> Self {
OwnedValue::Struct(struct_val).into()
}
}
impl Element for OwnedElement {
type SymbolToken = OwnedSymbolToken;
type Sequence = OwnedSequence;
type Struct = OwnedStruct;
type Builder = OwnedElement;
fn ion_type(&self) -> IonType {
use OwnedValue::*;
match &self.value {
Null(t) => *t,
Integer(_) => IonType::Integer,
Float(_) => IonType::Float,
Decimal(_) => IonType::Decimal,
Timestamp(_) => IonType::Timestamp,
String(_) => IonType::String,
Symbol(_) => IonType::Symbol,
Boolean(_) => IonType::Boolean,
Blob(_) => IonType::Blob,
Clob(_) => IonType::Clob,
SExpression(_) => IonType::SExpression,
List(_) => IonType::List,
Struct(_) => IonType::Struct,
}
}
fn annotations<'a>(&'a self) -> Box<dyn Iterator<Item = &'a Self::SymbolToken> + 'a> {
Box::new(self.annotations.iter())
}
fn with_annotations<I: IntoIterator<Item = Self::SymbolToken>>(self, annotations: I) -> Self {
OwnedElement::new(annotations.into_iter().collect(), self.value)
}
fn is_null(&self) -> bool {
match &self.value {
OwnedValue::Null(_) => true,
_ => false,
}
}
fn as_any_int(&self) -> Option<&AnyInt> {
match &self.value {
OwnedValue::Integer(i) => Some(i),
_ => None,
}
}
fn as_f64(&self) -> Option<f64> {
match &self.value {
OwnedValue::Float(f) => Some(*f),
_ => None,
}
}
fn as_decimal(&self) -> Option<&Decimal> {
match &self.value {
OwnedValue::Decimal(d) => Some(d),
_ => None,
}
}
fn | (&self) -> Option<&Timestamp> {
match &self.value {
OwnedValue::Timestamp(t) => Some(t),
_ => None,
}
}
fn as_str(&self) -> Option<&str> {
match &self.value {
OwnedValue::String(text) => Some(text),
OwnedValue::Symbol(sym) => sym.text(),
_ => None,
}
}
fn as_sym(&self) -> Option<&Self::SymbolToken> {
match &self.value {
OwnedValue::Symbol(sym) => Some(sym),
_ => None,
}
}
fn as_bool(&self) -> Option<bool> {
match &self.value {
OwnedValue::Boolean(b) => Some(*b),
_ => None,
}
}
fn as_bytes(&self) -> Option<&[u8]> {
match &self.value {
OwnedValue::Blob(bytes) | OwnedValue::Clob(bytes) => Some(bytes),
_ => None,
}
}
fn as_sequence(&self) -> Option<&Self::Sequence> {
match &self.value {
OwnedValue::SExpression(seq) | OwnedValue::List(seq) => Some(seq),
_ => None,
}
}
fn as_struct(&self) -> Option<&Self::Struct> {
match &self.value {
OwnedValue::Struct(structure) => Some(structure),
_ => None,
}
}
}
#[cfg(test)]
mod value_tests {
use super::*;
use rstest::*;
#[rstest(
elem1,elem2,
case::str(
OwnedElement::new_string("hello"),
"hello".to_string().into()
),
case::sym_with_text(
OwnedElement::new_symbol(text_token("hello")),
text_token("hello").into()
),
case::struct_(
OwnedElement::new_struct(vec![("greetings", OwnedElement::from(OwnedValue::String("hello".into())))].into_iter()),
OwnedStruct::from_iter(vec![("greetings", OwnedElement::from(OwnedValue::String("hello".into())))].into_iter()).into()
),
)]
fn owned_element_accessors(elem1: OwnedElement, elem2: OwnedElement) {
// assert that both ways of constructing the element produce equal values
assert_eq!(elem1, elem2);
}
}
| as_timestamp |
evaluate_DSTC2.py | from metrics import bleu, rouge
import argparse
def get_args():
'''
Parse input arguments:
preds_path: The directory in which labels and predictions files are dumped after inference
kb_path: Path to the knowledge-base file used to build the entity list for Entity-F1
config_id: The config id mentioned in the labels and predictions filenames
'''
parser = argparse.ArgumentParser()
parser.add_argument("--preds_path")
parser.add_argument("--kb_path")
parser.add_argument("--config_id")
args = parser.parse_args()
return args
def read_results(path,num):
with open(path+"/labels"+str(num)+".txt","r") as fp:
l=fp.readlines()
with open(path+"/predictions"+str(num)+".txt","r") as fp:
p=fp.readlines()
return p,l
def exact_match(p,l):
c=0
for pred, gold in zip(p, l):
if pred == gold:
c+=1
print("Per-Resp Acc: ",c/len(l))
def moses_bl_rouge(p,l):
bl = bleu.moses_multi_bleu(p,l)
x = rouge.rouge(p,l)
print('BLEU: %f\nROUGE1-F: %f\nROUGE1-P: %f\nROUGE1-R: %f\nROUGE2-F: %f\nROUGE2-P: %f\nROUGE2-R: %f\nROUGEL-F: %f\nROUGEL-P: %f\nROUGEL-R: %f'%(bl,x['rouge_1/f_score'],x['rouge_1/p_score'],x['rouge_1/r_score'],x['rouge_2/f_score'],
x['rouge_2/p_score'],x['rouge_2/r_score'],x['rouge_l/f_score'],x['rouge_l/p_score'],x['rouge_l/r_score']))
def micro_compute_prf(gold, pred, global_entity_list):
TP, FP, FN = 0, 0, 0
if len(gold)!= 0:
count = 1
for g in gold:
if g in pred:
TP += 1
else:
FN += 1
for p in set(pred):
if p in global_entity_list:
if p not in gold:
FP += 1
else:
count = 0
return TP,FP,FN,count
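# Worked example (illustrative): gold=["rome"], pred=["rome", "paris"],
# global_entity_list={"rome", "paris"} gives TP=1 (rome matched), FP=1
# (paris predicted but not in gold), FN=0, count=1, so this response
# contributes precision 1/2 and recall 1/1 to the micro average.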
def ent_f1(preds,labels,kb_path):
with open(kb_path,'r') as fp:
kb=fp.readlines()
ent=[]
for i in kb:
triples = i.split(' ')
ent.append(triples[1].strip()) | ent.append(triples[3].strip())
ent = set(ent)
ent_list = sorted(ent)
mic_pred=0
les=[]
all_TP=0
all_FP=0
all_FN=0
for i in range(len(labels)):
l = labels[i].strip().split()
le=[]
for j in l:
if j in ent_list:
le.append(j)
les.append(le)
p = preds[i].strip().split()
tp,fp,fn,c = micro_compute_prf(le,p,ent_list)
all_TP+=tp
all_FP+=fp
all_FN+=fn
mic_pred+=c
mic_prec = all_TP/float(all_TP+all_FP)
mic_rec = all_TP/float(all_TP + all_FN)
mic_f1=2 * mic_prec * mic_rec / float(mic_prec + mic_rec)
print("Entity-F1:",mic_f1)
if __name__=='__main__':
args = get_args()
result_path = args.preds_path
kb_path = args.kb_path
config_id = args.config_id
print(config_id,"\n")
preds,labels = read_results(result_path,config_id)
exact_match(preds,labels)
moses_bl_rouge(preds,labels)
ent_f1(preds,labels,kb_path) | |
_load_balancer_outbound_rules_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerOutboundRulesOperations:
"""LoadBalancerOutboundRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2021_02_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs
) -> AsyncIterable["_models.LoadBalancerOutboundRuleListResult"]:
"""Gets all the outbound rules in a load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LoadBalancerOutboundRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_02_01.models.LoadBalancerOutboundRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancerOutboundRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
|
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules'} # type: ignore
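# Illustrative usage of the pager above (a sketch; assumes an authenticated
# client that exposes this operations group, e.g. as
# `client.load_balancer_outbound_rules`):
#
#   async for rule in client.load_balancer_outbound_rules.list("my-rg", "my-lb"):
#       print(rule.name)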
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
outbound_rule_name: str,
**kwargs
) -> "_models.OutboundRule":
"""Gets the specified load balancer outbound rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param outbound_rule_name: The name of the outbound rule.
:type outbound_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OutboundRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_02_01.models.OutboundRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'outboundRuleName': self._serialize.url("outbound_rule_name", outbound_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('OutboundRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules/{outboundRuleName}'} # type: ignore
| deserialized = self._deserialize('LoadBalancerOutboundRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem) |
protocol.rs | // This file is generated by rust-protobuf 2.6.0. Do not edit
// @generated
// https://github.com/Manishearth/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy)]
#![cfg_attr(rustfmt, rustfmt_skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unsafe_code)]
#![allow(unused_imports)]
#![allow(unused_results)]
use protobuf::Message as Message_imported_for_functions;
use protobuf::ProtobufEnum as ProtobufEnum_imported_for_functions;
#[derive(PartialEq,Clone,Default)]
pub struct Message {
// message oneof groups
pub protocol: ::std::option::Option<Message_oneof_protocol>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Message {
fn default() -> &'a Message {
<Message as ::protobuf::Message>::default_instance()
}
}
#[derive(Clone,PartialEq,Debug)]
pub enum Message_oneof_protocol {
lorawan(super::lorawan::Message),
}
impl Message {
pub fn new() -> Message {
::std::default::Default::default()
}
// .lorawan.Message lorawan = 1;
pub fn get_lorawan(&self) -> &super::lorawan::Message {
match self.protocol {
::std::option::Option::Some(Message_oneof_protocol::lorawan(ref v)) => v,
_ => super::lorawan::Message::default_instance(),
}
}
pub fn clear_lorawan(&mut self) {
self.protocol = ::std::option::Option::None;
}
pub fn has_lorawan(&self) -> bool {
match self.protocol {
::std::option::Option::Some(Message_oneof_protocol::lorawan(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_lorawan(&mut self, v: super::lorawan::Message) {
self.protocol = ::std::option::Option::Some(Message_oneof_protocol::lorawan(v))
}
// Mutable pointer to the field.
pub fn mut_lorawan(&mut self) -> &mut super::lorawan::Message {
if let ::std::option::Option::Some(Message_oneof_protocol::lorawan(_)) = self.protocol {
} else {
self.protocol = ::std::option::Option::Some(Message_oneof_protocol::lorawan(super::lorawan::Message::new()));
}
match self.protocol {
::std::option::Option::Some(Message_oneof_protocol::lorawan(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_lorawan(&mut self) -> super::lorawan::Message {
if self.has_lorawan() {
match self.protocol.take() {
::std::option::Option::Some(Message_oneof_protocol::lorawan(v)) => v,
_ => panic!(),
}
} else {
super::lorawan::Message::new()
}
}
}
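// Oneof accessor sketch (hypothetical values; mirrors the generated API above):
//
//   let mut m = Message::new();
//   m.set_lorawan(super::lorawan::Message::new());
//   assert!(m.has_lorawan());
//   let inner = m.take_lorawan(); // moves the payload out and clears the oneof
//   assert!(!m.has_lorawan());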
impl ::protobuf::Message for Message {
fn is_initialized(&self) -> bool {
if let Some(Message_oneof_protocol::lorawan(ref v)) = self.protocol {
if !v.is_initialized() {
return false;
}
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.protocol = ::std::option::Option::Some(Message_oneof_protocol::lorawan(is.read_message()?));
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let ::std::option::Option::Some(ref v) = self.protocol {
match v {
&Message_oneof_protocol::lorawan(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
};
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {
if let ::std::option::Option::Some(ref v) = self.protocol {
match v {
&Message_oneof_protocol::lorawan(ref v) => {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
};
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &::std::any::Any {
self as &::std::any::Any
}
fn as_any_mut(&mut self) -> &mut ::std::any::Any {
self as &mut ::std::any::Any
}
fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Message {
Message::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,
};
unsafe {
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, super::lorawan::Message>(
"lorawan",
Message::has_lorawan,
Message::get_lorawan,
));
::protobuf::reflect::MessageDescriptor::new::<Message>(
"Message",
fields,
file_descriptor_proto()
)
})
}
}
fn default_instance() -> &'static Message {
static mut instance: ::protobuf::lazy::Lazy<Message> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const Message,
};
unsafe {
instance.get(Message::new)
}
}
}
impl ::protobuf::Clear for Message {
fn clear(&mut self) {
self.protocol = ::std::option::Option::None;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Message {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Message {
fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {
::protobuf::reflect::ProtobufValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct RxMetadata {
// message oneof groups
pub protocol: ::std::option::Option<RxMetadata_oneof_protocol>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a RxMetadata {
fn default() -> &'a RxMetadata {
<RxMetadata as ::protobuf::Message>::default_instance()
}
}
#[derive(Clone,PartialEq,Debug)]
pub enum RxMetadata_oneof_protocol {
lorawan(super::lorawan::Metadata),
}
impl RxMetadata {
pub fn new() -> RxMetadata {
::std::default::Default::default()
}
// .lorawan.Metadata lorawan = 1;
pub fn get_lorawan(&self) -> &super::lorawan::Metadata {
match self.protocol {
::std::option::Option::Some(RxMetadata_oneof_protocol::lorawan(ref v)) => v,
_ => super::lorawan::Metadata::default_instance(),
}
}
pub fn clear_lorawan(&mut self) {
self.protocol = ::std::option::Option::None;
}
pub fn has_lorawan(&self) -> bool {
match self.protocol {
::std::option::Option::Some(RxMetadata_oneof_protocol::lorawan(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_lorawan(&mut self, v: super::lorawan::Metadata) {
self.protocol = ::std::option::Option::Some(RxMetadata_oneof_protocol::lorawan(v))
}
// Mutable pointer to the field.
pub fn mut_lorawan(&mut self) -> &mut super::lorawan::Metadata {
if let ::std::option::Option::Some(RxMetadata_oneof_protocol::lorawan(_)) = self.protocol {
} else {
self.protocol = ::std::option::Option::Some(RxMetadata_oneof_protocol::lorawan(super::lorawan::Metadata::new()));
}
match self.protocol {
::std::option::Option::Some(RxMetadata_oneof_protocol::lorawan(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_lorawan(&mut self) -> super::lorawan::Metadata {
if self.has_lorawan() {
match self.protocol.take() {
::std::option::Option::Some(RxMetadata_oneof_protocol::lorawan(v)) => v,
_ => panic!(),
}
} else {
super::lorawan::Metadata::new()
}
}
}
impl ::protobuf::Message for RxMetadata {
fn is_initialized(&self) -> bool {
if let Some(RxMetadata_oneof_protocol::lorawan(ref v)) = self.protocol {
if !v.is_initialized() {
return false;
}
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.protocol = ::std::option::Option::Some(RxMetadata_oneof_protocol::lorawan(is.read_message()?));
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let ::std::option::Option::Some(ref v) = self.protocol {
match v {
&RxMetadata_oneof_protocol::lorawan(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
};
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {
if let ::std::option::Option::Some(ref v) = self.protocol {
match v {
&RxMetadata_oneof_protocol::lorawan(ref v) => {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
};
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &::std::any::Any {
self as &::std::any::Any
}
fn as_any_mut(&mut self) -> &mut ::std::any::Any {
self as &mut ::std::any::Any
}
fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> RxMetadata {
RxMetadata::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,
};
unsafe {
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, super::lorawan::Metadata>(
"lorawan",
RxMetadata::has_lorawan,
RxMetadata::get_lorawan,
));
::protobuf::reflect::MessageDescriptor::new::<RxMetadata>(
"RxMetadata",
fields,
file_descriptor_proto()
)
})
}
}
fn default_instance() -> &'static RxMetadata {
static mut instance: ::protobuf::lazy::Lazy<RxMetadata> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const RxMetadata,
};
unsafe {
instance.get(RxMetadata::new)
}
}
}
impl ::protobuf::Clear for RxMetadata {
fn clear(&mut self) {
self.protocol = ::std::option::Option::None;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for RxMetadata {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for RxMetadata {
fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {
::protobuf::reflect::ProtobufValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct TxConfiguration {
// message oneof groups
pub protocol: ::std::option::Option<TxConfiguration_oneof_protocol>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a TxConfiguration {
fn default() -> &'a TxConfiguration {
<TxConfiguration as ::protobuf::Message>::default_instance()
}
}
#[derive(Clone,PartialEq,Debug)]
pub enum TxConfiguration_oneof_protocol {
lorawan(super::lorawan::TxConfiguration),
}
impl TxConfiguration {
pub fn new() -> TxConfiguration {
::std::default::Default::default()
}
// .lorawan.TxConfiguration lorawan = 1;
pub fn get_lorawan(&self) -> &super::lorawan::TxConfiguration {
match self.protocol {
::std::option::Option::Some(TxConfiguration_oneof_protocol::lorawan(ref v)) => v,
_ => super::lorawan::TxConfiguration::default_instance(),
}
}
pub fn clear_lorawan(&mut self) {
self.protocol = ::std::option::Option::None;
}
pub fn has_lorawan(&self) -> bool {
match self.protocol {
::std::option::Option::Some(TxConfiguration_oneof_protocol::lorawan(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_lorawan(&mut self, v: super::lorawan::TxConfiguration) {
self.protocol = ::std::option::Option::Some(TxConfiguration_oneof_protocol::lorawan(v))
}
// Mutable pointer to the field.
pub fn mut_lorawan(&mut self) -> &mut super::lorawan::TxConfiguration {
if let ::std::option::Option::Some(TxConfiguration_oneof_protocol::lorawan(_)) = self.protocol {
} else {
self.protocol = ::std::option::Option::Some(TxConfiguration_oneof_protocol::lorawan(super::lorawan::TxConfiguration::new()));
}
match self.protocol {
::std::option::Option::Some(TxConfiguration_oneof_protocol::lorawan(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_lorawan(&mut self) -> super::lorawan::TxConfiguration {
if self.has_lorawan() {
match self.protocol.take() {
::std::option::Option::Some(TxConfiguration_oneof_protocol::lorawan(v)) => v,
_ => panic!(),
}
} else {
super::lorawan::TxConfiguration::new()
}
}
}
impl ::protobuf::Message for TxConfiguration {
fn is_initialized(&self) -> bool {
if let Some(TxConfiguration_oneof_protocol::lorawan(ref v)) = self.protocol {
if !v.is_initialized() {
return false;
}
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.protocol = ::std::option::Option::Some(TxConfiguration_oneof_protocol::lorawan(is.read_message()?));
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let ::std::option::Option::Some(ref v) = self.protocol {
match v {
&TxConfiguration_oneof_protocol::lorawan(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
};
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {
if let ::std::option::Option::Some(ref v) = self.protocol {
match v {
&TxConfiguration_oneof_protocol::lorawan(ref v) => {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
};
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(()) |
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &::std::any::Any {
self as &::std::any::Any
}
fn as_any_mut(&mut self) -> &mut ::std::any::Any {
self as &mut ::std::any::Any
}
fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> TxConfiguration {
TxConfiguration::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,
};
unsafe {
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, super::lorawan::TxConfiguration>(
"lorawan",
TxConfiguration::has_lorawan,
TxConfiguration::get_lorawan,
));
::protobuf::reflect::MessageDescriptor::new::<TxConfiguration>(
"TxConfiguration",
fields,
file_descriptor_proto()
)
})
}
}
fn default_instance() -> &'static TxConfiguration {
static mut instance: ::protobuf::lazy::Lazy<TxConfiguration> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const TxConfiguration,
};
unsafe {
instance.get(TxConfiguration::new)
}
}
}
impl ::protobuf::Clear for TxConfiguration {
fn clear(&mut self) {
self.protocol = ::std::option::Option::None;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for TxConfiguration {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for TxConfiguration {
fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {
::protobuf::reflect::ProtobufValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct ActivationMetadata {
// message oneof groups
pub protocol: ::std::option::Option<ActivationMetadata_oneof_protocol>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ActivationMetadata {
fn default() -> &'a ActivationMetadata {
<ActivationMetadata as ::protobuf::Message>::default_instance()
}
}
#[derive(Clone,PartialEq,Debug)]
pub enum ActivationMetadata_oneof_protocol {
lorawan(super::lorawan::ActivationMetadata),
}
impl ActivationMetadata {
pub fn new() -> ActivationMetadata {
::std::default::Default::default()
}
// .lorawan.ActivationMetadata lorawan = 1;
pub fn get_lorawan(&self) -> &super::lorawan::ActivationMetadata {
match self.protocol {
::std::option::Option::Some(ActivationMetadata_oneof_protocol::lorawan(ref v)) => v,
_ => super::lorawan::ActivationMetadata::default_instance(),
}
}
pub fn clear_lorawan(&mut self) {
self.protocol = ::std::option::Option::None;
}
pub fn has_lorawan(&self) -> bool {
match self.protocol {
::std::option::Option::Some(ActivationMetadata_oneof_protocol::lorawan(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_lorawan(&mut self, v: super::lorawan::ActivationMetadata) {
self.protocol = ::std::option::Option::Some(ActivationMetadata_oneof_protocol::lorawan(v))
}
// Mutable pointer to the field.
pub fn mut_lorawan(&mut self) -> &mut super::lorawan::ActivationMetadata {
if let ::std::option::Option::Some(ActivationMetadata_oneof_protocol::lorawan(_)) = self.protocol {
} else {
self.protocol = ::std::option::Option::Some(ActivationMetadata_oneof_protocol::lorawan(super::lorawan::ActivationMetadata::new()));
}
match self.protocol {
::std::option::Option::Some(ActivationMetadata_oneof_protocol::lorawan(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_lorawan(&mut self) -> super::lorawan::ActivationMetadata {
if self.has_lorawan() {
match self.protocol.take() {
::std::option::Option::Some(ActivationMetadata_oneof_protocol::lorawan(v)) => v,
_ => panic!(),
}
} else {
super::lorawan::ActivationMetadata::new()
}
}
}
impl ::protobuf::Message for ActivationMetadata {
fn is_initialized(&self) -> bool {
if let Some(ActivationMetadata_oneof_protocol::lorawan(ref v)) = self.protocol {
if !v.is_initialized() {
return false;
}
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.protocol = ::std::option::Option::Some(ActivationMetadata_oneof_protocol::lorawan(is.read_message()?));
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if let ::std::option::Option::Some(ref v) = self.protocol {
match v {
&ActivationMetadata_oneof_protocol::lorawan(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
};
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {
if let ::std::option::Option::Some(ref v) = self.protocol {
match v {
&ActivationMetadata_oneof_protocol::lorawan(ref v) => {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
};
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &::std::any::Any {
self as &::std::any::Any
}
fn as_any_mut(&mut self) -> &mut ::std::any::Any {
self as &mut ::std::any::Any
}
fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ActivationMetadata {
ActivationMetadata::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,
};
unsafe {
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, super::lorawan::ActivationMetadata>(
"lorawan",
ActivationMetadata::has_lorawan,
ActivationMetadata::get_lorawan,
));
::protobuf::reflect::MessageDescriptor::new::<ActivationMetadata>(
"ActivationMetadata",
fields,
file_descriptor_proto()
)
})
}
}
fn default_instance() -> &'static ActivationMetadata {
static mut instance: ::protobuf::lazy::Lazy<ActivationMetadata> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ActivationMetadata,
};
unsafe {
instance.get(ActivationMetadata::new)
}
}
}
impl ::protobuf::Clear for ActivationMetadata {
fn clear(&mut self) {
self.protocol = ::std::option::Option::None;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ActivationMetadata {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ActivationMetadata {
fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {
::protobuf::reflect::ProtobufValueRef::Message(self)
}
}
static file_descriptor_proto_data: &'static [u8] = b"\
\n7github.com/TheThingsNetwork/api/protocol/protocol.proto\x12\x08protoc\
ol\x1a-github.com/gogo/protobuf/gogoproto/gogo.proto\x1a>github.com/TheT\
hingsNetwork/api/protocol/lorawan/lorawan.proto\"P\n\x07Message\x129\n\
\x07lorawan\x18\x01\x20\x01(\x0b2\x10.lorawan.MessageH\0R\x07lorawanB\
\x0b\xe2\xde\x1f\x07LoRaWANB\n\n\x08protocol\"T\n\nRxMetadata\x12:\n\x07\
lorawan\x18\x01\x20\x01(\x0b2\x11.lorawan.MetadataH\0R\x07lorawanB\x0b\
\xe2\xde\x1f\x07LoRaWANB\n\n\x08protocol\"`\n\x0fTxConfiguration\x12A\n\
\x07lorawan\x18\x01\x20\x01(\x0b2\x18.lorawan.TxConfigurationH\0R\x07lor\
awanB\x0b\xe2\xde\x1f\x07LoRaWANB\n\n\x08protocol\"f\n\x12ActivationMeta\
data\x12D\n\x07lorawan\x18\x01\x20\x01(\x0b2\x1b.lorawan.ActivationMetad\
ataH\0R\x07lorawanB\x0b\xe2\xde\x1f\x07LoRaWANB\n\n\x08protocolB~\n!org.\
thethingsnetwork.api.protocolB\rProtocolProtoP\x01Z(github.com/TheThings\
Network/api/protocol\xaa\x02\x1dTheThingsNetwork.API.Protocolb\x06proto3\
";
static mut file_descriptor_proto_lazy: ::protobuf::lazy::Lazy<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ::protobuf::descriptor::FileDescriptorProto,
};
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
unsafe {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}
} | } |
FeedStyle.ts | import styled, { css } from 'styled-components';
export const StyledDiv = styled.div`
position: relative;
min-height: 300px;
`;
export const StyledArticle = styled.article`
@keyframes fadeIn {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
background-color: rgb(255, 255, 255);
border: 1px solid rgb(219, 219, 219);
border-radius: 10px;
animation: fadeIn 1s;
transition: box-shadow 0.3s, transform 0.5s;
&.selected {
box-shadow: 5px 5px 10px rgb(219, 219, 219);
}
${props =>
props.width < 800
? css`
margin-top: 20px;
border: 1px solid rgb(219, 219, 219);
`
: css`
margin-top: 50px;
`}
${props =>
props.width < 800
? css`
& {
margin-left: 0;
}
`
: css`
display: inline-block;
box-sizing: border-box;
width: 32%;
& {
margin-left: 2%;
}
&:nth-of-type(3n + 1) {
margin-left: 0;
}
`}
`;
export const StyledSpinnerDiv = styled.div`
position: absolute;
top: 50%;
left: 52%;
`;
export const StyledPreviewDiv = styled.div`
position: relative;
margin: 30px auto;
max-width: 670px;
border: 1px solid rgb(219, 219, 219);
`;
export const StyledCommentDiv = styled.div`
display: flex;
align-items: center;
justify-content: center;
padding: 20px 0;
p {
display: inline-block;
margin: 0 20px;
padding: 5px 40px;
background-color: rgb(51, 154, 240);
border-radius: 5px;
font-size: 20px;
font-weight: bold;
color: rgb(250, 250, 250);
}
a {
display: inline-block;
margin-top: 10px;
vertical-align: bottom;
font-size: 60px;
color: rgb(51, 154, 240);
}
`;
export const StyledPostDiv = styled.div`
display: inline-block;
margin-left: 2.5%;
margin-top: 2.5%;
width: 30%;
padding-bottom: 30%;
background-repeat: no-repeat;
background-position: 50% 50%;
background-size: cover;
background-image: url(${props => `${parseImagePath(props.image[0])}`});
@keyframes fadeIn {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
animation: fadeIn 2s;
`;
function | (imageUrl: string) {
if (imageUrl !== null && imageUrl !== undefined) {
if (imageUrl.split('\\')[1] !== undefined) {
return imageUrl.split('\\')[1];
} else if (imageUrl.split('/')[1] !== undefined) {
return imageUrl.split('/')[1];
}
}
}
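// Illustrative behavior (hypothetical inputs): the helper keeps only the
// second path segment, whichever separator the stored path uses.
//
//   parseImagePath('uploads\\cat.jpg') // 'cat.jpg'
//   parseImagePath('uploads/cat.jpg')  // 'cat.jpg'
//   parseImagePath('cat.jpg')          // undefined (no separator present)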
export const StyledLastComment = styled.p`
margin: 0;
text-align: center;
line-height: 80px;
`; | parseImagePath |
sync.js | Calendar.ns('Controllers').Sync = (function() {
/**
* Handles all synchronization related
* tasks. The intent is that this will
* be the focal point for any view
* to observe sync events and this
* controller will decide when to actually
* tell the stores when to sync.
*/
function Sync(app) {
this.app = app;
this.pending = 0;
Calendar.Responder.call(this);
}
Sync.prototype = {
__proto__: Calendar.Responder.prototype,
startEvent: 'syncStart',
completeEvent: 'syncComplete',
_incrementPending: function() {
if (!this.pending) {
this.emit('syncStart');
}
this.pending++;
},
_resolvePending: function() {
if (!(--this.pending)) {
this.emit('syncComplete');
}
},
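/**
 * The two helpers above implement a simple reference count: the first
 * increment emits 'syncStart', and 'syncComplete' fires only when every
 * outstanding account/calendar sync has resolved, so overlapping syncs
 * produce a single start/complete pair.
 */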
/**
* Sync all accounts, calendars, events.
* The optional callback, when given, is attached as a
* one-shot 'syncComplete' listener; it is equivalent to:
*
* controller.once('syncComplete', cb);
*
*/
all: function(callback) { | this.once('syncComplete', callback);
}
if (this.app.offline()) {
this.emit('offline');
this.emit('syncComplete');
return;
}
var account = this.app.store('Account');
account.all(function(err, list) {
for (var key in list) {
this.account(list[key]);
}
// If we have nothing to sync
if (!this.pending) {
this.emit('syncComplete');
}
}.bind(this));
},
/**
* Initiates a sync for a single calendar.
*
* @param {Object} account parent of calendar.
* @param {Object} calendar specific calendar to sync.
* @param {Function} [callback] optional callback.
*/
calendar: function(account, calendar, callback) {
var store = this.app.store('Calendar');
var self = this;
this._incrementPending();
store.sync(account, calendar, function(err) {
self._resolvePending();
if (callback)
callback(err);
});
},
/**
* Initiates a sync of a single account and all
* associated calendars (calendars that exist after
* the full sync of the account itself).
*
* @param {Object} account sync target.
* @param {Function} [callback] optional callback.
*/
account: function(account, callback) {
var accountStore = this.app.store('Account');
var calendarStore = this.app.store('Calendar');
var self = this;
this._incrementPending();
accountStore.sync(account, function(err) {
var pending = 0;
function next() {
if (!(--pending)) {
self._resolvePending();
if (callback)
callback();
}
}
function fetchCalendars(err, calendars) {
if (err) {
// release our pending slot so 'syncComplete' can still fire
self._resolvePending();
if (callback)
callback(err);
return;
}
for (var key in calendars) {
pending++;
self.calendar(account, calendars[key], next);
}
// account with no calendars: resolve immediately
if (!pending) {
self._resolvePending();
if (callback)
callback();
}
}
// find all calendars
calendarStore.remotesByAccount(
account._id,
fetchCalendars
);
});
}
};
return Sync;
}()); | if (callback) { |
mini-editor-hike.tsx | import React from "react"
import { MiniEditorTween } from "./mini-editor-tween"
import { EditorStep } from "./use-snapshots"
import { CodeProps } from "@code-hike/smooth-code"
import { EditorFrameProps } from "./editor-frame"
export { MiniEditorHike, MiniEditorHikeProps, EditorStep }
type MiniEditorHikeProps = {
steps: EditorStep[]
progress: number
backward: boolean
frameProps?: Partial<EditorFrameProps>
codeProps?: Partial<CodeProps>
}
function MiniEditorHike({
steps = [],
progress = 0,
backward = false,
frameProps,
codeProps,
}: MiniEditorHikeProps) {
const prevIndex = clamp(
Math.floor(progress),
0,
steps.length - 1 | steps.length - 1
)
const prev = steps[prevIndex]
const next = steps[nextIndex]
const t = clamp(progress - prevIndex, 0, steps.length - 1)
return (
<MiniEditorTween
frameProps={frameProps}
codeProps={codeProps}
prev={prev}
next={next}
backward={backward}
t={t}
/>
)
}
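// Worked example (illustrative): with 3 steps and progress = 1.4, prevIndex
// clamps to 1, nextIndex to 2, and t = 0.4, so the editor renders the tween
// 40% of the way from step 1 to step 2.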
function clamp(a: number, min: number, max: number) {
return Math.max(Math.min(a, max), min)
} | )
const nextIndex = clamp(
prevIndex + 1,
0, |
test_result_directories_extended.py | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 10
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_9_0_0
from isi_sdk_9_0_0.models.result_directories_extended import ResultDirectoriesExtended # noqa: E501
from isi_sdk_9_0_0.rest import ApiException
class TestResultDirectoriesExtended(unittest.TestCase):
"""ResultDirectoriesExtended unit test stubs"""
def setUp(self):
pass
def tearDown(self):
|
def testResultDirectoriesExtended(self):
"""Test ResultDirectoriesExtended"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_9_0_0.models.result_directories_extended.ResultDirectoriesExtended() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| pass |
context.go | package httpserver
import (
"bytes"
"fmt"
"io/ioutil"
"net"
"net/http"
"net/url"
"path"
"strings"
"text/template"
"time"
"github.com/russross/blackfriday"
"os"
)
// This file contains the context and functions available for
// use in the templates.
// Context is the context with which Caddy templates are executed.
type Context struct {
Root http.FileSystem
Req *http.Request
URL *url.URL
}
// Include returns the contents of filename relative to the site root.
func (c Context) Include(filename string) (string, error) {
return ContextInclude(filename, c, c.Root)
}
// Now returns the current timestamp in the specified format.
func (c Context) Now(format string) string {
return time.Now().Format(format)
}
// NowDate returns the current date/time that can be used
// in other time functions.
func (c Context) NowDate() time.Time {
return time.Now()
}
// Cookie gets the value of a cookie with name name.
func (c Context) Cookie(name string) string {
cookies := c.Req.Cookies()
for _, cookie := range cookies {
if cookie.Name == name {
return cookie.Value
}
}
return ""
}
// Header gets the value of a request header with field name.
func (c Context) Header(name string) string {
return c.Req.Header.Get(name)
}
// Env gets a map of the environment variables.
func (c Context) Env() map[string]string {
osEnv := os.Environ()
envVars := make(map[string]string, len(osEnv))
for _, env := range osEnv {
data := strings.SplitN(env, "=", 2)
if len(data) == 2 && len(data[0]) > 0 {
envVars[data[0]] = data[1]
}
}
return envVars
}
// IP gets the (remote) IP address of the client making the request.
func (c Context) IP() string {
ip, _, err := net.SplitHostPort(c.Req.RemoteAddr)
if err != nil {
return c.Req.RemoteAddr
}
return ip
}
// URI returns the raw, unprocessed request URI (including query
// string and hash) obtained directly from the Request-Line of
// the HTTP request.
func (c Context) URI() string {
return c.Req.RequestURI
}
// Host returns the hostname portion of the Host header
// from the HTTP request.
func (c Context) Host() (string, error) {
host, _, err := net.SplitHostPort(c.Req.Host)
if err != nil {
if !strings.Contains(c.Req.Host, ":") {
// common with sites served on the default port 80
return c.Req.Host, nil
}
return "", err
}
return host, nil
}
// Port returns the port portion of the Host header if specified.
func (c Context) Port() (string, error) {
_, port, err := net.SplitHostPort(c.Req.Host)
if err != nil {
if !strings.Contains(c.Req.Host, ":") {
// common with sites served on the default port 80
return "80", nil
}
return "", err
}
return port, nil
}
// Method returns the method (GET, POST, etc.) of the request.
func (c Context) Method() string {
return c.Req.Method
}
// PathMatches returns true if the path portion of the request
// URL matches pattern.
func (c Context) PathMatches(pattern string) bool {
return Path(c.Req.URL.Path).Matches(pattern)
}
// Truncate truncates the input string to the given length.
// If length is negative, it returns that many characters
// starting from the end of the string. If the absolute value
// of length is greater than len(input), the whole input is
// returned.
func (c Context) Truncate(input string, length int) string {
if length < 0 && len(input)+length > 0 {
return input[len(input)+length:]
}
if length >= 0 && len(input) > length {
return input[:length]
}
return input
}
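// Illustrative examples of the truncation rules above (values are made up):
//
//	c.Truncate("hello world", 5)  // "hello"
//	c.Truncate("hello world", -5) // "world"
//	c.Truncate("hi", 10)          // "hi"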
// StripHTML returns s without HTML tags. It is fairly naive
// but works with most valid HTML inputs.
func (c Context) StripHTML(s string) string {
var buf bytes.Buffer
var inTag, inQuotes bool
var tagStart int
for i, ch := range s {
if inTag {
if ch == '>' && !inQuotes {
inTag = false
} else if ch == '<' && !inQuotes {
// false start
buf.WriteString(s[tagStart:i])
tagStart = i
} else if ch == '"' {
inQuotes = !inQuotes
}
continue
}
if ch == '<' {
inTag = true
tagStart = i
continue
}
buf.WriteRune(ch)
}
if inTag {
// false start
buf.WriteString(s[tagStart:])
}
return buf.String()
}
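// For example (illustrative input), c.StripHTML(`<p class="x">hi</p> there`)
// returns "hi there".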
// Ext returns the suffix beginning at the final dot in the final
// slash-separated element of the pathStr (or in other words, the
// file extension).
func (c Context) Ext(pathStr string) string {
return path.Ext(pathStr)
}
// StripExt returns the input string without the extension,
// which is the suffix starting with the final '.' character
// but not before the final path separator ('/') character.
// If there is no extension, the whole input is returned.
func (c Context) StripExt(path string) string {
for i := len(path) - 1; i >= 0 && path[i] != '/'; i-- {
if path[i] == '.' {
return path[:i]
}
}
return path
}
// Replace replaces instances of find in input with replacement.
func (c Context) Replace(input, find, replacement string) string {
return strings.Replace(input, find, replacement, -1)
}
// Markdown returns the HTML contents of the markdown contained in filename
// (relative to the site root).
func (c Context) Markdown(filename string) (string, error) {
body, err := c.Include(filename)
if err != nil {
return "", err
}
renderer := blackfriday.HtmlRenderer(0, "", "")
extns := 0
extns |= blackfriday.EXTENSION_TABLES
extns |= blackfriday.EXTENSION_FENCED_CODE
extns |= blackfriday.EXTENSION_STRIKETHROUGH
extns |= blackfriday.EXTENSION_DEFINITION_LISTS
markdown := blackfriday.Markdown([]byte(body), renderer, extns)
return string(markdown), nil
}
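// A hypothetical template usage (the file name is illustrative):
//
//	{{.Markdown "about.md"}}
//
// renders the markdown file as HTML in place.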
// ContextInclude opens filename using fs and executes a template with the context ctx.
// This does the same thing that Context.Include() does, but with the ability to provide
// your own context so that the included files can have access to additional fields your
// type may provide. You can embed Context in your type, then override its Include method
// to call this function with ctx being the instance of your type, and fs being Context.Root.
func | (filename string, ctx interface{}, fs http.FileSystem) (string, error) {
file, err := fs.Open(filename)
if err != nil {
return "", err
}
defer file.Close()
body, err := ioutil.ReadAll(file)
if err != nil {
return "", err
}
tpl, err := template.New(filename).Parse(string(body))
if err != nil {
return "", err
}
var buf bytes.Buffer
err = tpl.Execute(&buf, ctx)
if err != nil {
return "", err
}
return buf.String(), nil
}
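// A sketch of the embedding pattern described above; MyContext and Extra are
// illustrative names, not part of this package:
//
//	type MyContext struct {
//		Context
//		Extra string
//	}
//
//	func (c MyContext) Include(filename string) (string, error) {
//		return ContextInclude(filename, c, c.Root)
//	}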
// ToLower will convert the given string to lower case.
func (c Context) ToLower(s string) string {
return strings.ToLower(s)
}
// ToUpper will convert the given string to upper case.
func (c Context) ToUpper(s string) string {
return strings.ToUpper(s)
}
// Split is a pass-through to strings.Split. It will split the first argument at each instance of the separator and return a slice of strings.
func (c Context) Split(s string, sep string) []string {
return strings.Split(s, sep)
}
// Join is a pass-through to strings.Join. It will join the first argument slice with the separator in the second argument and return the result.
func (c Context) Join(a []string, sep string) string {
return strings.Join(a, sep)
}
// Slice will convert the given arguments into a slice.
func (c Context) Slice(elems ...interface{}) []interface{} {
return elems
}
// Map will convert the arguments into a map. It expects alternating string keys and values. This is useful for building more complicated data structures
// if you are using subtemplates or things like that.
func (c Context) Map(values ...interface{}) (map[string]interface{}, error) {
if len(values)%2 != 0 {
return nil, fmt.Errorf("Map expects an even number of arguments")
}
dict := make(map[string]interface{}, len(values)/2)
for i := 0; i < len(values); i += 2 {
key, ok := values[i].(string)
if !ok {
return nil, fmt.Errorf("Map keys must be strings")
}
dict[key] = values[i+1]
}
return dict, nil
}
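// A hypothetical template usage (keys and values are illustrative):
//
//	{{$m := .Map "greeting" "hello" "count" 42}}
//
// builds the map {"greeting": "hello", "count": 42} for use in subtemplates.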
| ContextInclude |
binary_sensor.py | """Support for Fibaro binary sensors."""
import logging
from homeassistant.components.binary_sensor import (
ENTITY_ID_FORMAT, BinarySensorDevice)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_ICON
from . import FIBARO_DEVICES, FibaroDevice
DEPENDENCIES = ['fibaro']
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
'com.fibaro.floodSensor': ['Flood', 'mdi:water', 'flood'],
'com.fibaro.motionSensor': ['Motion', 'mdi:run', 'motion'],
'com.fibaro.doorSensor': ['Door', 'mdi:window-open', 'door'],
'com.fibaro.windowSensor': ['Window', 'mdi:window-open', 'window'],
'com.fibaro.smokeSensor': ['Smoke', 'mdi:smoking', 'smoke'],
'com.fibaro.FGMS001': ['Motion', 'mdi:run', 'motion'],
'com.fibaro.heatDetector': ['Heat', 'mdi:fire', 'heat'],
}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Perform the setup for Fibaro controller devices."""
if discovery_info is None:
return
add_entities(
[FibaroBinarySensor(device)
for device in hass.data[FIBARO_DEVICES]['binary_sensor']], True)
class FibaroBinarySensor(FibaroDevice, BinarySensorDevice):
"""Representation of a Fibaro Binary Sensor."""
def | (self, fibaro_device):
"""Initialize the binary_sensor."""
self._state = None
super().__init__(fibaro_device)
self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)
stype = None
devconf = fibaro_device.device_config
if fibaro_device.type in SENSOR_TYPES:
stype = fibaro_device.type
elif fibaro_device.baseType in SENSOR_TYPES:
stype = fibaro_device.baseType
if stype:
self._device_class = SENSOR_TYPES[stype][2]
self._icon = SENSOR_TYPES[stype][1]
else:
self._device_class = None
self._icon = None
# device_config overrides:
self._device_class = devconf.get(CONF_DEVICE_CLASS,
self._device_class)
self._icon = devconf.get(CONF_ICON, self._icon)
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def device_class(self):
"""Return the device class of the sensor."""
return self._device_class
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state
def update(self):
"""Get the latest data and update the state."""
self._state = self.current_binary_state
| __init__ |
update_portfolio.go | package portfolios
import (
"fmt"
"strings"
"github.com/spf13/cobra"
"github.com/ykdundar/budgie/database"
)
func | (name string, rename string) error {
updatePortfolio, prepErr := database.DBConnection().Prepare(buildQuery(name, rename))
cobra.CheckErr(prepErr)
defer updatePortfolio.Close()
_, updateErr := updatePortfolio.Exec()
cobra.CheckErr(updateErr)
return updateErr
}
func buildQuery(name string, rename string) string {
var querySlc []string
if rename != "" {
querySlc = append(querySlc, fmt.Sprintf("name='%s'", rename))
}
return fmt.Sprintf("UPDATE portfolios SET %s WHERE name = '%s'", strings.Join(querySlc[:], ","), name)
}
| UpdatePortfolio |
Footer.js | import {Link} from "react-router-dom";
export default function | () {
return (
<div className="footer-container">
<div className="footer-controls">
<Link to="/privacy">Privacy</Link>
<Link to="/contact">Contact</Link>
</div>
</div>
);
}
| Footer |
tests.rs | use super::*;
use crate::conductor::api::error::ConductorApiError;
use crate::conductor::api::MockCellConductorApi;
use crate::test_utils::fake_genesis;
use ::fixt::prelude::*;
use error::SysValidationError;
use holochain_keystore::AgentPubKeyExt;
use holochain_serialized_bytes::SerializedBytes;
use holochain_state::prelude::fresh_reader_test;
use holochain_state::prelude::test_cache_env;
use holochain_state::prelude::test_cell_env;
use holochain_wasm_test_utils::TestWasm;
use holochain_zome_types::Header;
use matches::assert_matches;
use observability;
use std::convert::TryFrom;
#[tokio::test(flavor = "multi_thread")]
async fn verify_header_signature_test() {
let keystore = holochain_state::test_utils::test_keystore();
let author = fake_agent_pubkey_1();
let mut header = fixt!(CreateLink);
header.author = author.clone();
let header = Header::CreateLink(header);
let real_signature = author.sign(&keystore, &header).await.unwrap();
let wrong_signature = Signature([1_u8; 64]);
assert_matches!(
verify_header_signature(&wrong_signature, &header).await,
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::Counterfeit(_, _)
))
);
assert_matches!(
verify_header_signature(&real_signature, &header).await,
Ok(())
);
}
#[tokio::test(flavor = "multi_thread")]
async fn check_previous_header() {
let mut header = fixt!(CreateLink);
header.prev_header = fixt!(HeaderHash);
header.header_seq = 1;
assert_matches!(check_prev_header(&header.clone().into()), Ok(()));
header.header_seq = 0;
assert_matches!(
check_prev_header(&header.clone().into()),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PrevHeaderError(PrevHeaderError::InvalidRoot)
))
);
// Dna is always ok because of the type system
let header = fixt!(Dna);
assert_matches!(check_prev_header(&header.into()), Ok(()));
}
#[tokio::test(flavor = "multi_thread")]
async fn check_valid_if_dna_test() {
let tmp = test_cell_env();
let tmp_cache = test_cache_env();
let env: EnvRead = tmp.env().into();
// Test data
let _activity_return = vec![fixt!(HeaderHash)];
// Empty store; header is not a Dna
let header = fixt!(CreateLink);
let workspace = SysValidationWorkspace::new(env.clone().into(), tmp_cache.env());
assert_matches!(
check_valid_if_dna(&header.clone().into(), &workspace).await,
Ok(())
);
let mut header = fixt!(Dna);
assert_matches!(
check_valid_if_dna(&header.clone().into(), &workspace).await,
Ok(())
);
fake_genesis(env.clone().into()).await.unwrap();
env.conn() |
header.author = fake_agent_pubkey_1();
assert_matches!(
check_valid_if_dna(&header.clone().into(), &workspace).await,
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PrevHeaderError(PrevHeaderError::InvalidRoot)
))
);
}
#[tokio::test(flavor = "multi_thread")]
async fn check_previous_timestamp() {
let mut header = fixt!(CreateLink);
let mut prev_header = fixt!(CreateLink);
header.timestamp = Timestamp::now().into();
let before = chrono::Utc::now() - chrono::Duration::weeks(1);
let after = chrono::Utc::now() + chrono::Duration::weeks(1);
prev_header.timestamp = Timestamp::from(before).into();
let r = check_prev_timestamp(&header.clone().into(), &prev_header.clone().into());
assert_matches!(r, Ok(()));
prev_header.timestamp = Timestamp::from(after).into();
let r = check_prev_timestamp(&header.clone().into(), &prev_header.clone().into());
assert_matches!(
r,
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PrevHeaderError(PrevHeaderError::Timestamp)
))
);
}
#[tokio::test(flavor = "multi_thread")]
async fn check_previous_seq() {
let mut header = fixt!(CreateLink);
let mut prev_header = fixt!(CreateLink);
header.header_seq = 2;
prev_header.header_seq = 1;
assert_matches!(
check_prev_seq(&header.clone().into(), &prev_header.clone().into()),
Ok(())
);
prev_header.header_seq = 2;
assert_matches!(
check_prev_seq(&header.clone().into(), &prev_header.clone().into()),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PrevHeaderError(PrevHeaderError::InvalidSeq(_, _)),
),)
);
prev_header.header_seq = 3;
assert_matches!(
check_prev_seq(&header.clone().into(), &prev_header.clone().into()),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PrevHeaderError(PrevHeaderError::InvalidSeq(_, _)),
),)
);
header.header_seq = 0;
prev_header.header_seq = 0;
assert_matches!(
check_prev_seq(&header.clone().into(), &prev_header.clone().into()),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PrevHeaderError(PrevHeaderError::InvalidSeq(_, _)),
),)
);
}
#[tokio::test(flavor = "multi_thread")]
async fn check_entry_type_test() {
let entry_fixt = EntryFixturator::new(Predictable);
let et_fixt = EntryTypeFixturator::new(Predictable);
for (e, et) in entry_fixt.zip(et_fixt).take(4) {
assert_matches!(check_entry_type(&et, &e), Ok(()));
}
// Offset by 1
let entry_fixt = EntryFixturator::new(Predictable);
let mut et_fixt = EntryTypeFixturator::new(Predictable);
et_fixt.next().unwrap();
for (e, et) in entry_fixt.zip(et_fixt).take(4) {
assert_matches!(
check_entry_type(&et, &e),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::EntryType
))
);
}
}
#[tokio::test(flavor = "multi_thread")]
async fn check_entry_hash_test() {
let mut ec = fixt!(Create);
let entry = fixt!(Entry);
let hash = EntryHash::with_data_sync(&entry);
let header: Header = ec.clone().into();
// First check it should have an entry
assert_matches!(check_new_entry_header(&header), Ok(()));
// Safe to unwrap if new entry
let eh = header.entry_data().map(|(h, _)| h).unwrap();
assert_matches!(
check_entry_hash(&eh, &entry).await,
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::EntryHash
))
);
ec.entry_hash = hash;
let header: Header = ec.clone().into();
let eh = header.entry_data().map(|(h, _)| h).unwrap();
assert_matches!(check_entry_hash(&eh, &entry).await, Ok(()));
assert_matches!(
check_new_entry_header(&fixt!(CreateLink).into()),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::NotNewEntry(_)
))
);
}
#[tokio::test(flavor = "multi_thread")]
async fn check_entry_size_test() {
// let tiny = Entry::App(SerializedBytes::from(UnsafeBytes::from(vec![0; 1])));
// let bytes = (0..16_000_000).map(|_| 0u8).into_iter().collect::<Vec<_>>();
// let huge = Entry::App(SerializedBytes::from(UnsafeBytes::from(bytes)));
// assert_matches!(check_entry_size(&tiny), Ok(()));
// assert_matches!(
// check_entry_size(&huge),
// Err(SysValidationError::ValidationOutcome(ValidationOutcome::EntryTooLarge(_, _)))
// );
}
#[tokio::test(flavor = "multi_thread")]
async fn check_update_reference_test() {
let mut ec = fixt!(Create);
let mut eu = fixt!(Update);
let et_cap = EntryType::CapClaim;
let mut aet_fixt = AppEntryTypeFixturator::new(Predictable).map(EntryType::App);
let et_app_1 = aet_fixt.next().unwrap();
let et_app_2 = aet_fixt.next().unwrap();
// Same entry type
ec.entry_type = et_app_1.clone();
eu.entry_type = et_app_1;
assert_matches!(
check_update_reference(&eu, &NewEntryHeaderRef::from(&ec)),
Ok(())
);
// Different app entry type
ec.entry_type = et_app_2;
assert_matches!(
check_update_reference(&eu, &NewEntryHeaderRef::from(&ec)),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::UpdateTypeMismatch(_, _)
))
);
// Different entry type
eu.entry_type = et_cap;
assert_matches!(
check_update_reference(&eu, &NewEntryHeaderRef::from(&ec)),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::UpdateTypeMismatch(_, _)
))
);
}
#[tokio::test(flavor = "multi_thread")]
async fn check_link_tag_size_test() {
let tiny = LinkTag(vec![0; 1]);
let bytes = (0..super::MAX_TAG_SIZE + 1)
.map(|_| 0u8)
.into_iter()
.collect::<Vec<_>>();
let huge = LinkTag(bytes);
assert_matches!(check_tag_size(&tiny), Ok(()));
assert_matches!(
check_tag_size(&huge),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::TagTooLarge(_, _)
))
);
}
#[tokio::test(flavor = "multi_thread")]
async fn check_app_entry_type_test() {
observability::test_run().ok();
// Setup test data
let dna_file = DnaFile::new(
DnaDef {
name: "app_entry_type_test".to_string(),
uid: "ba1d046d-ce29-4778-914b-47e6010d2faf".to_string(),
properties: SerializedBytes::try_from(()).unwrap(),
zomes: vec![TestWasm::EntryDefs.into()].into(),
},
vec![TestWasm::EntryDefs.into()],
)
.await
.unwrap();
let dna_hash = dna_file.dna_hash().to_owned().clone();
let mut entry_def = fixt!(EntryDef);
entry_def.visibility = EntryVisibility::Public;
// Setup mock conductor
let mut conductor_api = MockCellConductorApi::new();
conductor_api.expect_cell_id().return_const(fixt!(CellId));
// # No dna or entry def
conductor_api.expect_sync_get_entry_def().return_const(None);
conductor_api.expect_sync_get_dna().return_const(None);
conductor_api
.expect_sync_get_this_dna()
.returning(move || Err(ConductorApiError::DnaMissing(dna_hash.clone())));
// ## Dna is missing
let aet = AppEntryType::new(0.into(), 0.into(), EntryVisibility::Public);
assert_matches!(
check_app_entry_type(&aet, &conductor_api).await,
Err(SysValidationError::ConductorApiError(e))
if matches!(*e, ConductorApiError::DnaMissing(_))
);
// # Dna but no entry def in buffer
// ## ZomeId out of range
conductor_api.checkpoint();
conductor_api.expect_sync_get_entry_def().return_const(None);
conductor_api
.expect_sync_get_dna()
.return_const(Some(dna_file.clone()));
conductor_api
.expect_sync_get_this_dna()
.returning(move || Ok(dna_file.clone()));
let aet = AppEntryType::new(0.into(), 1.into(), EntryVisibility::Public);
assert_matches!(
check_app_entry_type(&aet, &conductor_api).await,
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::ZomeId(_)
))
);
// ## EntryId is out of range
let aet = AppEntryType::new(10.into(), 0.into(), EntryVisibility::Public);
assert_matches!(
check_app_entry_type(&aet, &conductor_api).await,
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::EntryDefId(_)
))
);
// ## EntryId is in range for dna
let aet = AppEntryType::new(0.into(), 0.into(), EntryVisibility::Public);
assert_matches!(check_app_entry_type(&aet, &conductor_api).await, Ok(_));
let aet = AppEntryType::new(0.into(), 0.into(), EntryVisibility::Private);
assert_matches!(
check_app_entry_type(&aet, &conductor_api).await,
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::EntryVisibility(_)
))
);
// # Add an entry def to the buffer
conductor_api
.expect_sync_get_entry_def()
.return_const(Some(entry_def));
// ## Can get the entry from the entry def
let aet = AppEntryType::new(0.into(), 0.into(), EntryVisibility::Public);
assert_matches!(check_app_entry_type(&aet, &conductor_api).await, Ok(_));
}
#[tokio::test(flavor = "multi_thread")]
async fn check_entry_not_private_test() {
let mut ed = fixt!(EntryDef);
ed.visibility = EntryVisibility::Public;
assert_matches!(check_not_private(&ed), Ok(()));
ed.visibility = EntryVisibility::Private;
assert_matches!(
check_not_private(&ed),
Err(SysValidationError::ValidationOutcome(
ValidationOutcome::PrivateEntry
))
);
}
#[tokio::test(flavor = "multi_thread")]
async fn incoming_ops_filters_private_entry() {
let vault = test_cell_env();
let (tx, _rx) = TriggerSender::new();
let private_entry = fixt!(Entry);
let mut create = fixt!(Create);
let author = vault
.env()
.keystore()
.new_sign_keypair_random()
.await
.unwrap();
let aet = AppEntryType::new(0.into(), 0.into(), EntryVisibility::Private);
create.entry_type = EntryType::App(aet);
create.entry_hash = EntryHash::with_data_sync(&private_entry);
create.author = author.clone();
let header = Header::Create(create);
let signature = author.sign(&vault.env().keystore(), &header).await.unwrap();
let shh =
SignedHeaderHashed::with_presigned(HeaderHashed::from_content_sync(header), signature);
let el = Element::new(shh, Some(private_entry));
let ops_sender = IncomingDhtOpSender::new(vault.env(), tx.clone());
ops_sender.send_store_entry(el.clone()).await.unwrap();
let num_ops: usize = fresh_reader_test(vault.env(), |txn| {
txn.query_row("SELECT COUNT(rowid) FROM DhtOp", [], |row| row.get(0))
.unwrap()
});
assert_eq!(num_ops, 0);
let ops_sender = IncomingDhtOpSender::new(vault.env(), tx.clone());
ops_sender.send_store_element(el.clone()).await.unwrap();
let num_ops: usize = fresh_reader_test(vault.env(), |txn| {
txn.query_row("SELECT COUNT(rowid) FROM DhtOp", [], |row| row.get(0))
.unwrap()
});
assert_eq!(num_ops, 1);
let num_entries: usize = fresh_reader_test(vault.env(), |txn| {
txn.query_row("SELECT COUNT(rowid) FROM Entry", [], |row| row.get(0))
.unwrap()
});
assert_eq!(num_entries, 0);
} | .unwrap()
.execute("UPDATE DhtOp SET when_integrated = 0", [])
.unwrap(); |
gh-pages-publish.ts | const { cd, exec, echo, touch } = require("shelljs")
const { readFileSync } = require("fs")
const url = require("url")
let repoUrl
let pkg = JSON.parse(readFileSync("package.json") as any)
if (typeof pkg.repository === "object") {
if (!pkg.repository.hasOwnProperty("url")) {
throw new Error("URL does not exist in repository section")
} | } else {
repoUrl = pkg.repository
}
let parsedUrl = url.parse(repoUrl)
let repository = (parsedUrl.host || "") + (parsedUrl.path || "")
let ghToken = process.env.GH_TOKEN
echo("Deploying docs!!!")
cd("docs")
touch(".nojekyll")
exec("git init")
exec("git add .")
exec('git config user.name "Alfred Chen"')
exec('git config user.email "[email protected]"')
exec('git commit -m "docs(docs): update gh-pages"')
exec(
`git push --force --quiet "https://${ghToken}@${repository}" master:gh-pages`
)
echo("Docs deployed!!") | repoUrl = pkg.repository.url |
users.service.ts | import { Injectable } from '@nestjs/common'; | import { UserRepository } from './user.repository';
import { User } from './user.entity';
import { SaveUserDto } from './saveUser.dto';
@Injectable()
export class UsersService {
constructor(private readonly userRepository: UserRepository) {}
async findUser(username: string): Promise<User> {
return this.userRepository.findUser(username);
}
async saveUser(saveUserDto: SaveUserDto): Promise<User> {
return this.userRepository.saveUser(saveUserDto);
}
} | |
f025f89b250b_.py | """empty message
Revision ID: f025f89b250b
Revises: 37eabcbbb8fb
Create Date: 2019-10-19 18:12:48.976655
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f025f89b250b'
down_revision = '37eabcbbb8fb'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('df_goods_image',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('sku', sa.Integer(), nullable=True),
sa.Column('image', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['sku'], ['df_goods_sku.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def | ():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('df_goods_image')
# ### end Alembic commands ###
| downgrade |
logging.ts | import * as LogFactory from 'bunyan'
|
export {log} | const log = LogFactory.createLogger({name: 'BitDAO.Token.Contract'}) |
sys.rs | //! This module provides platform related functions.
#[cfg(unix)]
pub use self::unix::{exit, size};
#[cfg(windows)]
pub(crate) use self::windows::{clear, scroll_down, scroll_up, set_size};
#[cfg(windows)]
pub use self::windows::{exit, size}; |
#[cfg(unix)]
pub(crate) mod unix; |
#[cfg(windows)]
pub(crate) mod windows; |
wtr.rs | use std::io;
use termcolor;
use is_tty_stdout;
/// A writer that supports coloring with either line or block buffering.
pub struct StandardStream(StandardStreamKind);
/// Returns a possibly buffered writer to stdout for the given color choice.
///
/// The writer returned is either line buffered or block buffered. The decision
/// between these two is made automatically based on whether a tty is attached
/// to stdout or not. If a tty is attached, then line buffering is used.
/// Otherwise, block buffering is used. In general, block buffering is more
/// efficient, but may increase the time it takes for the end user to see the
/// first bits of output.
///
/// If you need more fine-grained control over the buffering mode, then use one
/// of `stdout_buffered_line` or `stdout_buffered_block`.
///
/// The color choice given is passed along to the underlying writer. To
/// completely disable colors in all cases, use `ColorChoice::Never`.
pub fn stdout(color_choice: termcolor::ColorChoice) -> StandardStream {
if is_tty_stdout() {
stdout_buffered_line(color_choice)
} else {
stdout_buffered_block(color_choice)
}
}
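// A minimal usage sketch (assumes this module's items are in scope):
//
//     use std::io::Write;
//
//     let mut out = stdout(termcolor::ColorChoice::Auto);
//     writeln!(out, "hello").expect("write to stdout");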
/// Returns a line buffered writer to stdout for the given color choice.
///
/// This writer is useful when printing results directly to a tty such that
/// users see output as soon as it's written. The downside of this approach
/// is that it can be slower, especially when there is a lot of output.
///
/// You might consider using
/// [`stdout`](fn.stdout.html)
/// instead, which chooses the buffering strategy automatically based on
/// whether stdout is connected to a tty.
pub fn stdout_buffered_line(
color_choice: termcolor::ColorChoice,
) -> StandardStream {
let out = termcolor::StandardStream::stdout(color_choice);
StandardStream(StandardStreamKind::LineBuffered(out))
}
/// Returns a block buffered writer to stdout for the given color choice.
///
/// This writer is useful when printing results to a file since it amortizes
/// the cost of writing data. The downside of this approach is that it can
/// increase the latency of display output when writing to a tty.
///
/// You might consider using
/// [`stdout`](fn.stdout.html)
/// instead, which chooses the buffering strategy automatically based on
/// whether stdout is connected to a tty.
pub fn stdout_buffered_block(
color_choice: termcolor::ColorChoice,
) -> StandardStream {
let out = termcolor::BufferedStandardStream::stdout(color_choice);
StandardStream(StandardStreamKind::BlockBuffered(out))
}
enum StandardStreamKind {
LineBuffered(termcolor::StandardStream),
BlockBuffered(termcolor::BufferedStandardStream),
}
impl io::Write for StandardStream {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
use self::StandardStreamKind::*;
match self.0 {
LineBuffered(ref mut w) => w.write(buf),
BlockBuffered(ref mut w) => w.write(buf),
}
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
use self::StandardStreamKind::*;
match self.0 {
LineBuffered(ref mut w) => w.flush(),
BlockBuffered(ref mut w) => w.flush(),
}
}
}
impl termcolor::WriteColor for StandardStream {
#[inline]
fn supports_color(&self) -> bool {
use self::StandardStreamKind::*;
match self.0 {
LineBuffered(ref w) => w.supports_color(),
BlockBuffered(ref w) => w.supports_color(),
}
}
#[inline]
fn set_color(&mut self, spec: &termcolor::ColorSpec) -> io::Result<()> {
use self::StandardStreamKind::*;
match self.0 {
LineBuffered(ref mut w) => w.set_color(spec),
BlockBuffered(ref mut w) => w.set_color(spec),
}
}
#[inline]
fn reset(&mut self) -> io::Result<()> {
use self::StandardStreamKind::*;
match self.0 {
LineBuffered(ref mut w) => w.reset(),
BlockBuffered(ref mut w) => w.reset(),
}
}
#[inline]
fn | (&self) -> bool {
use self::StandardStreamKind::*;
match self.0 {
LineBuffered(ref w) => w.is_synchronous(),
BlockBuffered(ref w) => w.is_synchronous(),
}
}
}
| is_synchronous |
stations.js | module.exports = {
dlf : {
color : '#006AB3',
darkcolor : '#00338B',
id : 3,
name : 'Deutschlandfunk',
/* for the daily schedule overview; also used in the media library view and synced
once a day by a background process */
dayplan : 'http://www.deutschlandfunk.de/programmvorschau.281.de.rss',
/* for the live radio view; the actual stream URL is resolved and stored */
stream : 'http://st01.dlf.de/dlf/01/128/mp3/stream.mp3',
/* called every minute while the view is active */
mediathek : 'http://srv.deutschlandradio.de/aodlistaudio.1706.de.rpc?drau:station_id=4&drau:from=_DATE_&drau:to=_DATE_&drau:page=1&drau:limit=100'
| drk : {
color : '#E95D0F',
id : 4,
darkcolor : '#A51400',
name : 'Deutschlandfunk Kultur',
dayplan : 'http://www.deutschlandradiokultur.de/programmvorschau.282.de.rss',
stream : 'http://dradio-edge-1093.dus-dtag.cdn.addradio.net/dradio/kultur/live/mp3/128/stream.mp3',
mediathek : 'http://srv.deutschlandradio.de/aodlistaudio.1706.de.rpc?drau:station_id=3&drau:from=_DATE_&drau:to=_DATE_&drau:page=1&drau:limit=100'
},
drw : {
color : '#01953C',
id : 1,
darkcolor : '#006900',
name : 'Deutschlandfunk Nova',
meta : 'https://www.deutschlandfunknova.de/actions/dradio/playlist/onair',
stream : 'http://st03.dlf.de/dlf/03/128/mp3/stream.mp3',
mediathek : 'http://srv.deutschlandradio.de/aodlistaudio.1706.de.rpc?drau:station_id=1&drau:from=_DATE_&drau:to=_DATE_&drau:page=1&drau:limit=500'
}
}; | }, |
nfts.ts | import { createClient } from '@supabase/supabase-js';
import dotenv from 'dotenv';
dotenv.config();
import { NFT } from '../types';
const supabase = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_DB_KEY);
// The ID is the same as the NFT address
export async function getNftById(id: string) {
const { data: nftData, error } = await supabase
.from<NFT>('nfts')
.select('*')
.eq('id', id);
if (error) {
console.error(error);
throw Error(`Error retrieving NFT by ID from DB: ${error.message}`);
}
return nftData[0];
}
export async function updateNftById(id: string, updatedNftData: Partial<NFT>) {
const { data, error } = await supabase
.from<NFT>('nfts')
.update({
...updatedNftData,
})
.match({ id });
if (error) {
console.error(error);
throw Error(`Error updating NFT Data in DB: ${error.message}`);
}
return data;
}
export async function addNft(nft: Omit<NFT, 'created_at'>) {
const { data, error } = await supabase.from<NFT>('nfts').insert([ | ...nft,
},
]);
if (error) {
console.error(error);
throw Error(`Error adding new NFT to DB: ${error.message}`);
}
return data;
} | { |
ovsdb.rs | use std::iter::FromIterator;
use ddlog_rt::Val;
use types__uuid::crate_uuid;
pub fn map_extract_val_uuids<K: Val>(
ids: &ddlog_std::Map<K, uuid_or_string_t>,
) -> ddlog_std::Map<K, uuid> {
ddlog_std::Map::from_iter(ids.x.iter().map(|(k, v)| (k.clone(), extract_uuid(v))))
}
pub fn set_extract_uuids(ids: &ddlog_std::Set<uuid_or_string_t>) -> ddlog_std::Set<uuid> {
ddlog_std::Set::from_iter(ids.x.iter().map(|x| extract_uuid(x)))
}
pub fn group2vec_remove_sentinel<K>(
g: &ddlog_std::Group<K, uuid_or_string_t>,
) -> ddlog_std::Vec<uuid_or_string_t> {
let mut res = ddlog_std::Vec::new();
for ddlog_std::tuple2(ref v, _) in g.iter() {
match v {
ddlog_std::Either::Right { r } => {
if r.as_str() != "" {
res.push(ddlog_std::Either::Right { r: r.clone() });
};
}
v => {
res.push(v.clone());
}
}
}
res
}
pub fn group2set_remove_sentinel<K>(
g: &ddlog_std::Group<K, uuid_or_string_t>,
) -> ddlog_std::Set<uuid_or_string_t> {
let mut res = ddlog_std::Set::new();
for ddlog_std::tuple2(ref v, _) in g.iter() {
match v {
ddlog_std::Either::Right { r } => {
if r.as_str() != "" {
res.insert(ddlog_std::Either::Right { r: r.clone() });
};
}
v => {
res.insert(v.clone());
}
}
}
res
}
pub fn group2map_remove_sentinel<K1, K2: Ord + Clone>(
g: &ddlog_std::Group<K1, (K2, uuid_or_string_t)>,
) -> ddlog_std::Map<K2, uuid_or_string_t> {
let mut res = ddlog_std::Map::new();
for ddlog_std::tuple2((ref k, ref v), _) in g.iter() {
match v {
ddlog_std::Either::Right { r } => {
if r.as_str() != "" {
res.insert((*k).clone(), ddlog_std::Either::Right { r: r.clone() });
};
}
_ => {
res.insert((*k).clone(), v.clone());
}
}
}
res
}
pub fn uuid2str(i: &u128) -> String {
crate_uuid::Uuid::from_u128(*i /*.to_be()*/)
.to_hyphenated()
.to_string()
}
pub fn uuid2name(i: &u128) -> String {
let s = crate_uuid::Uuid::from_u128(*i /*.to_be()*/)
.to_simple()
.to_string();
format!(
"u{}_{}_{}_{}_{}",
&s[0..8],
&s[8..12],
&s[12..16],
&s[16..20],
&s[20..32]
)
}
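// For example, the UUID 00000000-0000-0000-0000-000000000001 is rendered as
// "u00000000_0000_0000_0000_000000000001".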
pub fn set_map_uuid2str(ids: &ddlog_std::Set<uuid>) -> ddlog_std::Set<String> {
ddlog_std::Set::from_iter(ids.x.iter().map(|id| uuid2str(id)))
}
pub fn set_map_uuid2name(ids: &ddlog_std::Set<uuid>) -> ddlog_std::Set<String> | {
ddlog_std::Set::from_iter(ids.x.iter().map(|id| uuid2name(id)))
} |
|
spatial_agent.py | from starling_sim.basemodel.agent.agent import Agent
class SpatialAgent(Agent):
"""
Class describing a spatial agent, with a position and origin in the simulation environment.
"""
SCHEMA = {
"properties": {
"origin": {
"type": ["number", "string"],
"title": "Origin position",
"description": "origin position id (inferred from geometry)",
}
},
"required": ["origin"]
}
def | (self, simulation_model, agent_id, origin, **kwargs):
Agent.__init__(self, simulation_model, agent_id, **kwargs)
self.origin = origin
self.position = origin
def __str__(self):
return "[id={}, origin={}]".format(self.id, self.origin)
| __init__ |
priority.py | """PriorityStateMachine keeps track of stopping or halting in front of stop or halt lines.
See :mod:`simulation.src.simulation_evaluation.src.state_machine.states.priority` for
implementation details of the states used in this StateMachine.
"""
from typing import Callable
from simulation.src.simulation_evaluation.src.state_machine.states.priority import (
FailureInStopZone,
InHaltZone,
InStopZone,
Off,
SuccessfullyStopped,
)
from .state_machine import StateMachine
__copyright__ = "KITcar"
class | (StateMachine):
"""Keep track of stoping and halting in front of stop or halt lines."""
off: "State" = Off() # noqa: F821
"""Default state"""
in_stop_zone: "State" = InStopZone() # noqa: F821
"""The car is inside a stop zone"""
in_halt_zone: "State" = InHaltZone() # noqa: F821
"""The car is inside a halt zone"""
successfully_stopped: "State" = SuccessfullyStopped() # noqa: F821
"""The car successfully stopes in the stop zone"""
failure_in_stop_zone: "State" = FailureInStopZone() # noqa: F821
"""End state when the car does not stop inside the stop zone"""
def __init__(self, callback: Callable[[], None]):
"""Initialize PriorityStateMachine.
Arguments:
callback: Function which gets executed when the state changes
"""
super().__init__(
state_machine=self.__class__,
initial_state=PriorityStateMachine.off,
callback=callback,
)
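# A minimal usage sketch (the lambda is illustrative):
#   sm = PriorityStateMachine(lambda: print("priority state changed"))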
| PriorityStateMachine |
lib.rs | pub mod status;
#[cfg(test)]
mod tests {
use crate::status::StatusCode;
#[test]
fn | () {
assert!(StatusCode::OK.is_success());
}
}
| it_works |
keys.js | const username = "gptesting"; | const password = "test12341234";
const database = "burgers_db";
const port = "3306";
const host = "database-1.cm4p7piy3acq.us-west-1.rds.amazonaws.com";
module.exports = { username, password, database, port, host }; | |
cursor.py | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cursor for the Google BigQuery DB-API."""
import collections
from collections import abc as collections_abc
import copy
import logging
import re
try:
from google.cloud.bigquery_storage import ArrowSerializationOptions
except ImportError:
_ARROW_COMPRESSION_SUPPORT = False
else:
# Having BQ Storage available implies that pyarrow >=1.0.0 is available, too.
_ARROW_COMPRESSION_SUPPORT = True
from google.cloud.bigquery import job
from google.cloud.bigquery.dbapi import _helpers
from google.cloud.bigquery.dbapi import exceptions
import google.cloud.exceptions
_LOGGER = logging.getLogger(__name__)
# Per PEP 249: A 7-item sequence containing information describing one result
# column. The first two items (name and type_code) are mandatory, the other
# five are optional and are set to None if no meaningful values can be
# provided.
Column = collections.namedtuple(
"Column",
[
"name",
"type_code",
"display_size",
"internal_size",
"precision",
"scale",
"null_ok",
],
)
@_helpers.raise_on_closed("Operating on a closed cursor.")
class Cursor(object):
"""DB-API Cursor to Google BigQuery.
Args:
connection (google.cloud.bigquery.dbapi.Connection):
A DB-API connection to Google BigQuery.
"""
def __init__(self, connection):
self.connection = connection
self.description = None
# Per PEP 249: The attribute is -1 in case no .execute*() has been
# performed on the cursor or the rowcount of the last operation
# cannot be determined by the interface.
self.rowcount = -1
# Per PEP 249: The arraysize attribute defaults to 1, meaning to fetch
# a single row at a time. However, we deviate from that, and set the
# default to None, allowing the backend to automatically determine the
# most appropriate size.
self.arraysize = None
self._query_data = None
self._query_job = None
self._closed = False
def close(self):
"""Mark the cursor as closed, preventing its further use."""
self._closed = True
def _set_description(self, schema):
"""Set description from schema.
Args:
schema (Sequence[google.cloud.bigquery.schema.SchemaField]):
A description of fields in the schema.
"""
if schema is None:
self.description = None
return
self.description = tuple(
Column(
name=field.name,
type_code=field.field_type,
display_size=None,
internal_size=None,
precision=None,
scale=None,
null_ok=field.is_nullable,
)
for field in schema
)
def _set_rowcount(self, query_results):
"""Set the rowcount from query results.
Normally, this sets rowcount to the number of rows returned by the
query, but if it was a DML statement, it sets rowcount to the number
of modified rows.
Args:
query_results (google.cloud.bigquery.query._QueryResults):
Results of a query.
"""
total_rows = 0
num_dml_affected_rows = query_results.num_dml_affected_rows
if query_results.total_rows is not None and query_results.total_rows > 0:
total_rows = query_results.total_rows
if num_dml_affected_rows is not None and num_dml_affected_rows > 0:
total_rows = num_dml_affected_rows
self.rowcount = total_rows
def execute(self, operation, parameters=None, job_id=None, job_config=None):
"""Prepare and execute a database operation.
.. note::
When setting query parameters, values which are "text"
(``unicode`` in Python2, ``str`` in Python3) will use
the 'STRING' BigQuery type. Values which are "bytes" (``str`` in
Python2, ``bytes`` in Python3), will use the 'BYTES' type. | the 'DATETIME' BigQuery type (example: Global Pi Day Celebration
March 14, 2017 at 1:59pm). A `~datetime.datetime` parameter with
timezone information uses the 'TIMESTAMP' BigQuery type (example:
a wedding on April 29, 2011 at 11am, British Summer Time).
For more information about BigQuery data types, see:
https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
``STRUCT``/``RECORD`` and ``REPEATED`` query parameters are not
yet supported. See:
https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3524
Args:
operation (str): A Google BigQuery query string.
parameters (Union[Mapping[str, Any], Sequence[Any]]):
(Optional) dictionary or sequence of parameter values.
job_id (str):
(Optional) The job_id to use. If not set, a job ID
is generated at random.
job_config (google.cloud.bigquery.job.QueryJobConfig):
(Optional) Extra configuration options for the query job.
"""
formatted_operation, parameter_types = _format_operation(operation, parameters)
self._execute(
formatted_operation, parameters, job_id, job_config, parameter_types
)
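# A hedged usage sketch (table and parameter names are illustrative):
#   cursor.execute(
#       "SELECT name FROM dataset.table WHERE num > %(low:INT64)s",
#       {"low": 10},
#   )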
def _execute(
self, formatted_operation, parameters, job_id, job_config, parameter_types
):
self._query_data = None
self._query_job = None
client = self.connection._client
# The DB-API uses the pyformat formatting, since the way BigQuery does
# query parameters was not one of the standard options. Convert both
# the query and the parameters to the format expected by the client
# libraries.
query_parameters = _helpers.to_query_parameters(parameters, parameter_types)
if client._default_query_job_config:
if job_config:
config = job_config._fill_from_default(client._default_query_job_config)
else:
config = copy.deepcopy(client._default_query_job_config)
else:
config = job_config or job.QueryJobConfig(use_legacy_sql=False)
config.query_parameters = query_parameters
self._query_job = client.query(
formatted_operation, job_config=config, job_id=job_id
)
if self._query_job.dry_run:
self._set_description(schema=None)
self.rowcount = 0
return
# Wait for the query to finish.
try:
self._query_job.result()
except google.cloud.exceptions.GoogleCloudError as exc:
raise exceptions.DatabaseError(exc)
query_results = self._query_job._query_results
self._set_rowcount(query_results)
self._set_description(query_results.schema)
def executemany(self, operation, seq_of_parameters):
"""Prepare and execute a database operation multiple times.
Args:
operation (str): A Google BigQuery query string.
seq_of_parameters (Union[Sequence[Mapping[str, Any], Sequence[Any]]]):
Sequence of many sets of parameter values.
"""
if seq_of_parameters:
# There's no reason to format the operation more than once, as
# the operation only barely depends on the parameters. So
# we just use the first set of parameters. If there are
# different numbers or types of parameters, we'll error
# anyway.
formatted_operation, parameter_types = _format_operation(
operation, seq_of_parameters[0]
)
for parameters in seq_of_parameters:
self._execute(
formatted_operation, parameters, None, None, parameter_types
)
def _try_fetch(self, size=None):
"""Try to start fetching data, if not yet started.
Mutates self to indicate that iteration has started.
"""
if self._query_job is None:
raise exceptions.InterfaceError(
"No query results: execute() must be called before fetch."
)
if self._query_job.dry_run:
self._query_data = iter([])
return
if self._query_data is None:
bqstorage_client = self.connection._bqstorage_client
if bqstorage_client is not None:
rows_iterable = self._bqstorage_fetch(bqstorage_client)
self._query_data = _helpers.to_bq_table_rows(rows_iterable)
return
rows_iter = self._query_job.result(page_size=self.arraysize)
self._query_data = iter(rows_iter)
def _bqstorage_fetch(self, bqstorage_client):
"""Start fetching data with the BigQuery Storage API.
The method assumes that the data about the relevant query job already
exists internally.
Args:
bqstorage_client(\
google.cloud.bigquery_storage_v1.BigQueryReadClient \
):
A client that knows how to talk to the BigQuery Storage API.
Returns:
Iterable[Mapping]:
A sequence of rows, represented as dictionaries.
"""
# Hitting this code path with a BQ Storage client instance implies that
# bigquery_storage can indeed be imported here without errors.
from google.cloud import bigquery_storage
table_reference = self._query_job.destination
requested_session = bigquery_storage.types.ReadSession(
table=table_reference.to_bqstorage(),
data_format=bigquery_storage.types.DataFormat.ARROW,
)
if _ARROW_COMPRESSION_SUPPORT:
requested_session.read_options.arrow_serialization_options.buffer_compression = (
ArrowSerializationOptions.CompressionCodec.LZ4_FRAME
)
read_session = bqstorage_client.create_read_session(
parent="projects/{}".format(table_reference.project),
read_session=requested_session,
# a single stream only, as DB API is not well-suited for multithreading
max_stream_count=1,
)
if not read_session.streams:
return iter([]) # empty table, nothing to read
stream_name = read_session.streams[0].name
read_rows_stream = bqstorage_client.read_rows(stream_name)
rows_iterable = read_rows_stream.rows(read_session)
return rows_iterable
def fetchone(self):
"""Fetch a single row from the results of the last ``execute*()`` call.
.. note::
If a dry run query was executed, no rows are returned.
Returns:
Tuple:
A tuple representing a row or ``None`` if no more data is
available.
Raises:
google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``.
"""
self._try_fetch()
try:
return next(self._query_data)
except StopIteration:
return None
def fetchmany(self, size=None):
"""Fetch multiple results from the last ``execute*()`` call.
.. note::
If a dry run query was executed, no rows are returned.
.. note::
The size parameter is not used for the request/response size.
Set the ``arraysize`` attribute before calling ``execute()`` to
set the batch size.
Args:
size (int):
(Optional) Maximum number of rows to return. Defaults to the
``arraysize`` property value. If ``arraysize`` is not set, it
defaults to ``1``.
Returns:
List[Tuple]: A list of rows.
Raises:
google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``.
"""
if size is None:
# Since self.arraysize can be None (a deviation from PEP 249),
# use an actual PEP 249 default of 1 in such case (*some* number
# is needed here).
size = self.arraysize if self.arraysize else 1
self._try_fetch(size=size)
rows = []
for row in self._query_data:
rows.append(row)
if len(rows) >= size:
break
return rows
def fetchall(self):
"""Fetch all remaining results from the last ``execute*()`` call.
.. note::
If a dry run query was executed, no rows are returned.
Returns:
List[Tuple]: A list of all the rows in the results.
Raises:
google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``.
"""
self._try_fetch()
return list(self._query_data)
def setinputsizes(self, sizes):
"""No-op, but for consistency raise an error if cursor is closed."""
def setoutputsize(self, size, column=None):
"""No-op, but for consistency raise an error if cursor is closed."""
def __iter__(self):
self._try_fetch()
return iter(self._query_data)
def _format_operation_list(operation, parameters):
"""Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %s`` and the output
will be a query like ``SELECT ?``.
Args:
operation (str): A Google BigQuery query string.
parameters (Sequence[Any]): Sequence of parameter values.
Returns:
str: A formatted query string.
Raises:
google.cloud.bigquery.dbapi.ProgrammingError:
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
formatted_params = ["?" for _ in parameters]
try:
return operation % tuple(formatted_params)
except (TypeError, ValueError) as exc:
raise exceptions.ProgrammingError(exc)
def _format_operation_dict(operation, parameters):
"""Formats parameters in operation in the way BigQuery expects.
The input operation will be a query like ``SELECT %(namedparam)s`` and
the output will be a query like ``SELECT @namedparam``.
Args:
operation (str): A Google BigQuery query string.
parameters (Mapping[str, Any]): Dictionary of parameter values.
Returns:
str: A formatted query string.
Raises:
google.cloud.bigquery.dbapi.ProgrammingError:
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
formatted_params = {}
for name in parameters:
escaped_name = name.replace("`", r"\`")
formatted_params[name] = "@`{}`".format(escaped_name)
try:
return operation % formatted_params
except (KeyError, ValueError, TypeError) as exc:
raise exceptions.ProgrammingError(exc)
def _format_operation(operation, parameters):
"""Formats parameters in operation in way BigQuery expects.
Args:
operation (str): A Google BigQuery query string.
parameters (Union[Mapping[str, Any], Sequence[Any]]):
Optional parameter values.
Returns:
str: A formatted query string.
Raises:
google.cloud.bigquery.dbapi.ProgrammingError:
if a parameter used in the operation is not found in the
``parameters`` argument.
"""
if parameters is None or len(parameters) == 0:
return operation.replace("%%", "%"), None # Still do percent de-escaping.
operation, parameter_types = _extract_types(operation)
if parameter_types is None:
raise exceptions.ProgrammingError(
f"Parameters were provided, but {repr(operation)} has no placeholders."
)
if isinstance(parameters, collections_abc.Mapping):
return _format_operation_dict(operation, parameters), parameter_types
return _format_operation_list(operation, parameters), parameter_types
def _extract_types(
operation, extra_type_sub=re.compile(r"(%*)%(?:\(([^:)]*)(?::(\w+))?\))?s").sub
):
"""Remove type information from parameter placeholders.
For every parameter of the form %(name:type)s, replace with %(name)s and add the
item name->type to dict that's returned.
Returns operation without type information and a dictionary of names and types.
"""
parameter_types = None
def repl(m):
nonlocal parameter_types
prefix, name, type_ = m.groups()
if len(prefix) % 2:
# The prefix has an odd number of %s, the last of which
# escapes the % we're looking for, so we don't want to
# change anything.
return m.group(0)
try:
if name:
if not parameter_types:
parameter_types = {}
if type_:
if name in parameter_types:
if type_ != parameter_types[name]:
raise exceptions.ProgrammingError(
f"Conflicting types for {name}: "
f"{parameter_types[name]} and {type_}."
)
else:
parameter_types[name] = type_
else:
if not isinstance(parameter_types, dict):
raise TypeError()
return f"{prefix}%({name})s"
else:
if parameter_types is None:
parameter_types = []
parameter_types.append(type_)
return f"{prefix}%s"
except (AttributeError, TypeError):
raise exceptions.ProgrammingError(
f"{repr(operation)} mixes named and unamed parameters."
)
return extra_type_sub(repl, operation), parameter_types |
A `~datetime.datetime` parameter without timezone information uses |
timerQueueProcessorBase.go | // Copyright (c) 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package history
import (
ctx "context"
"sync"
"sync/atomic"
"time"
"github.com/uber/cadence/common"
"github.com/uber/cadence/common/backoff"
"github.com/uber/cadence/common/clock"
"github.com/uber/cadence/common/log"
"github.com/uber/cadence/common/log/tag"
"github.com/uber/cadence/common/metrics"
"github.com/uber/cadence/common/persistence"
"github.com/uber/cadence/common/quotas"
"github.com/uber/cadence/common/service/dynamicconfig"
"github.com/uber/cadence/service/history/config"
"github.com/uber/cadence/service/history/queue"
"github.com/uber/cadence/service/history/shard"
"github.com/uber/cadence/service/history/task"
)
var (
emptyTime = time.Time{}
loadDomainEntryForTimerTaskRetryDelay = 100 * time.Millisecond
)
type (
timerQueueProcessorBase struct {
scope int
shard shard.Context
status int32
shutdownWG sync.WaitGroup
shutdownCh chan struct{}
config *config.Config
logger log.Logger
metricsClient metrics.Client
metricsScope metrics.Scope
timerFiredCount uint64
timerProcessor timerProcessor
timerQueueAckMgr *timerQueueAckMgrImpl
timerGate queue.TimerGate
timeSource clock.TimeSource
rateLimiter quotas.Limiter
lastPollTime time.Time
taskProcessor *taskProcessor // TODO: deprecate task processor, in favor of queueTaskProcessor
queueTaskProcessor task.Processor
redispatcher task.Redispatcher
queueTaskInitializer task.Initializer
// timer notification
newTimerCh chan struct{}
newTimeLock sync.Mutex
newTime time.Time
}
)
func newTimerQueueProcessorBase(
scope int,
shard shard.Context,
historyService *historyEngineImpl,
timerProcessor timerProcessor,
queueTaskProcessor task.Processor,
timerQueueAckMgr *timerQueueAckMgrImpl,
taskFilter task.Filter,
taskExecutor task.Executor,
timerGate queue.TimerGate,
maxPollRPS dynamicconfig.IntPropertyFn,
logger log.Logger,
metricsScope metrics.Scope,
) *timerQueueProcessorBase {
logger = logger.WithTags(tag.ComponentTimerQueue)
config := shard.GetConfig()
var taskProcessor *taskProcessor
if queueTaskProcessor == nil || !config.TimerProcessorEnablePriorityTaskProcessor() {
options := taskProcessorOptions{
workerCount: config.TimerTaskWorkerCount(),
queueSize: config.TimerTaskWorkerCount() * config.TimerTaskBatchSize(),
}
taskProcessor = newTaskProcessor(options, shard, historyService.executionCache, logger)
}
queueType := task.QueueTypeActiveTimer
redispatcherOptions := &task.RedispatcherOptions{
TaskRedispatchInterval: config.ActiveTaskRedispatchInterval,
TaskRedispatchIntervalJitterCoefficient: config.TaskRedispatchIntervalJitterCoefficient,
}
if scope == metrics.TimerStandbyQueueProcessorScope {
queueType = task.QueueTypeStandbyTimer
redispatcherOptions.TaskRedispatchInterval = config.StandbyTaskRedispatchInterval
}
base := &timerQueueProcessorBase{
scope: scope,
shard: shard,
timerProcessor: timerProcessor,
status: common.DaemonStatusInitialized,
shutdownCh: make(chan struct{}),
config: config,
logger: logger,
metricsClient: shard.GetMetricsClient(),
metricsScope: metricsScope,
timerQueueAckMgr: timerQueueAckMgr,
timerGate: timerGate,
timeSource: shard.GetTimeSource(),
newTimerCh: make(chan struct{}, 1),
lastPollTime: time.Time{},
taskProcessor: taskProcessor,
queueTaskProcessor: queueTaskProcessor,
redispatcher: task.NewRedispatcher(
queueTaskProcessor,
redispatcherOptions,
logger,
metricsScope,
),
rateLimiter: quotas.NewDynamicRateLimiter(
func() float64 {
return float64(maxPollRPS())
},
),
}
// read dynamic config only once on startup to avoid gc pressure caused by repeatedly reading dynamic config
emitDomainTag := config.QueueProcessorEnableDomainTaggedMetrics()
base.queueTaskInitializer = func(taskInfo task.Info) task.Task {
return task.NewTimerTask(
shard,
taskInfo,
queueType,
task.InitializeLoggerForTask(shard.GetShardID(), taskInfo, logger),
taskFilter,
taskExecutor,
base.redispatcher.AddTask,
shard.GetTimeSource(),
config.TimerTaskMaxRetryCount,
emitDomainTag,
timerQueueAckMgr,
)
}
return base
}
func (t *timerQueueProcessorBase) Start() {
if !atomic.CompareAndSwapInt32(&t.status, common.DaemonStatusInitialized, common.DaemonStatusStarted) {
return
}
if t.taskProcessor != nil {
t.taskProcessor.start()
}
t.redispatcher.Start()
// notify an initial scan
t.notifyNewTimer(time.Time{})
t.shutdownWG.Add(1)
go t.processorPump()
t.logger.Info("Timer queue processor started.")
}
func (t *timerQueueProcessorBase) Stop() {
if !atomic.CompareAndSwapInt32(&t.status, common.DaemonStatusStarted, common.DaemonStatusStopped) {
return
}
t.timerGate.Close()
close(t.shutdownCh)
t.retryTasks()
if success := common.AwaitWaitGroup(&t.shutdownWG, time.Minute); !success {
t.logger.Warn("Timer queue processor timed out on shutdown.")
}
if t.taskProcessor != nil {
t.taskProcessor.stop()
}
t.redispatcher.Stop()
t.logger.Info("Timer queue processor stopped.")
}
func (t *timerQueueProcessorBase) processorPump() {
defer t.shutdownWG.Done()
RetryProcessor:
for {
select {
case <-t.shutdownCh:
break RetryProcessor
default:
err := t.internalProcessor()
if err != nil |
}
}
t.logger.Info("Timer queue processor pump shutting down.")
}
// NotifyNewTimers - Notify the processor about the new timer events arrival.
// This should be called each time new timer events arrive, otherwise timers may fire unexpectedly.
func (t *timerQueueProcessorBase) notifyNewTimers(
timerTasks []persistence.Task,
) {
if len(timerTasks) == 0 {
return
}
isActive := t.scope == metrics.TimerActiveQueueProcessorScope
newTime := timerTasks[0].GetVisibilityTimestamp()
for _, timerTask := range timerTasks {
ts := timerTask.GetVisibilityTimestamp()
if ts.Before(newTime) {
newTime = ts
}
scopeIdx := task.GetTimerTaskMetricScope(timerTask.GetType(), isActive)
t.metricsClient.IncCounter(scopeIdx, metrics.NewTimerCounter)
}
t.notifyNewTimer(newTime)
}
func (t *timerQueueProcessorBase) notifyNewTimer(
newTime time.Time,
) {
t.newTimeLock.Lock()
defer t.newTimeLock.Unlock()
if t.newTime.IsZero() || newTime.Before(t.newTime) {
t.newTime = newTime
select {
case t.newTimerCh <- struct{}{}:
// Notified about new time.
default:
// Channel "full" -> drop and move on; this will happen only if the service is under high load.
}
}
}
func (t *timerQueueProcessorBase) internalProcessor() error {
pollTimer := time.NewTimer(backoff.JitDuration(
t.config.TimerProcessorMaxPollInterval(),
t.config.TimerProcessorMaxPollIntervalJitterCoefficient(),
))
defer pollTimer.Stop()
updateAckTimer := time.NewTimer(backoff.JitDuration(
t.config.TimerProcessorUpdateAckInterval(),
t.config.TimerProcessorUpdateAckIntervalJitterCoefficient(),
))
defer updateAckTimer.Stop()
for {
// Wait until one of four things occurs:
// 1. we get notified of a new message
// 2. the timer gate fires (message scheduled to be delivered)
// 3. shutdown was triggered.
// 4. updating ack level
//
select {
case <-t.shutdownCh:
t.logger.Debug("Timer queue processor pump shutting down.")
return nil
case <-t.timerQueueAckMgr.getFinishedChan():
// timer queue ack manager indicates that all scanned tasks
// are finished and there are no more tasks
// use a separate goroutine since the caller holds the shutdownWG
go t.Stop()
return nil
case <-t.timerGate.FireChan():
maxRedispatchQueueSize := t.config.TimerProcessorMaxRedispatchQueueSize()
if !t.isPriorityTaskProcessorEnabled() || t.redispatcher.Size() <= maxRedispatchQueueSize {
lookAheadTimer, err := t.readAndFanoutTimerTasks()
if err != nil {
return err
}
if lookAheadTimer != nil {
t.timerGate.Update(lookAheadTimer.VisibilityTimestamp)
}
continue
}
// has too many pending tasks in re-dispatch queue, block loading tasks from persistence
t.redispatcher.Redispatch(maxRedispatchQueueSize)
if t.redispatcher.Size() > maxRedispatchQueueSize {
// if redispatcher still has a large number of tasks
// this only happens when system is under very high load
// we should backoff here instead of keeping submitting tasks to task processor
// don't call t.notifyNewTime(time.Now() + loadQueueTaskThrottleRetryDelay) as the time in
// standby timer processor is not real time and is managed separately
time.Sleep(loadQueueTaskThrottleRetryDelay)
}
// re-enqueue the event to see if we need keep re-dispatching or load new tasks from persistence
t.notifyNewTimer(time.Time{})
case <-pollTimer.C:
pollTimer.Reset(backoff.JitDuration(
t.config.TimerProcessorMaxPollInterval(),
t.config.TimerProcessorMaxPollIntervalJitterCoefficient(),
))
if t.lastPollTime.Add(t.config.TimerProcessorMaxPollInterval()).Before(t.timeSource.Now()) {
lookAheadTimer, err := t.readAndFanoutTimerTasks()
if err != nil {
return err
}
if lookAheadTimer != nil {
t.timerGate.Update(lookAheadTimer.VisibilityTimestamp)
}
}
case <-updateAckTimer.C:
updateAckTimer.Reset(backoff.JitDuration(
t.config.TimerProcessorUpdateAckInterval(),
t.config.TimerProcessorUpdateAckIntervalJitterCoefficient(),
))
if err := t.timerQueueAckMgr.updateAckLevel(); err == shard.ErrShardClosed {
// shard is closed, shutdown timerQProcessor and bail out
go t.Stop()
return err
}
case <-t.newTimerCh:
t.newTimeLock.Lock()
newTime := t.newTime
t.newTime = emptyTime
t.newTimeLock.Unlock()
// New Timer has arrived.
t.metricsScope.IncCounter(metrics.NewTimerNotifyCounter)
t.timerGate.Update(newTime)
}
}
}
func (t *timerQueueProcessorBase) readAndFanoutTimerTasks() (*persistence.TimerTaskInfo, error) {
ctx, cancel := ctx.WithTimeout(ctx.Background(), loadQueueTaskThrottleRetryDelay)
if err := t.rateLimiter.Wait(ctx); err != nil {
cancel()
t.notifyNewTimer(time.Time{}) // re-enqueue the event
return nil, nil
}
cancel()
t.lastPollTime = t.timeSource.Now()
timerTasks, lookAheadTask, moreTasks, err := t.timerQueueAckMgr.readTimerTasks()
if err != nil {
t.notifyNewTimer(time.Time{}) // re-enqueue the event
return nil, err
}
taskStartTime := t.timeSource.Now()
for _, task := range timerTasks {
if submitted := t.submitTask(task, taskStartTime); !submitted {
// not submitted due to shard shutdown
return nil, nil
}
}
if !moreTasks {
return lookAheadTask, nil
}
t.notifyNewTimer(time.Time{}) // re-enqueue the event
return nil, nil
}
func (t *timerQueueProcessorBase) submitTask(
taskInfo task.Info,
taskStartTime time.Time,
) bool {
if !t.isPriorityTaskProcessorEnabled() {
return t.taskProcessor.addTask(
newTaskInfo(
t.timerProcessor,
taskInfo,
task.InitializeLoggerForTask(t.shard.GetShardID(), taskInfo, t.logger),
taskStartTime,
),
)
}
timerQueueTask := t.queueTaskInitializer(taskInfo)
submitted, err := t.queueTaskProcessor.TrySubmit(timerQueueTask)
if err != nil {
select {
case <-t.shutdownCh:
// if error is due to shard shutdown
return false
default:
// otherwise it might be error from domain cache etc, add
// the task to redispatch queue so that it can be retried
t.logger.Error("Failed to submit task", tag.Error(err))
}
}
if err != nil || !submitted {
t.redispatcher.AddTask(timerQueueTask)
}
return true
}
func (t *timerQueueProcessorBase) retryTasks() {
if t.taskProcessor != nil {
t.taskProcessor.retryTasks()
}
}
func (t *timerQueueProcessorBase) complete(
timerTask *persistence.TimerTaskInfo,
) {
t.timerQueueAckMgr.completeTimerTask(timerTask)
atomic.AddUint64(&t.timerFiredCount, 1)
}
func (t *timerQueueProcessorBase) isPriorityTaskProcessorEnabled() bool {
return t.taskProcessor == nil
}
func (t *timerQueueProcessorBase) getTimerFiredCount() uint64 {
return atomic.LoadUint64(&t.timerFiredCount)
}
| {
t.logger.Error("processor pump failed with error", tag.Error(err))
} |
k12_simplified.rs | // Implementation of K12, based on the reference implementation:
// https://github.com/gvanas/KeccakCodePackage/blob/master/Standalone/k12s-reference/K12.py
//
// Some optimisations copied from https://github.com/RustCrypto/hashes/tree/master/sha3/src
//
// To the extent possible under law, the implementer has waived all copyright
// and related or neighboring rights to the source code in this file.
// http://creativecommons.org/publicdomain/zero/1.0/
use std::cmp::min;
#[macro_use]
mod macros {
/// Copied from `arrayref` crate
macro_rules! array_ref {
($arr:expr, $offset:expr, $len:expr) => {{
{
#[inline]
unsafe fn as_array<T>(slice: &[T]) -> &[T; $len] { | unsafe {
as_array(slice)
}
}
}}
}
macro_rules! REPEAT4 {
($e: expr) => ( $e; $e; $e; $e; )
}
macro_rules! REPEAT5 {
($e: expr) => ( $e; $e; $e; $e; $e; )
}
macro_rules! REPEAT6 {
($e: expr) => ( $e; $e; $e; $e; $e; $e; )
}
macro_rules! REPEAT24 {
($e: expr, $s: expr) => (
REPEAT6!({ $e; $s; });
REPEAT6!({ $e; $s; });
REPEAT6!({ $e; $s; });
REPEAT5!({ $e; $s; });
$e;
)
}
macro_rules! FOR5 {
($v: expr, $s: expr, $e: expr) => {
$v = 0;
REPEAT4!({
$e;
$v += $s;
});
$e;
}
}
}
mod lanes {
pub const RC: [u64; 12] = [
0x000000008000808b,
0x800000000000008b,
0x8000000000008089,
0x8000000000008003,
0x8000000000008002,
0x8000000000000080,
0x000000000000800a,
0x800000008000000a,
0x8000000080008081,
0x8000000000008080,
0x0000000080000001,
0x8000000080008008,
];
// (0..24).map(|t| ((t+1)*(t+2)/2) % 64)
pub const RHO: [u32; 24] = [
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27,
41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44
];
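// Sanity check of the formula above (worked by hand, not from the reference
// code): t = 0 gives (1*2/2) % 64 = 1, t = 5 gives (6*7/2) % 64 = 21, and
// t = 10 gives (11*12/2) % 64 = 66 % 64 = 2, matching the table entries.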
pub const PI: [usize; 24] = [
10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23,
19, 13, 12, 2, 20, 14, 22, 9, 6, 1
];
pub fn keccak(lanes: &mut [u64; 25]) {
let mut c = [0u64; 5];
let (mut x, mut y): (usize, usize);
for round in 0..12 {
// θ
FOR5!(x, 1, {
c[x] = lanes[x] ^ lanes[x+5] ^ lanes[x+10] ^ lanes[x+15] ^ lanes[x+20];
});
FOR5!(x, 1, {
FOR5!(y, 5, {
lanes[x + y] ^= c[(x+4)%5] ^ c[(x+1)%5].rotate_left(1);
});
});
// ρ and π
let mut a = lanes[1];
x = 0;
REPEAT24!({
c[0] = lanes[PI[x]];
lanes[PI[x]] = a.rotate_left(RHO[x]);
}, {
a = c[0];
x += 1;
});
// χ
FOR5!(y, 5, {
FOR5!(x, 1, {
c[x] = lanes[x + y];
});
FOR5!(x, 1, {
lanes[x + y] = c[x] ^((!c[(x+1) % 5]) & c[(x+2)%5]);
});
});
// ι
lanes[0] ^= RC[round];
}
}
}
fn read_u64(bytes: &[u8; 8]) -> u64 {
unsafe{ *(bytes as *const [u8; 8] as *const u64) }.to_le()
}
fn write_u64(val: u64) -> [u8; 8] {
unsafe{ *(&val.to_le() as *const u64 as *const [u8; 8]) }
}
fn keccak(state: &mut [u8; 200]) {
let mut lanes = [0u64; 25];
let mut y;
for x in 0..5 {
FOR5!(y, 5, {
lanes[x + y] = read_u64(array_ref!(state, 8*(x+y), 8));
});
}
lanes::keccak(&mut lanes);
for x in 0..5 {
FOR5!(y, 5, {
let i = 8*(x+y);
state[i..i+8].copy_from_slice(&write_u64(lanes[x + y]));
});
}
}
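// Usage sketch (my own example, not part of the reference code): the output
// array type selects the digest length, since k12s squeezes into O::default().
//
// let digest: [u8; 32] = k12s(b"hello world");
// println!("{:02x?}", digest);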
pub fn k12s<T: AsRef<[u8]>, O: AsMut<[u8]>+Default>(input: T) -> O {
let input = input.as_ref();
let mut state = [0u8; 200];
let max_block_size = 1344 / 8; // r, also known as rate in bytes
// === Absorb all the input blocks ===
// We unroll first loop, which allows simple copy
let mut block_size = min(input.len(), max_block_size);
state[0..block_size].copy_from_slice(&input[0..block_size]);
let mut offset = block_size;
while offset < input.len() {
keccak(&mut state);
block_size = min(input.len() - offset, max_block_size);
for i in 0..block_size {
// TODO: is this sufficiently optimisable or better to convert to u64 first?
state[i] ^= input[i+offset];
}
offset += block_size;
}
// === Do the padding and switch to the squeezing phase ===
state[block_size] ^= 0x07;
state[max_block_size-1] ^= 0x80;
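// Note on the two XORs above (my reading of the reference code): 0x07 is the
// domain-separation suffix this simplified K12 uses, and the 0x80 on the last
// byte of the rate completes the usual pad10*1 padding rule.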
keccak(&mut state);
// === Squeeze out all the output blocks ===
let mut output = O::default();
{
offset = 0;
let output_ref = output.as_mut();
let mut output_len = output_ref.len();
loop {
block_size = min(output_len, max_block_size);
output_ref[offset..(offset+block_size)].copy_from_slice(&state[0..block_size]);
output_len -= block_size;
offset += block_size;
if output_len == 0 {
break;
}
keccak(&mut state);
}
}
output
} | &*(slice.as_ptr() as *const [_; $len])
}
let offset = $offset;
let slice = & $arr[offset..offset + $len]; |
price_provider_coingecko.rs | /*
Cryptmon
Copyright 2022 Peter Pearson.
Licensed under the Apache License, Version 2.0 (the "License");
You may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
---------
*/
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use crate::price_provider::{PriceProvider, PriceProviderParams, ConfigDetails, GetDataError, CoinPriceItem, Watermarks};
// for results back from CoinGecko's API regarding the list of coins and their IDs
//
// Note: this is public because CoinGecko's API is fast and ideal for this (minimal data), whereas
// some of the other providers (e.g. CryptoCompare) return huge amounts of data
// for the same query, so it can take ages to get the results back;
// we'll therefore re-use this functionality from CoinGecko within other providers
// just for the coin list
#[derive(Serialize, Deserialize)]
#[derive(Clone, Debug)]
pub struct CoinListResultItem {
pub id: String,
pub symbol: String,
pub name: String,
}
// for results back from CoinGecko's API regarding the list of prices
#[derive(Serialize, Deserialize)]
#[derive(Clone, Debug)]
struct CoinMarketPriceItem {
id: String,
symbol: String,
name: String,
current_price: f64,
high_24h: f64,
low_24h: f64,
price_change_24h: f64,
price_change_percentage_24h: f64,
}
pub struct ProviderCoinGecko {
params: PriceProviderParams,
// list of actual id values to use for the request for prices of the
// coins that we actually want (i.e. a subset of the full list)
ids_wanted: Vec<String>,
currency_val: String,
// TODO: we could cache this and only update it every few days rather than every startup?
full_coin_list: Vec<CoinListResultItem>,
}
impl ProviderCoinGecko {
// TODO: maybe this could be made generic with dyn and put somewhere shared to reduce duplication per-provider?
pub fn new_from_config(params: &PriceProviderParams) -> Option<(ProviderCoinGecko, ConfigDetails)> {
let mut provider = ProviderCoinGecko { params: params.clone(),
ids_wanted: Vec::with_capacity(0),
currency_val: String::new(), full_coin_list: Vec::with_capacity(0) };
let config_details = provider.configure(params)?;
return Some((provider, config_details));
}
// This is public so other providers can use it in isolation
// TODO: Use Result for error handling...
pub fn get_minimal_coin_list() -> Option<Vec<CoinListResultItem>> {
let coin_list_request = ureq::get(&"https://api.coingecko.com/api/v3/coins/list".to_string());
let coin_list_resp = coin_list_request.call();
if coin_list_resp.is_err() {
eprintln!("Error calling https://api.coingecko.com/api/v3/coins/list {:?}", coin_list_resp.err());
return None;
}
let coin_list_resp = coin_list_resp.unwrap().into_string().unwrap();
let full_coin_list: Vec<CoinListResultItem> = serde_json::from_str(&coin_list_resp).unwrap();
return Some(full_coin_list);
}
}
impl PriceProvider for ProviderCoinGecko {
fn configure(&mut self, params: &PriceProviderParams) -> Option<ConfigDetails> {
// update this in a deferred way, so it can be updated lazily later, rather than
// just when being created...
self.params = params.clone();
let coin_list = ProviderCoinGecko::get_minimal_coin_list()?;
self.full_coin_list = coin_list;
// now work out the IDs of the coins we want, based off the symbol
let mut lookup = BTreeMap::new();
let mut index = 0usize;
for coin in &self.full_coin_list {
// filter out pegged values we don't want, due to symbol collisions..
// TODO: something smarter than this, but not sure how, given collisions...
// filter item symbols are in lowercase...
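// e.g. (hypothetical config, for illustration only): if coin_name_ignore_items
// maps "btc" -> "Peg", a listing like ("btc", "Bitcoin Peg Token") is skipped,
// while the plain ("btc", "Bitcoin") entry is kept in the lookup.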
if let Some(val) = self.params.coin_name_ignore_items.get(&coin.symbol.to_ascii_lowercase()) {
if coin.name.contains(val) {
// skip this item
index += 1;
continue;
}
}
lookup.insert(coin.symbol.to_ascii_uppercase(), index);
index += 1;
}
for coin in &self.params.wanted_coin_symbols {
if let Some(index) = lookup.get(&coin.to_ascii_uppercase()) |
}
self.currency_val = params.fiat_currency.to_ascii_lowercase();
if self.currency_val.is_empty() {
eprintln!("Error: Fiat Currency value for CoinGecko provider was not specified. Using NZD instead...");
self.currency_val = "nzd".to_string();
}
return Some(ConfigDetails::new());
}
fn get_current_prices(&self) -> Result<Vec<CoinPriceItem>, GetDataError> {
if self.ids_wanted.is_empty() {
return Err(GetDataError::ConfigError("No currency symbols configured/requested".to_string()));
}
let ids_param = self.ids_wanted.join(",");
let request_url = format!("https://api.coingecko.com/api/v3/coins/markets?vs_currency={}&ids={}",
self.currency_val, ids_param);
let price_results = ureq::get(&request_url).call();
if price_results.is_err() {
return Err(GetDataError::CantConnect(format!("Error calling https://api.coingecko.com/api/v3/coins/markets: {:?}", price_results.err())));
}
// TODO: error handling!
let coin_price_resp = price_results.unwrap().into_string().unwrap();
let coin_price_results = serde_json::from_str::<Vec<CoinMarketPriceItem>>(&coin_price_resp);
if coin_price_results.is_err() {
return Err(GetDataError::ParseError(coin_price_results.err().unwrap().to_string()));
}
let coin_price_results = coin_price_results.unwrap();
if coin_price_results.is_empty() {
return Err(GetDataError::EmptyResults);
}
let mut results = Vec::with_capacity(coin_price_results.len());
for src_res in &coin_price_results {
let new_val = CoinPriceItem{ symbol: src_res.symbol.to_ascii_uppercase(), name: src_res.name.clone(),
current_price: src_res.current_price,
watermarks_24h: Some(Watermarks::new(src_res.low_24h, src_res.high_24h)),
price_change_24h: src_res.price_change_24h,
percent_change_1h: None,
percent_change_24h: src_res.price_change_percentage_24h };
results.push(new_val);
}
return Ok(results);
}
}
| {
let item = &self.full_coin_list[*index];
self.ids_wanted.push(item.id.clone());
} |
database.go | package main
import (
"database/sql"
"fmt"
"net"
"encoding/binary"
"time"
_ "github.com/go-sql-driver/mysql"
"errors"
)
type Database struct {
db *sql.DB
}
type AccountInfo struct {
username string
maxBots int
admin int
}
func NewDatabase(dbAddr string, dbUser string, dbPassword string, dbName string) *Database {
db, err := sql.Open("mysql", fmt.Sprintf("%s:%s@tcp(%s)/%s", dbUser, dbPassword, dbAddr, dbName))
if err != nil {
fmt.Println(err)
}
fmt.Println("\x1b[35m[ \x1b[1;32mJoker Started \x1b[35m]")
return &Database{db}
}
func (this *Database) TryLogin(username string, password string, ip net.Addr) (bool, AccountInfo) {
rows, err := this.db.Query("SELECT username, max_bots, admin FROM users WHERE username = ? AND password = ? AND (wrc = 0 OR (UNIX_TIMESTAMP() - last_paid < `intvl` * 24 * 60 * 60))", username, password)
t := time.Now()
strRemoteAddr := ip.String()
host, port, err := net.SplitHostPort(strRemoteAddr)
if err != nil {
fmt.Println(err)
fmt.Printf("Failed Login In :: %s :: %s :: %s :: %s\n", username, host, port, t.Format("20060102150405"))
this.db.Exec("INSERT INTO logins (username, action, ip) VALUES (?, ?, ?)", username, "Fail", host)
return false, AccountInfo{"", 0, 0}
}
defer rows.Close()
if !rows.Next() {
fmt.Printf("Failed Login In :: %s :: %s :: %s :: %s\n", username, host, port, t.Format("20060102150405"))
this.db.Exec("INSERT INTO logins (username, action, ip) VALUES (?, ?, ?)", username, "Fail", host)
return false, AccountInfo{"", 0, 0}
}
var accInfo AccountInfo
rows.Scan(&accInfo.username, &accInfo.maxBots, &accInfo.admin)
fmt.Printf("Logged In :: %s :: %s :: %s :: %s\n", accInfo.username, host, port, t.Format("20060102150405"))
this.db.Exec("INSERT INTO logins (username, action, ip) VALUES (?, ?, ?)", accInfo.username, "Login", host)
return true, accInfo
}
func (this *Database) CreateBasic(username string, password string, max_bots int, duration int, cooldown int) bool {
rows, err := this.db.Query("SELECT username FROM users WHERE username = ?", username)
if err != nil {
fmt.Println(err)
return false
}
if rows.Next() {
return false
}
this.db.Exec("INSERT INTO users (username, password, max_bots, admin, last_paid, cooldown, duration_limit) VALUES (?, ?, ?, 0, UNIX_TIMESTAMP(), ?, ?)", username, password, max_bots, cooldown, duration)
return true
}
func (this *Database) CreateAdmin(username string, password string, max_bots int, duration int, cooldown int) bool {
rows, err := this.db.Query("SELECT username FROM users WHERE username = ?", username)
if err != nil {
fmt.Println(err)
return false
}
if rows.Next() {
return false
}
this.db.Exec("INSERT INTO users (username, password, max_bots, admin, last_paid, cooldown, duration_limit) VALUES (?, ?, ?, 1, UNIX_TIMESTAMP(), ?, ?)", username, password, max_bots, cooldown, duration)
return true
}
func (this *Database) RemoveUser(username string) (bool) {
rows, err := this.db.Query("DELETE FROM `users` WHERE username = ?", username)
if err != nil {
fmt.Println(err)
return false
}
if rows.Next() {
return false
}
this.db.Exec("DELETE FROM `users` WHERE username = ?", username)
return true
}
func (this *Database) ContainsWhitelistedTargets(attack *Attack) bool {
rows, err := this.db.Query("SELECT prefix, netmask FROM whitelist")
if err != nil {
fmt.Println(err)
return false
}
defer rows.Close()
for rows.Next() {
var prefix string
var netmask uint8
rows.Scan(&prefix, &netmask)
// Parse prefix
ip := net.ParseIP(prefix)
ip = ip[12:]
iWhitelistPrefix := binary.BigEndian.Uint32(ip)
for aPNetworkOrder, aN := range attack.Targets {
rvBuf := make([]byte, 4)
binary.BigEndian.PutUint32(rvBuf, aPNetworkOrder)
iAttackPrefix := binary.BigEndian.Uint32(rvBuf)
if aN > netmask { // Whitelist is less specific than attack target
if netshift(iWhitelistPrefix, netmask) == netshift(iAttackPrefix, netmask) {
return true
}
} else if aN < netmask { // Attack target is less specific than whitelist
if (iAttackPrefix >> aN) == (iWhitelistPrefix >> aN) {
return true
}
} else { // Both target and whitelist have same prefix
if (iWhitelistPrefix == iAttackPrefix) {
return true
}
} | }
}
return false
}
func (this *Database) CanLaunchAttack(username string, duration uint32, fullCommand string, maxBots int, allowConcurrent int) (bool, error) {
rows, err := this.db.Query("SELECT id, duration_limit, admin, cooldown FROM users WHERE username = ?", username)
defer rows.Close()
if err != nil {
fmt.Println(err)
}
var userId, durationLimit, admin, cooldown uint32
if !rows.Next() {
return false, errors.New("Your access has been terminated")
}
rows.Scan(&userId, &durationLimit, &admin, &cooldown)
if durationLimit != 0 && duration > durationLimit {
return false, errors.New(fmt.Sprintf("You may not send attacks longer than %d seconds.", durationLimit))
}
rows.Close()
if admin == 0 {
rows, err = this.db.Query("SELECT time_sent, duration FROM history WHERE user_id = ? AND (time_sent + duration + ?) > UNIX_TIMESTAMP()", userId, cooldown)
if err != nil {
fmt.Println(err)
}
if rows.Next() {
var timeSent, historyDuration uint32
rows.Scan(&timeSent, &historyDuration)
return false, errors.New(fmt.Sprintf("Please wait %d seconds before sending another attack", (timeSent + historyDuration + cooldown) - uint32(time.Now().Unix())))
}
}
this.db.Exec("INSERT INTO history (user_id, time_sent, duration, command, max_bots) VALUES (?, UNIX_TIMESTAMP(), ?, ?, ?)", userId, duration, fullCommand, maxBots)
return true, nil
}
func (this *Database) CheckApiCode(apikey string) (bool, AccountInfo) {
rows, err := this.db.Query("SELECT username, max_bots, admin FROM users WHERE api_key = ?", apikey)
if err != nil {
fmt.Println(err)
return false, AccountInfo{"", 0, 0}
}
defer rows.Close()
if !rows.Next() {
return false, AccountInfo{"", 0, 0}
}
var accInfo AccountInfo
rows.Scan(&accInfo.username, &accInfo.maxBots, &accInfo.admin)
return true, accInfo
} | |
serverctx.go | // Copyright © 2021 Luther Systems, Ltd. All rights reserved.
package grpclogging
import (
"context"
"sync"
"github.com/sirupsen/logrus"
)
// logMetadataCtxKey is a key to store logging data within context.
type logMetadataCtxKey struct{}
// ctxSetLogMetadata adds logging metadata to context.
func ctxSetLogMetadata(ctx context.Context, fields logrus.Fields) context.Context {
fieldMap := new(sync.Map)
for key, val := range fields {
fieldMap.Store(key, val)
}
return context.WithValue(ctx, logMetadataCtxKey{}, fieldMap)
}
// ctxGetLogMetadata retrieves logging metadata from context.
func ctxGetLogMetadata(ctx context.Context) *sync.Map {
val, _ := ctx.Value(logMetadataCtxKey{}).(*sync.Map)
return val
}
// GetLogrusFields returns stored logging metadata.
func GetLogrusFields(ctx context.Context) logrus.Fields {
fields := logrus.Fields{}
fieldMap := ctxGetLogMetadata(ctx)
if fieldMap == nil { | fieldMap.Range(func(key, val interface{}) bool {
if keyStr, ok := key.(string); ok {
fields[keyStr] = val
}
return true
})
return fields
}
// GetLogrusEntry returns stored logging metadata as a logrus Entry.
func GetLogrusEntry(ctx context.Context, base *logrus.Entry) *logrus.Entry {
fields := GetLogrusFields(ctx)
if fields != nil {
return base.WithFields(fields)
}
return base
}
// AddLogrusField adds a log field to the request context for later retrieval.
// This is intended to be used from a handler once `LogrusMethodInterceptor` has
// been used to initialize the context.
func AddLogrusField(ctx context.Context, key, value string) {
fieldMap := ctxGetLogMetadata(ctx)
if fieldMap == nil {
return
}
fieldMap.Store(key, value)
}
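// Example handler usage (an illustrative sketch; the request/response types
// are hypothetical, and an interceptor is assumed to have already called
// ctxSetLogMetadata on the incoming context):
//
//	func (s *server) Ping(ctx context.Context, req *PingRequest) (*PingResponse, error) {
//		AddLogrusField(ctx, "request_id", req.GetId())
//		GetLogrusEntry(ctx, logrus.NewEntry(logrus.StandardLogger())).Info("handling ping")
//		return &PingResponse{}, nil
//	}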
|
return fields
}
|
test_tcti.py | #!/usr/bin/python3 -u
# SPDX-License-Identifier: BSD-2
import unittest
from tpm2_pytss import *
from .TSS2_BaseTest import TSS2_EsapiTest
class TestTCTI(TSS2_EsapiTest):
def test_init(self):
self.assertEqual(self.tcti.version, 2)
self.assertGreater(self.tcti.magic, 0)
v1ctx = ffi.cast("TSS2_TCTI_CONTEXT_COMMON_V1 *", self.tcti._ctx)
v1ctx.version = 1
tcti = TCTI(self.tcti._ctx)
self.assertEqual(tcti.version, 1)
self.assertEqual(tcti._v2, None)
def test_transmit_receive(self):
|
def test_finalize(self):
tcti = TCTI(self.tcti._ctx)
tcti.finalize()
def test_cancel(self):
if getattr(self.tcti, "name", "") == "swtpm":
self.skipTest("cancel supported by swtpm")
startup = b"\x80\x01\x00\x00\x00\x0C\x00\x00\x01\x44\x00\x00"
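# The 12 bytes above are a raw TPM2_Startup command, big-endian: tag 0x8001
# (TPM_ST_NO_SESSIONS), commandSize 0x0000000C, commandCode 0x00000144
# (TPM2_CC_Startup), startupType 0x0000 (TPM_SU_CLEAR).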
self.tcti.transmit(startup)
self.tcti.cancel()
def test_get_poll_handles(self):
tcti_name = getattr(self.tcti, "name", "")
try:
handles = self.tcti.get_poll_handles()
except TSS2_Exception as e:
if e.rc != lib.TSS2_TCTI_RC_NOT_IMPLEMENTED:
raise e
else:
self.skipTest(f"get_poll_handles not supported by {tcti_name}")
def test_set_locality(self):
self.tcti.set_locality(TPMA_LOCALITY.TWO)
def test_make_sticky(self):
tcti_name = getattr(self.tcti, "name", "")
if tcti_name in ("swtpm", "mssim"):
self.skipTest(f"make_sticky not supported by {tcti_name}")
self.tcti.make_sticky(0, 0)
self.tcti._v2 = None
with self.assertRaises(RuntimeError) as e:
self.tcti.make_sticky(0, 0)
self.assertEqual(str(e.exception), "unsupported by TCTI API version")
def test_tctildr(self):
self.assertIsInstance(self.tcti.name, str)
self.assertIsInstance(self.tcti.conf, str)
with self.assertRaises(TypeError):
TCTILdr(name=None, conf=1234)
with self.assertRaises(TypeError):
TCTILdr(name=1234, conf=None)
if __name__ == "__main__":
unittest.main()
| startup = b"\x80\x01\x00\x00\x00\x0C\x00\x00\x01\x44\x00\x00"
self.tcti.transmit(startup)
resp = self.tcti.receive()
self.assertEqual(resp, b"\x80\x01\x00\x00\x00\n\x00\x00\x01\x00") |
main.py | # PyTorch StudioGAN: https://github.com/POSTECH-CVLab/PyTorch-StudioGAN | # See license file or visit https://github.com/POSTECH-CVLab/PyTorch-StudioGAN for details
# src/main.py
import json
import os
import sys
import random
import warnings
from argparse import ArgumentParser
from utils.misc import *
from utils.make_hdf5 import make_hdf5
from utils.log import make_run_name
from loader import prepare_train_eval
import torch
from torch.backends import cudnn
import torch.multiprocessing as mp
RUN_NAME_FORMAT = (
"{framework}-"
"{phase}-"
"{timestamp}"
)
def main():
parser = ArgumentParser(add_help=False)
parser.add_argument('-c', '--config_path', type=str, default='./src/configs/CIFAR10/ContraGAN.json')
parser.add_argument('--checkpoint_folder', type=str, default=None)
parser.add_argument('-current', '--load_current', action='store_true', help='whether you load the current or best checkpoint')
parser.add_argument('--log_output_path', type=str, default=None)
parser.add_argument('-DDP', '--distributed_data_parallel', action='store_true')
parser.add_argument('-n', '--nodes', default=1, type=int, metavar='N')
parser.add_argument('-nr', '--nr', default=0, type=int, help='ranking within the nodes')
parser.add_argument('--seed', type=int, default=-1, help='seed for generating random numbers')
parser.add_argument('--num_workers', type=int, default=8, help='')
parser.add_argument('-sync_bn', '--synchronized_bn', action='store_true', help='whether turn on synchronized batchnorm')
parser.add_argument('-mpc', '--mixed_precision', action='store_true', help='whether turn on mixed precision training')
parser.add_argument('-LARS', '--LARS_optimizer', action='store_true', help='whether turn on LARS optimizer')
parser.add_argument('-rm_API', '--disable_debugging_API', action='store_true', help='whether disable pytorch autograd debugging mode')
parser.add_argument('--reduce_train_dataset', type=float, default=1.0, help='control the number of train dataset')
parser.add_argument('--truncated_factor', type=float, default=-1.0, help='factor for truncation trick')
parser.add_argument('-stat_otf', '--bn_stat_OnTheFly', action='store_true', help='when evaluating, use the statistics of a batch')
parser.add_argument('-std_stat', '--standing_statistics', action='store_true')
parser.add_argument('--standing_step', type=int, default=-1, help='# of steps for accumulation batchnorm')
parser.add_argument('--freeze_layers', type=int, default=-1, help='# of layers for freezing discriminator')
parser.add_argument('-l', '--load_all_data_in_memory', action='store_true')
parser.add_argument('-t', '--train', action='store_true')
parser.add_argument('-e', '--eval', action='store_true')
parser.add_argument('-s', '--save_images', action='store_true')
parser.add_argument('-iv', '--image_visualization', action='store_true', help='select whether conduct image visualization')
parser.add_argument('-knn', '--k_nearest_neighbor', action='store_true', help='select whether conduct k-nearest neighbor analysis')
parser.add_argument('-itp', '--interpolation', action='store_true', help='whether conduct interpolation analysis')
parser.add_argument('-fa', '--frequency_analysis', action='store_true', help='whether conduct frequency analysis')
parser.add_argument('-tsne', '--tsne_analysis', action='store_true', help='whether conduct tsne analysis')
parser.add_argument('--nrow', type=int, default=10, help='number of rows to plot image canvas')
parser.add_argument('--ncol', type=int, default=8, help='number of cols to plot image canvas')
parser.add_argument('--print_every', type=int, default=100, help='control log interval')
parser.add_argument('--save_every', type=int, default=2000, help='control evaluation and save interval')
parser.add_argument('--eval_type', type=str, default='test', help='[train/valid/test]')
from template_lib.v2.config_cfgnode import update_parser_defaults_from_yaml, global_cfg
update_parser_defaults_from_yaml(parser=parser)
args = parser.parse_args()
if not args.train and \
not args.eval and \
not args.save_images and \
not args.image_visualization and \
not args.k_nearest_neighbor and \
not args.interpolation and \
not args.frequency_analysis and \
not args.tsne_analysis:
parser.print_help(sys.stderr)
sys.exit(1)
if args.config_path is not None:
with open(args.config_path) as f:
model_configs = json.load(f)
train_configs = vars(args)
else:
raise NotImplementedError
hdf5_path_train = make_hdf5(model_configs['data_processing'], train_configs, mode="train") \
if train_configs['load_all_data_in_memory'] else None
if train_configs['seed'] == -1:
train_configs['seed'] = random.randint(1,4096)
cudnn.benchmark, cudnn.deterministic = True, False
else:
cudnn.benchmark, cudnn.deterministic = False, True
fix_all_seed(train_configs['seed'])
gpus_per_node, rank = torch.cuda.device_count(), torch.cuda.current_device()
world_size = gpus_per_node*train_configs['nodes']
if world_size == 1:
warnings.warn('You have chosen a specific GPU. This will completely disable data parallelism.')
run_name = make_run_name(RUN_NAME_FORMAT, framework=train_configs['config_path'].split('/')[-1][:-5], phase='train')
if train_configs['disable_debugging_API']: torch.autograd.set_detect_anomaly(False)
check_flags(train_configs, model_configs, world_size)
if train_configs['distributed_data_parallel'] and world_size > 1:
print("Train the models through DistributedDataParallel (DDP) mode.")
mp.spawn(prepare_train_eval, nprocs=gpus_per_node, args=(gpus_per_node, world_size, run_name,
train_configs, model_configs, hdf5_path_train))
else:
prepare_train_eval(rank, gpus_per_node, world_size, run_name, train_configs, model_configs, hdf5_path_train=hdf5_path_train)
if __name__ == '__main__':
main() | # The MIT License (MIT) |
regenerateFixtures.js | // @generated
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @fullSyntaxTransform
*/
'use strict';
var fs = require('fs');
var path = require('path');
var transformGraphQL = require('./transformGraphQL');
var readFixtures = require('./readFixtures');
var SRC_DIR = path.dirname(__dirname);
var FIXTURE_PATH = path.join(SRC_DIR, '__fixtures__');
var SCHEMA_PATH = path.join(SRC_DIR, '__tests__', './testschema.rfc.json');
function | (basename, text) {
fs.writeFileSync(path.join(FIXTURE_PATH, basename), text, 'utf8');
}
var transform = transformGraphQL.bind(null, SCHEMA_PATH);
function genFixtures() {
var fixtures = readFixtures(FIXTURE_PATH);
Object.keys(fixtures).forEach(function (filename) {
var fixture = fixtures[filename];
if (fixture.output !== undefined) {
// fixture for valid input, update the expected output
try {
var graphql = transform(fixture.input, filename);
writeFixture(filename, ['Input:', fixture.input, '', // newline
'Output:', graphql, '']. // newline
join('\n'));
console.log('Updated fixture `%s`.', filename);
} catch (e) {
console.error('Failed to transform fixture `%s`: %s: %s', filename, e.message, e.stack);
}
} // else: fixture for invalid code, nothing to update
});
}
genFixtures(); | writeFixture |
postgres-schema-state.js | /**
* @license
*
* Use of this source code is governed by an MIT-style license
*/
import { SchemaState } from './schema-state'
export class | extends SchemaState {}
| PostgresSchemaState |
STEPIK.py | # ESCAPING
# a = "a string with a \" double quote and a \' single one"
# To get a literal backslash character into a string
# a = "A string with a backslash \\"
# Line break
# a = "First line \nSecond line"
'''
There is another way to insert a line break in Python: declare the string with triple quotes.
That is, you place the line breaks right in the program, writing the variable's value across several lines.
This cannot be done with strings declared with single quotes.
a = """First line
Second line
Third line"""
Remember that if a string starts with three quotes, it must also end with three of the same quotes.
'''
# a = '''First line
# Second line
# Third line'''
# print(a)
# ------------------------------------------
'''
result = print('What does the print function return?')
print(result)
The first line this program prints is the text "What does the print function return?", and the second line is the word None.
Python has a dedicated data type, NoneType, specifically for representing "nothing".
Variables of this type can hold only one value: None. None is an "empty" or "undefined" value.
In the program above, the variable result will hold exactly the value None. It is one of the language's keywords and,
if you want to assign None to a variable, you write it as is, without quotes:
z = None
'''
# ---------------------------------------------------
# SQUARE ROOT OF A NUMBER
# print(9 ** 0.5) # Raising a number to the power 0.5 gives the square root of that number
# ----------------------------------------------------
# NUMBER FORMAT IN E NOTATION - e.g. 5e-1
# print(5e-1) # this is the same as 0.5. The number five is multiplied by 10 to the power -1
# print(5 * 10**-1) # notation identical to the one above
#
# print(1234e-2) # this is the same expression as
# print(1234 * 10**-2)
#
# # A positive exponent can be used as well
# print(1234e2) # same as (1234 * 10**2)
# print(1234 * 10**2) # 10**2 - 10 squared
# -------------------------------------------------------
# # Remember that any arithmetic operation has a higher precedence than comparison operations and logical operators.
# -------------------------------------------------------
# Area of a triangle (Heron's formula):
# S = sqrt(p * (p - a) * (p - b) * (p - c)), where p = (a + b + c) / 2
# Area of a rectangle:
# S = a * b
# Area of a circle:
# S = π * r**2
# Computing the areas of shapes
# s = input()
# if s == "треугольник": # the exercise expects the Russian word for "triangle"
# a = float(input())
# b = float(input())
# c = float(input())
# p = (a + b + c) / 2
# print((p*((p-a)*(p-b)*(p-c)))**0.5) # Computing the triangle's area with Heron's formula
# elif s == "прямоугольник": # "rectangle"
# a = float(input())
# b = float(input())
# print(a * b)
# elif s == "круг": # "circle"
# r = float(input())
# print(3.14 * r**2)
# ----------------------------------------------------
# Noun declension (Russian plural forms: 1 программист, 2-4 программиста, 5+ программистов)
# x = int(input())
#
# if x % 10 == 1 and x % 100 != 11:
# print(x, 'программист')
# # elif x % 10 == 2 and x % 20 != 12 or x % 10 == 3 and x % 20 != 13 or x % 10 == 4 and x % 20 != 14:
# elif (x % 10 >= 2) and (x % 10 <= 4) and (x % 100 < 10 or x % 100 > 20):
# print(x, 'программиста')
# else:
# print(x, 'программистов')
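# Worked check (my own): x = 22 ends in 2 and 22 % 100 = 22 is outside 10..20,
# so the elif branch fires ('программиста'); x = 12 falls through to the else
# branch ('программистов') because 12 % 100 = 12 lies inside 10..20.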
# --------------------------------------------------
# Lucky ticket. The sum of the first three digits must equal the sum of the last three
# x = int(input())
# n1 = x % 10
# x = x // 10
# n2 = x % 10
# x = x // 10
# n3 = x % 10
# x = x // 10
# n4 = x % 10
# x = x // 10
# n5 = x % 10
# x = x // 10
# n6 = x % 10
# if n1 + n2 + n3 == n4 + n5 + n6:
# print('Счастливый') # "Lucky"
# else:
# print("Обычный") # "Ordinary"
# ------------------------------------------------
# Printing a triangle of asterisks
# n = int(input())
# i = 1
# while i <= n:
# print('*' * i)
# i += 1
#
# stars = '*'
# while len(stars) <= n:
# print(stars)
# stars += '*'
# ----------------------------------------------
# Computing the sum of the numbers on a given segment from a to b
# a = int(input())
# b = int(input())
# i = a
# while i != b:
# i += 1
# a += i
#
# print(a)
# ----------------------------------------
# Summing the numbers as they are entered and, after the first zero, printing their total
# n = 1
# i = 0
# while n != 0:
# n = int(input())
# i += n
# print(i)
# -----------------------------------------------------
# Finding the least common multiple of two numbers
# a = int(input())
# b = int(input())
# i = 1
#
# while i % a != 0 or i % b != 0:
# i = i + 1
# print(i)
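# A faster alternative (my own sketch): use the identity
# lcm(a, b) = a * b // gcd(a, b).
# import math
# a = int(input())
# b = int(input())
# print(a * b // math.gcd(a, b))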
# --------------------------------------------------
# i = 0
#
# while i < 5:
# a, b = input("Enter any two numbers separated by a space").split() # split() splits the pair of numbers on the space between them
# a = int(a)
# b = int(b)
# if(a == 0) and (b == 0): # If both entered numbers are 0
# break # terminate the loop early
# if(a == 0) or (b == 0):
# continue # the code below is not executed and we move on to the next iteration
# # (if, per the condition, there should be one); printing the product of the numbers and incrementing i does not happen.
# That is, we ignore this pair of numbers
# print(a * b)
# i += 1
# ---------------------------------------------
# Write a program that reads integers from the console, one number per line.
#
# For each entered number, check:
# if the number is less than 10, skip it;
# if the number is greater than 100, stop reading numbers;
# otherwise, print the number back to the console on its own line.
# while True:
# n = int(input())
# if n < 10:
# continue
# if n > 100:
# break
# else:
# print(n)
# -------------------------------------------------
# A square of asterisks with a for loop
# a = int(input())
# for i in range(a):
# print('*' * a)
# Nested loop
# n = int(input())
# for i in range(n):
# for j in range(n): # the inner loop prints an asterisk n times, building one row
# print('*', end=' ') # end - specifies what to use as the separator.
# # In this case a space. If end is omitted, you get the usual move to a new line
# print() # this print starts a new line without printing anything itself
# -----------------------------------------------------------
# Multiplication table
# Write a program that takes four numbers a, b, c and d as input, each on its own line.
# The program must print the fragment of the multiplication table for all numbers of the segment [a; b]
# times all numbers of the segment [c; d].
#
# The numbers a, b, c and d are natural and do not exceed 10, a <= b, c <= d.
#
# Follow the output format from the example; to separate elements within a row use '\t', the tab character.
# Note that the left column and the top row show the numbers from the given segments themselves,
# the header column and header row of the table.
# a = int(input())
# b = int(input())
# c = int(input())
# d = int(input())
#
# print()
# for x in range(c, d + 1):
# print('\t', x, end='')
# print()
# for i in range(a, b + 1):
# print(i, end='\t')
# for n in range(c, d + 1):
# if n < 10:
# print('', n * i, end='\t')
# else:
# print(n * i, end='\t')
# print()
# ------------------------------------------
# Variant 1: Print the sum of all odd numbers from a to b (bounds included)
# a, b = input().split() # get the first and last values of the range, on one line separated by a space
# a = int(a) # convert the values to numeric form
# b = int(b)
# x = 0
# for i in range(a, b+1): # loop over the range from a to b
# if i % 2 == 1: # if the value is odd
# x += i # add up the values
# print(x) # print the sum
#---------------------------------------------
# Variant 2: Print the sum of all odd numbers from a to b (bounds included)
# a, b = input().split() # get the first and last values of the range, on one line separated by a space
# a = int(a) # convert the values to numeric form
# b = int(b)
# x = 0
# if a % 2 == 0: # if the first number is even
# a = a + 1 # increase it by 1 (take the nearest odd number)
# for i in range(a, b+1, 2): # loop over the range from a to b with step 2 (every other number), hitting the odd numbers
# x += i # add up the values
# print(x) # print the sum
#---------------------------------------------
# Variant 3: Print the sum of all odd numbers from a to b (bounds included)
# Differs from the previous one in how the input is read
# a, b = (int(i) for i in input().split()) # In a situation like this, when we need to apply the same function
# # to a sequence of objects, we use a special construct
# # (the author calls it a "list comprehensive"; strictly speaking, this one is a generator expression).
# # First we state which function we apply, int(), which is applied to every element of the sequence.
# # The expression generating that sequence is written on the right: input().split().
# # The int() function is applied to each object of this sequence in a loop.
# # A construct like this is convenient when several numbers arrive on one line | # a = a + 1 # increase it by 1 (take the nearest odd number)
# for i in range(a, b+1, 2): # loop over the range from a to b with step 2 (every other number), hitting the odd numbers
# x += i # add up the values
# print(x) # print the sum
#---------------------------------------------
'''
Write a program that reads two numbers a and b from the keyboard,
computes and prints to the console the arithmetic mean of all numbers on the segment [a; b] that are divisible by 3.
In the example below the arithmetic mean is computed for the numbers on the segment [-5; 12].
There are 6 numbers divisible by 3 on this segment: -3, 0, 3, 6, 9, 12. Their arithmetic mean is 4.5.
The intervals given to the program as input always contain at least one number divisible by 3.
'''
# a, b = (int(i) for i in input().split())
# x = 0
# z = 0
# for i in range(a, b+1):
# if i % 3 == 0:
# x += i
# z += 1
# print(x / z)
#------------------------------------------ | # x = 0
# if a % 2 == 0: # if the first number is even
galleria.flickr.js | /**
* Galleria Flickr Plugin 2012-04-04
* http://galleria.io
*
* Licensed under the MIT license
* https://raw.github.com/aino/galleria/master/LICENSE
*
*/
(function($) {
/*global jQuery, Galleria, window */
Galleria.requires(1.25, 'The Flickr Plugin requires Galleria version 1.2.5 or later.');
// The script path
var PATH = Galleria.utils.getScriptPath();
/**
@class
@constructor
@example var flickr = new Galleria.Flickr();
@author http://aino.se
@requires jQuery
@requires Galleria
@param {String} [api_key] Flickr API key to be used, defaults to the Galleria key
@returns Instance
*/
Galleria.Flickr = function( api_key ) {
this.api_key = api_key || '2a2ce06c15780ebeb0b706650fc890b2';
this.options = {
max: 30, // photos to return
imageSize: 'medium', // photo size ( thumb,small,medium,big,original )
thumbSize: 'thumb', // thumbnail size ( thumb,small,medium,big,original )
sort: 'interestingness-desc', // sort option ( date-posted-asc, date-posted-desc, date-taken-asc, date-taken-desc, interestingness-desc, interestingness-asc, relevance )
description: false, // set this to true to get description as caption
complete: function(){}, // callback to be called inside the Galleria.prototype.load
backlink: false // set this to true if you want to pass a link back to the original image
};
};
Galleria.Flickr.prototype = {
// bring back the constructor reference
constructor: Galleria.Flickr,
/**
Search for anything at Flickr
@param {String} phrase The string to search for
@param {Function} [callback] The callback to be called when the data is ready
@returns Instance
*/
search: function( phrase, callback ) {
return this._find({
text: phrase
}, callback );
},
/**
Search for anything at Flickr by tag
@param {String} tag The tag(s) to search for
@param {Function} [callback] The callback to be called when the data is ready
@returns Instance
*/
tags: function( tag, callback ) {
return this._find({
tags: tag
}, callback);
},
/**
Get a user's public photos
@param {String} username The username as shown in the URL to fetch
@param {Function} [callback] The callback to be called when the data is ready
@returns Instance
*/
user: function( username, callback ) {
return this._call({
method: 'flickr.urls.lookupUser',
url: 'flickr.com/photos/' + username
}, function( data ) {
this._find({
user_id: data.user.id,
method: 'flickr.people.getPublicPhotos'
}, callback);
});
},
/**
Get photos from a photoset by ID
@param {String|Number} photoset_id The photoset id to fetch
@param {Function} [callback] The callback to be called when the data is ready
@returns Instance
*/
set: function( photoset_id, callback ) {
return this._find({
photoset_id: photoset_id,
method: 'flickr.photosets.getPhotos'
}, callback);
},
/**
Get photos from a gallery by ID
@param {String|Number} gallery_id The gallery id to fetch
@param {Function} [callback] The callback to be called when the data is ready
@returns Instance
*/
gallery: function( gallery_id, callback ) {
return this._find({
gallery_id: gallery_id,
method: 'flickr.galleries.getPhotos'
}, callback);
},
/**
Search groups and fetch photos from the first group found
Useful if you know the exact name of a group and want to show the groups photos.
@param {String} group The group name to search for
@param {Function} [callback] The callback to be called when the data is ready
@returns Instance
*/
groupsearch: function( group, callback ) {
return this._call({
text: group,
method: 'flickr.groups.search'
}, function( data ) {
this.group( data.groups.group[0].nsid, callback );
});
},
/**
Get photos from a group by ID
@param {String} group_id The group id to fetch
@param {Function} [callback] The callback to be called when the data is ready
@returns Instance
*/
group: function ( group_id, callback ) {
return this._find({
group_id: group_id,
method: 'flickr.groups.pools.getPhotos'
}, callback );
},
/**
Set flickr options
@param {Object} options The options object to blend
@returns Instance
*/
setOptions: function( options ) {
$.extend(this.options, options);
return this;
},
// call Flickr and raise errors
_call: function( params, callback ) {
var url = 'http://api.flickr.com/services/rest/?';
var scope = this;
params = $.extend({
format : 'json',
jsoncallback : '?',
api_key: this.api_key
}, params );
$.each(params, function( key, value ) {
url += '&' + key + '=' + value;
});
$.getJSON(url, function(data) {
if ( data.stat === 'ok' ) {
callback.call(scope, data);
} else {
Galleria.raise( data.code.toString() + ' ' + data.stat + ': ' + data.message, true );
}
});
return scope;
},
// "hidden" way of getting a big image (~1024) from flickr
_getBig: function( photo ) {
if ( photo.url_l ) {
return photo.url_l;
} else if ( parseInt( photo.width_o, 10 ) > 1280 ) {
return 'http://farm'+photo.farm + '.static.flickr.com/'+photo.server +
'/' + photo.id + '_' + photo.secret + '_b.jpg';
}
return photo.url_o || photo.url_z || photo.url_m;
},
// get image size by option name
_getSize: function( photo, size ) {
var img;
switch(size) {
case 'thumb':
img = photo.url_t;
break;
case 'small':
img = photo.url_s;
break; |
case 'original':
img = photo.url_o ? photo.url_o : this._getBig( photo );
break;
default:
img = photo.url_z || photo.url_m;
break;
}
return img;
},
// ask flickr for photos, parse the result and call the callback with the galleria-ready data array
_find: function( params, callback ) {
params = $.extend({
method: 'flickr.photos.search',
extras: 'url_t,url_m,url_o,url_s,url_l,url_z,description',
sort: this.options.sort
}, params );
return this._call( params, function(data) {
var gallery = [],
photos = data.photos ? data.photos.photo : data.photoset.photo,
len = Math.min( this.options.max, photos.length ),
photo,
i;
for ( i=0; i<len; i++ ) {
photo = photos[i];
gallery.push({
thumb: this._getSize( photo, this.options.thumbSize ),
image: this._getSize( photo, this.options.imageSize ),
big: this._getBig( photo ),
title: photos[i].title,
description: this.options.description && photos[i].description ? photos[i].description._content : '',
link: this.options.backlink ? 'http://flickr.com/photos/' + photo.owner + '/' + photo.id : ''
});
}
callback.call( this, gallery );
});
}
};
/**
Galleria modifications
We fake-extend the load prototype to make Flickr integration as simple as possible
*/
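// For example (illustrative usage inferred from the option parsing below, not
// from official docs): a page passes a "method:argument" string, which this
// wrapper splits on ':' and maps to a Galleria.Flickr method:
//
//   $('#galleria').galleria({
//       flickr: 'tags:sunset',
//       flickrOptions: { max: 20 }
//   });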
// save the old prototype in a local variable
var load = Galleria.prototype.load;
// fake-extend the load prototype using the flickr data
Galleria.prototype.load = function() {
// pass if no data is provided or flickr option not found
if ( arguments.length || typeof this._options.flickr !== 'string' ) {
load.apply( this, Galleria.utils.array( arguments ) );
return;
}
// define some local vars
var self = this,
args = Galleria.utils.array( arguments ),
flickr = this._options.flickr.split(':'),
f,
opts = $.extend({}, self._options.flickrOptions),
loader = typeof opts.loader !== 'undefined' ?
opts.loader : $('<div>').css({
width: 48,
height: 48,
opacity: 0.7,
background:'#000 url('+PATH+'loader.gif) no-repeat 50% 50%'
});
if ( flickr.length ) {
// validate the method
if ( typeof Galleria.Flickr.prototype[ flickr[0] ] !== 'function' ) {
Galleria.raise( flickr[0] + ' method not found in Flickr plugin' );
return load.apply( this, args );
}
// validate the argument
if ( !flickr[1] ) {
Galleria.raise( 'No flickr argument found' );
return load.apply( this, args );
}
// apply the preloader
window.setTimeout(function() {
self.$( 'target' ).append( loader );
},100);
// create the instance
f = new Galleria.Flickr();
// apply Flickr options
if ( typeof self._options.flickrOptions === 'object' ) {
f.setOptions( self._options.flickrOptions );
}
// call the flickr method and trigger the DATA event
f[ flickr[0] ]( flickr[1], function( data ) {
self._data = data;
loader.remove();
self.trigger( Galleria.DATA );
f.options.complete.call(f, data);
});
} else {
// if flickr array not found, pass
load.apply( this, args );
}
};
}( jQuery ) ); |
case 'big':
img = this._getBig( photo );
break; |
build.rs | #![deny(rust_2018_idioms, warnings)]
fn | () {
// Assert that the DEP_K8S_OPENAPI_*_VERSION is set by the k8s-openapi crate's build script correctly.
const MIN: usize = 11;
const MAX: usize = 19;
let enabled_version = {
let mut enabled_versions = (MIN..=MAX).filter(|v| std::env::var(format!("CARGO_FEATURE_TEST_V1_{}", v)).is_ok());
let v1 = enabled_versions.next().expect("None of the test_v1_* features are enabled on the k8s-openapi-tests crate.");
if let Some(v2) = enabled_versions.next() {
panic!("Both test_v1_{} and test_v1_{} features are enabled on the k8s-openapi-tests crate.", v1, v2);
}
v1
};
let expected_k8s_openapi_version = 0x00_01_00_00_u32 | ((enabled_version as u32) << 8);
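// Worked example of the encoding above (my own illustration): with the
// test_v1_13 feature enabled, enabled_version == 13, so the expected value is
// 0x00_01_00_00 | (13 << 8) == 0x00_01_0D_00, i.e. the version bytes 0.1.13.0.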
let actual_k8s_openapi_version: u32 =
std::env::vars_os()
.find_map(|(key, value)| {
let key = key.into_string().ok()?;
if key.starts_with("DEP_K8S_OPENAPI_") && key.ends_with("_VERSION") {
let value = value.into_string().ok()?;
Some(value)
}
else {
None
}
}).expect("DEP_K8S_OPENAPI_*_VERSION must have been set by k8s-openapi")
.parse().expect("DEP_K8S_OPENAPI_*_VERSION is malformed");
assert_eq!(actual_k8s_openapi_version, expected_k8s_openapi_version);
if actual_k8s_openapi_version >= 0x00_01_10_00 {
println!(r#"cargo:rustc-cfg=k8s_apiextensions="v1""#);
}
else {
println!(r#"cargo:rustc-cfg=k8s_apiextensions="v1beta1""#);
}
}
| main |
payment.tests.js | var should = require('should');
var assert = require('assert');
var request = require('supertest');
var mongoose = require('mongoose');
var winston = require('winston');
describe('Routing', function() {
var url = 'http://104.236.131.132:3000';
// within before() you can run all the operations that are needed to set up your tests. In this case
// I want to create a connection with the database, and when I'm done, I call done(). | done();
});
describe('Payment Tests', function() {
it('Create charge', function(done) {
var body = {
amount:1000,
sender:"cus_AMNM90zBSGAthT",
recipient: "acct_1A1flXCTEZoCLhJu"
};
request(url)
.post('/payment/charge')
.expect(200)
.send(body)
.end(function(err, res) {
if (err || !res || !err && !res) {
throw err;
} else {
res.should.be.json;
}
done();
});
});
});
}); | before(function(done) {
// In our tests we use the test db
mongoose.connect("mongodb://104.236.131.132/mlb"); |
web3-core-requestManager.min.js | "use strict";var _typeof="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t};!function(t){if("object"===("undefined"==typeof exports?"undefined":_typeof(exports))&&"undefined"!=typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).RequestManager=t()}}(function(){return function t(n,e,r){function i(s,u){if(!e[s]){if(!n[s]){var a="function"==typeof require&&require;if(!u&&a)return a(s,!0);if(o)return o(s,!0);var h=new Error("Cannot find module '"+s+"'");throw h.code="MODULE_NOT_FOUND",h}var l=e[s]={exports:{}};n[s][0].call(l.exports,function(t){var e=n[s][1][t];return i(e||t)},l,l.exports,t,n,e,r)}return e[s].exports}for(var o="function"==typeof require&&require,s=0;s<r.length;s++)i(r[s]);return i}({1:[function(t,n,e){(function(){function t(t){function n(n,e,r,i,o,s){for(;o>=0&&o<s;o+=t){var u=i?i[o]:o;r=e(r,n[u],u,n)}return r}return function(e,r,i,o){r=M(r,o,4);var s=!N(e)&&b.keys(e),u=(s||e).length,a=t>0?0:u-1;return arguments.length<3&&(i=e[s?s[a]:a],a+=t),n(e,r,i,s,a,u)}}function r(t){return function(n,e,r){e=x(e,r);for(var i=C(n),o=t>0?0:i-1;o>=0&&o<i;o+=t)if(e(n[o],o,n))return o;return-1}}function i(t,n,e){return function(r,i,o){var s=0,u=C(r);if("number"==typeof o)t>0?s=o>=0?o:Math.max(o+u,s):u=o>=0?Math.min(o+1,u):o+u+1;else if(e&&o&&u)return o=e(r,i),r[o]===i?o:-1;if(i!==i)return(o=n(f.call(r,s,u),b.isNaN))>=0?o+s:-1;for(o=t>0?s:u-1;o>=0&&o<u;o+=t)if(r[o]===i)return o;return-1}}function o(t,n){var e=B.length,r=t.constructor,i=b.isFunction(r)&&r.prototype||h,o="constructor";for(b.has(t,o)&&!b.contains(n,o)&&n.push(o);e--;)(o=B[e])in t&&t[o]!==i[o]&&!b.contains(n,o)&&n.push(o)}var s=this,u=s._,a=Array.prototype,h=Object.prototype,l=Function.prototype,c=a.push,f=a.slice,d=h.toString,p=h.hasOwnProperty,m=Array.isArray,v=Object.keys,g=l.bind,y=Object.create,w=function(){},b=function t(n){return n instanceof t?n:this instanceof t?void(this._wrapped=n):new t(n)};void 0!==e?(void 0!==n&&n.exports&&(e=n.exports=b),e._=b):s._=b,b.VERSION="1.8.3";var M=function(t,n,e){if(void 0===n)return t;switch(null==e?3:e){case 1:return function(e){return t.call(n,e)};case 2:return function(e,r){return t.call(n,e,r)};case 3:return function(e,r,i){return t.call(n,e,r,i)};case 4:return function(e,r,i,o){return t.call(n,e,r,i,o)}}return function(){return t.apply(n,arguments)}},x=function(t,n,e){return null==t?b.identity:b.isFunction(t)?M(t,n,e):b.isObject(t)?b.matcher(t):b.property(t)};b.iteratee=function(t,n){return x(t,n,1/0)};var k=function(t,n){return function(e){var r=arguments.length;if(r<2||null==e)return e;for(var i=1;i<r;i++)for(var o=arguments[i],s=t(o),u=s.length,a=0;a<u;a++){var h=s[a];n&&void 0!==e[h]||(e[h]=o[h])}return e}},_=function(t){if(!b.isObject(t))return{};if(y)return y(t);w.prototype=t;var n=new w;return w.prototype=null,n},S=function(t){return function(n){return null==n?void 0:n[t]}},A=Math.pow(2,53)-1,C=S("length"),N=function(t){var n=C(t);return"number"==typeof n&&n>=0&&n<=A};b.each=b.forEach=function(t,n,e){n=M(n,e);var r,i;if(N(t))for(r=0,i=t.length;r<i;r++)n(t[r],r,t);else{var o=b.keys(t);for(r=0,i=o.length;r<i;r++)n(t[o[r]],o[r],t)}return t},b.map=b.collect=function(t,n,e){n=x(n,e);for(var r=!N(t)&&b.keys(t),i=(r||t).length,o=Array(i),s=0;s<i;s++){var 
u=r?r[s]:s;o[s]=n(t[u],u,t)}return o},b.reduce=b.foldl=b.inject=t(1),b.reduceRight=b.foldr=t(-1),b.find=b.detect=function(t,n,e){var r;if(void 0!==(r=N(t)?b.findIndex(t,n,e):b.findKey(t,n,e))&&-1!==r)return t[r]},b.filter=b.select=function(t,n,e){var r=[];return n=x(n,e),b.each(t,function(t,e,i){n(t,e,i)&&r.push(t)}),r},b.reject=function(t,n,e){return b.filter(t,b.negate(x(n)),e)},b.every=b.all=function(t,n,e){n=x(n,e);for(var r=!N(t)&&b.keys(t),i=(r||t).length,o=0;o<i;o++){var s=r?r[o]:o;if(!n(t[s],s,t))return!1}return!0},b.some=b.any=function(t,n,e){n=x(n,e);for(var r=!N(t)&&b.keys(t),i=(r||t).length,o=0;o<i;o++){var s=r?r[o]:o;if(n(t[s],s,t))return!0}return!1},b.contains=b.includes=b.include=function(t,n,e,r){return N(t)||(t=b.values(t)),("number"!=typeof e||r)&&(e=0),b.indexOf(t,n,e)>=0},b.invoke=function(t,n){var e=f.call(arguments,2),r=b.isFunction(n);return b.map(t,function(t){var i=r?n:t[n];return null==i?i:i.apply(t,e)})},b.pluck=function(t,n){return b.map(t,b.property(n))},b.where=function(t,n){return b.filter(t,b.matcher(n))},b.findWhere=function(t,n){return b.find(t,b.matcher(n))},b.max=function(t,n,e){var r,i,o=-1/0,s=-1/0;if(null==n&&null!=t)for(var u=0,a=(t=N(t)?t:b.values(t)).length;u<a;u++)(r=t[u])>o&&(o=r);else n=x(n,e),b.each(t,function(t,e,r){((i=n(t,e,r))>s||i===-1/0&&o===-1/0)&&(o=t,s=i)});return o},b.min=function(t,n,e){var r,i,o=1/0,s=1/0;if(null==n&&null!=t)for(var u=0,a=(t=N(t)?t:b.values(t)).length;u<a;u++)(r=t[u])<o&&(o=r);else n=x(n,e),b.each(t,function(t,e,r){((i=n(t,e,r))<s||i===1/0&&o===1/0)&&(o=t,s=i)});return o},b.shuffle=function(t){for(var n,e=N(t)?t:b.values(t),r=e.length,i=Array(r),o=0;o<r;o++)(n=b.random(0,o))!==o&&(i[o]=i[n]),i[n]=e[o];return i},b.sample=function(t,n,e){return null==n||e?(N(t)||(t=b.values(t)),t[b.random(t.length-1)]):b.shuffle(t).slice(0,Math.max(0,n))},b.sortBy=function(t,n,e){return n=x(n,e),b.pluck(b.map(t,function(t,e,r){return{value:t,index:e,criteria:n(t,e,r)}}).sort(function(t,n){var e=t.criteria,r=n.criteria;if(e!==r){if(e>r||void 0===e)return 1;if(e<r||void 0===r)return-1}return t.index-n.index}),"value")};var T=function(t){return function(n,e,r){var i={};return e=x(e,r),b.each(n,function(r,o){var s=e(r,o,n);t(i,r,s)}),i}};b.groupBy=T(function(t,n,e){b.has(t,e)?t[e].push(n):t[e]=[n]}),b.indexBy=T(function(t,n,e){t[e]=n}),b.countBy=T(function(t,n,e){b.has(t,e)?t[e]++:t[e]=1}),b.toArray=function(t){return t?b.isArray(t)?f.call(t):N(t)?b.map(t,b.identity):b.values(t):[]},b.size=function(t){return null==t?0:N(t)?t.length:b.keys(t).length},b.partition=function(t,n,e){n=x(n,e);var r=[],i=[];return b.each(t,function(t,e,o){(n(t,e,o)?r:i).push(t)}),[r,i]},b.first=b.head=b.take=function(t,n,e){if(null!=t)return null==n||e?t[0]:b.initial(t,t.length-n)},b.initial=function(t,n,e){return f.call(t,0,Math.max(0,t.length-(null==n||e?1:n)))},b.last=function(t,n,e){if(null!=t)return null==n||e?t[t.length-1]:b.rest(t,Math.max(0,t.length-n))},b.rest=b.tail=b.drop=function(t,n,e){return f.call(t,null==n||e?1:n)},b.compact=function(t){return b.filter(t,b.identity)};var E=function t(n,e,r,i){for(var o=[],s=0,u=i||0,a=C(n);u<a;u++){var h=n[u];if(N(h)&&(b.isArray(h)||b.isArguments(h))){e||(h=t(h,e,r));var l=0,c=h.length;for(o.length+=c;l<c;)o[s++]=h[l++]}else r||(o[s++]=h)}return o};b.flatten=function(t,n){return E(t,n,!1)},b.without=function(t){return b.difference(t,f.call(arguments,1))},b.uniq=b.unique=function(t,n,e,r){b.isBoolean(n)||(r=e,e=n,n=!1),null!=e&&(e=x(e,r));for(var i=[],o=[],s=0,u=C(t);s<u;s++){var 
a=t[s],h=e?e(a,s,t):a;n?(s&&o===h||i.push(a),o=h):e?b.contains(o,h)||(o.push(h),i.push(a)):b.contains(i,a)||i.push(a)}return i},b.union=function(){return b.uniq(E(arguments,!0,!0))},b.intersection=function(t){for(var n=[],e=arguments.length,r=0,i=C(t);r<i;r++){var o=t[r];if(!b.contains(n,o)){for(var s=1;s<e&&b.contains(arguments[s],o);s++);s===e&&n.push(o)}}return n},b.difference=function(t){var n=E(arguments,!0,!0,1);return b.filter(t,function(t){return!b.contains(n,t)})},b.zip=function(){return b.unzip(arguments)},b.unzip=function(t){for(var n=t&&b.max(t,C).length||0,e=Array(n),r=0;r<n;r++)e[r]=b.pluck(t,r);return e},b.object=function(t,n){for(var e={},r=0,i=C(t);r<i;r++)n?e[t[r]]=n[r]:e[t[r][0]]=t[r][1];return e},b.findIndex=r(1),b.findLastIndex=r(-1),b.sortedIndex=function(t,n,e,r){for(var i=(e=x(e,r,1))(n),o=0,s=C(t);o<s;){var u=Math.floor((o+s)/2);e(t[u])<i?o=u+1:s=u}return o},b.indexOf=i(1,b.findIndex,b.sortedIndex),b.lastIndexOf=i(-1,b.findLastIndex),b.range=function(t,n,e){null==n&&(n=t||0,t=0),e=e||1;for(var r=Math.max(Math.ceil((n-t)/e),0),i=Array(r),o=0;o<r;o++,t+=e)i[o]=t;return i};var j=function(t,n,e,r,i){if(!(r instanceof n))return t.apply(e,i);var o=_(t.prototype),s=t.apply(o,i);return b.isObject(s)?s:o};b.bind=function(t,n){if(g&&t.bind===g)return g.apply(t,f.call(arguments,1));if(!b.isFunction(t))throw new TypeError("Bind must be called on a function");var e=f.call(arguments,2);return function r(){return j(t,r,n,this,e.concat(f.call(arguments)))}},b.partial=function(t){var n=f.call(arguments,1);return function e(){for(var r=0,i=n.length,o=Array(i),s=0;s<i;s++)o[s]=n[s]===b?arguments[r++]:n[s];for(;r<arguments.length;)o.push(arguments[r++]);return j(t,e,this,this,o)}},b.bindAll=function(t){var n,e,r=arguments.length;if(r<=1)throw new Error("bindAll must be passed function names");for(n=1;n<r;n++)t[e=arguments[n]]=b.bind(t[e],t);return t},b.memoize=function(t,n){var e=function e(r){var i=e.cache,o=""+(n?n.apply(this,arguments):r);return b.has(i,o)||(i[o]=t.apply(this,arguments)),i[o]};return e.cache={},e},b.delay=function(t,n){var e=f.call(arguments,2);return setTimeout(function(){return t.apply(null,e)},n)},b.defer=b.partial(b.delay,b,1),b.throttle=function(t,n,e){var r,i,o,s=null,u=0;e||(e={});var a=function(){u=!1===e.leading?0:b.now(),s=null,o=t.apply(r,i),s||(r=i=null)};return function(){var h=b.now();u||!1!==e.leading||(u=h);var l=n-(h-u);return r=this,i=arguments,l<=0||l>n?(s&&(clearTimeout(s),s=null),u=h,o=t.apply(r,i),s||(r=i=null)):s||!1===e.trailing||(s=setTimeout(a,l)),o}},b.debounce=function(t,n,e){var r,i,o,s,u,a=function a(){var h=b.now()-s;h<n&&h>=0?r=setTimeout(a,n-h):(r=null,e||(u=t.apply(o,i),r||(o=i=null)))};return function(){o=this,i=arguments,s=b.now();var h=e&&!r;return r||(r=setTimeout(a,n)),h&&(u=t.apply(o,i),o=i=null),u}},b.wrap=function(t,n){return b.partial(n,t)},b.negate=function(t){return function(){return!t.apply(this,arguments)}},b.compose=function(){var t=arguments,n=t.length-1;return function(){for(var e=n,r=t[n].apply(this,arguments);e--;)r=t[e].call(this,r);return r}},b.after=function(t,n){return function(){if(--t<1)return n.apply(this,arguments)}},b.before=function(t,n){var e;return function(){return--t>0&&(e=n.apply(this,arguments)),t<=1&&(n=null),e}},b.once=b.partial(b.before,2);var I=!{toString:null}.propertyIsEnumerable("toString"),B=["valueOf","isPrototypeOf","toString","propertyIsEnumerable","hasOwnProperty","toLocaleString"];b.keys=function(t){if(!b.isObject(t))return[];if(v)return v(t);var n=[];for(var e in 
t)b.has(t,e)&&n.push(e);return I&&o(t,n),n},b.allKeys=function(t){if(!b.isObject(t))return[];var n=[];for(var e in t)n.push(e);return I&&o(t,n),n},b.values=function(t){for(var n=b.keys(t),e=n.length,r=Array(e),i=0;i<e;i++)r[i]=t[n[i]];return r},b.mapObject=function(t,n,e){n=x(n,e);for(var r,i=b.keys(t),o=i.length,s={},u=0;u<o;u++)s[r=i[u]]=n(t[r],r,t);return s},b.pairs=function(t){for(var n=b.keys(t),e=n.length,r=Array(e),i=0;i<e;i++)r[i]=[n[i],t[n[i]]];return r},b.invert=function(t){for(var n={},e=b.keys(t),r=0,i=e.length;r<i;r++)n[t[e[r]]]=e[r];return n},b.functions=b.methods=function(t){var n=[];for(var e in t)b.isFunction(t[e])&&n.push(e);return n.sort()},b.extend=k(b.allKeys),b.extendOwn=b.assign=k(b.keys),b.findKey=function(t,n,e){n=x(n,e);for(var r,i=b.keys(t),o=0,s=i.length;o<s;o++)if(r=i[o],n(t[r],r,t))return r},b.pick=function(t,n,e){var r,i,o={},s=t;if(null==s)return o;b.isFunction(n)?(i=b.allKeys(s),r=M(n,e)):(i=E(arguments,!1,!1,1),r=function(t,n,e){return n in e},s=Object(s));for(var u=0,a=i.length;u<a;u++){var h=i[u],l=s[h];r(l,h,s)&&(o[h]=l)}return o},b.omit=function(t,n,e){if(b.isFunction(n))n=b.negate(n);else{var r=b.map(E(arguments,!1,!1,1),String);n=function(t,n){return!b.contains(r,n)}}return b.pick(t,n,e)},b.defaults=k(b.allKeys,!0),b.create=function(t,n){var e=_(t);return n&&b.extendOwn(e,n),e},b.clone=function(t){return b.isObject(t)?b.isArray(t)?t.slice():b.extend({},t):t},b.tap=function(t,n){return n(t),t},b.isMatch=function(t,n){var e=b.keys(n),r=e.length;if(null==t)return!r;for(var i=Object(t),o=0;o<r;o++){var s=e[o];if(n[s]!==i[s]||!(s in i))return!1}return!0};var O=function t(n,e,r,i){if(n===e)return 0!==n||1/n==1/e;if(null==n||null==e)return n===e;n instanceof b&&(n=n._wrapped),e instanceof b&&(e=e._wrapped);var o=d.call(n);if(o!==d.call(e))return!1;switch(o){case"[object RegExp]":case"[object String]":return""+n==""+e;case"[object Number]":return+n!=+n?+e!=+e:0==+n?1/+n==1/e:+n==+e;case"[object Date]":case"[object Boolean]":return+n==+e}var s="[object Array]"===o;if(!s){if("object"!=(void 0===n?"undefined":_typeof(n))||"object"!=(void 0===e?"undefined":_typeof(e)))return!1;var u=n.constructor,a=e.constructor;if(u!==a&&!(b.isFunction(u)&&u instanceof u&&b.isFunction(a)&&a instanceof a)&&"constructor"in n&&"constructor"in e)return!1}r=r||[],i=i||[];for(var h=r.length;h--;)if(r[h]===n)return i[h]===e;if(r.push(n),i.push(e),s){if((h=n.length)!==e.length)return!1;for(;h--;)if(!t(n[h],e[h],r,i))return!1}else{var l,c=b.keys(n);if(h=c.length,b.keys(e).length!==h)return!1;for(;h--;)if(l=c[h],!b.has(e,l)||!t(n[l],e[l],r,i))return!1}return r.pop(),i.pop(),!0};b.isEqual=function(t,n){return O(t,n)},b.isEmpty=function(t){return null==t||(N(t)&&(b.isArray(t)||b.isString(t)||b.isArguments(t))?0===t.length:0===b.keys(t).length)},b.isElement=function(t){return!(!t||1!==t.nodeType)},b.isArray=m||function(t){return"[object Array]"===d.call(t)},b.isObject=function(t){var n=void 0===t?"undefined":_typeof(t);return"function"===n||"object"===n&&!!t},b.each(["Arguments","Function","String","Number","Date","RegExp","Error"],function(t){b["is"+t]=function(n){return d.call(n)==="[object "+t+"]"}}),b.isArguments(arguments)||(b.isArguments=function(t){return b.has(t,"callee")}),"function"!=typeof/./&&"object"!=("undefined"==typeof Int8Array?"undefined":_typeof(Int8Array))&&(b.isFunction=function(t){return"function"==typeof t||!1}),b.isFinite=function(t){return isFinite(t)&&!isNaN(parseFloat(t))},b.isNaN=function(t){return 
b.isNumber(t)&&t!==+t},b.isBoolean=function(t){return!0===t||!1===t||"[object Boolean]"===d.call(t)},b.isNull=function(t){return null===t},b.isUndefined=function(t){return void 0===t},b.has=function(t,n){return null!=t&&p.call(t,n)},b.noConflict=function(){return s._=u,this},b.identity=function(t){return t},b.constant=function(t){return function(){return t}},b.noop=function(){},b.property=S,b.propertyOf=function(t){return null==t?function(){}:function(n){return t[n]}},b.matcher=b.matches=function(t){return t=b.extendOwn({},t),function(n){return b.isMatch(n,t)}},b.times=function(t,n,e){var r=Array(Math.max(0,t));n=M(n,e,1);for(var i=0;i<t;i++)r[i]=n(i);return r},b.random=function(t,n){return null==n&&(n=t,t=0),t+Math.floor(Math.random()*(n-t+1))},b.now=Date.now||function(){return(new Date).getTime()};var R={"&":"&amp;","<":"&lt;",">":"&gt;",'"':"&quot;","'":"&#x27;","`":"&#x60;"},P=b.invert(R),L=function(t){var n=function(n){return t[n]},e="(?:"+b.keys(t).join("|")+")",r=RegExp(e),i=RegExp(e,"g");return function(t){return t=null==t?"":""+t,r.test(t)?t.replace(i,n):t}};b.escape=L(R),b.unescape=L(P),b.result=function(t,n,e){var r=null==t?void 0:t[n];return void 0===r&&(r=e),b.isFunction(r)?r.call(t):r};var F=0;b.uniqueId=function(t){var n=++F+"";return t?t+n:n},b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var H=/(.)^/,q={"'":"'","\\":"\\","\r":"r","\n":"n","\u2028":"u2028","\u2029":"u2029"},U=/\\|'|\r|\n|\u2028|\u2029/g,z=function(t){return"\\"+q[t]};b.template=function(t,n,e){!n&&e&&(n=e),n=b.defaults({},n,b.templateSettings);var r=RegExp([(n.escape||H).source,(n.interpolate||H).source,(n.evaluate||H).source].join("|")+"|$","g"),i=0,o="__p+='";t.replace(r,function(n,e,r,s,u){return o+=t.slice(i,u).replace(U,z),i=u+n.length,e?o+="'+\n((__t=("+e+"))==null?'':_.escape(__t))+\n'":r?o+="'+\n((__t=("+r+"))==null?'':__t)+\n'":s&&(o+="';\n"+s+"\n__p+='"),n}),o+="';\n",n.variable||(o="with(obj||{}){\n"+o+"}\n"),o="var __t,__p='',__j=Array.prototype.join,print=function(){__p+=__j.call(arguments,'');};\n"+o+"return __p;\n";try{var s=new Function(n.variable||"obj","_",o)}catch(t){throw t.source=o,t}var u=function(t){return s.call(this,t,b)},a=n.variable||"obj";return u.source="function("+a+"){\n"+o+"}",u},b.chain=function(t){var n=b(t);return n._chain=!0,n};var W=function(t,n){return t._chain?b(n).chain():n};b.mixin=function(t){b.each(b.functions(t),function(n){var e=b[n]=t[n];b.prototype[n]=function(){var t=[this._wrapped];return c.apply(t,arguments),W(this,e.apply(b,t))}})},b.mixin(b),b.each(["pop","push","reverse","shift","sort","splice","unshift"],function(t){var n=a[t];b.prototype[t]=function(){var e=this._wrapped;return n.apply(e,arguments),"shift"!==t&&"splice"!==t||0!==e.length||delete e[0],W(this,e)}}),b.each(["concat","join","slice"],function(t){var n=a[t];b.prototype[t]=function(){return W(this,n.apply(this._wrapped,arguments))}}),b.prototype.value=function(){return this._wrapped},b.prototype.valueOf=b.prototype.toJSON=b.prototype.value,b.prototype.toString=function(){return""+this._wrapped}}).call(this)},{}],2:[function(t,n,e){n.exports={defaultBlock:"latest",defaultAccount:null}},{}],3:[function(t,n,e){n.exports={ErrorResponse:function(t){var n=t&&t.error&&t.error.message?t.error.message:"Returned error: "+JSON.stringify(t);return new Error(n)},InvalidNumberOfParams:function(t,n,e){return new Error('Invalid number of parameters for "'+e+'". 
Got '+t+" expected "+n+"!")},InvalidConnection:function(t){return new Error("CONNECTION ERROR: Couldn't connect to node "+t+".")},InvalidProvider:function(){return new Error("Provider not set or invalid")},InvalidResponse:function(t){var n=t&&t.error&&t.error.message?t.error.message:"Invalid JSON RPC response: "+JSON.stringify(t);return new Error(n)},ConnectionTimeout:function(t){return new Error("CONNECTION TIMEOUT: timeout of "+t+" ms achived")}}},{}],4:[function(t,n,e){var r=t("underscore"),i=t("web3-utils"),o=t("web3-eth-iban"),s=t("./config"),u=function(t){return i.toBN(t).toString(10)},a=function(t){return"latest"===t||"pending"===t||"earliest"===t},h=function(t){if(void 0!==t)return a(t)?t:i.numberToHex(t)},l=function(t){return null!==t.blockNumber&&(t.blockNumber=i.hexToNumber(t.blockNumber)),null!==t.transactionIndex&&(t.transactionIndex=i.hexToNumber(t.transactionIndex)),t.nonce=i.hexToNumber(t.nonce),t.gas=i.hexToNumber(t.gas),t.gasPrice=u(t.gasPrice),t.value=u(t.value),t.to&&(t.to=i.toChecksumAddress(t.to)),t.from&&(t.from=i.toChecksumAddress(t.from)),t},c=function(t){if("string"==typeof t.blockHash&&"string"==typeof t.transactionHash&&"string"==typeof t.logIndex){var n=i.sha3(t.blockHash.replace("0x","")+t.transactionHash.replace("0x","")+t.logIndex.replace("0x",""));t.id="log_"+n.replace("0x","").substr(0,8)}else t.id||(t.id=null);return null!==t.blockNumber&&(t.blockNumber=i.hexToNumber(t.blockNumber)),null!==t.transactionIndex&&(t.transactionIndex=i.hexToNumber(t.transactionIndex)),null!==t.logIndex&&(t.logIndex=i.hexToNumber(t.logIndex)),t.address&&(t.address=i.toChecksumAddress(t.address)),t},f=function(t){var n=new o(t);if(n.isValid()&&n.isDirect())return n.toAddress().toLowerCase();if(i.isAddress(t))return"0x"+t.toLowerCase().replace("0x","");throw new Error('Provided address "'+t+"\" is invalid, the capitalization checksum test failed, or its an indrect IBAN address which can't be converted.")};n.exports={inputDefaultBlockNumberFormatter:function(t){return void 0===t||null===t?s.defaultBlock:"genesis"===t||"earliest"===t?"0x0":h(t)},inputBlockNumberFormatter:h,inputCallFormatter:function(t){var n=t.from||s.defaultAccount;return n&&(t.from=f(n)),t.to&&(t.to=f(t.to)),["gasPrice","gas","gasLimit","value","nonce"].filter(function(n){return void 0!==t[n]}).forEach(function(n){t[n]=i.numberToHex(t[n])}),t},inputTransactionFormatter:function(t){if(!r.isNumber(t.from)&&!r.isObject(t.from)){if(t.from=t.from||s.defaultAccount,!t.from&&!r.isNumber(t.from))throw new Error('The send transactions "from" field must be defined!');t.from=f(t.from)}return t.to&&(t.to=f(t.to)),(t.gas||t.gasLimit)&&(t.gas=t.gas||t.gasLimit),["gasPrice","gas","value","nonce"].filter(function(n){return void 0!==t[n]}).forEach(function(n){t[n]=i.numberToHex(t[n])}),t},inputAddressFormatter:f,inputPostFormatter:function(t){return t.ttl&&(t.ttl=i.numberToHex(t.ttl)),t.workToProve&&(t.workToProve=i.numberToHex(t.workToProve)),t.priority&&(t.priority=i.numberToHex(t.priority)),r.isArray(t.topics)||(t.topics=t.topics?[t.topics]:[]),t.topics=t.topics.map(function(t){return 0===t.indexOf("0x")?t:i.fromUtf8(t)}),t},inputLogFormatter:function(t){var n=function(t){return null===t||void 0===t?null:0===(t=String(t)).indexOf("0x")?t:i.fromUtf8(t)};return t.topics=t.topics||[],t.topics=t.topics.map(function(t){return r.isArray(t)?t.map(n):n(t)}),n=null,t.address&&(t.address=f(t.address)),t},inputSignFormatter:function(t){return 
i.isHex(t)?t:i.utf8ToHex(t)},outputBigNumberFormatter:u,outputTransactionFormatter:l,outputTransactionReceiptFormatter:function(t){if("object"!==(void 0===t?"undefined":_typeof(t)))throw new Error("Received receipt is invalid: "+t);return null!==t.blockNumber&&(t.blockNumber=i.hexToNumber(t.blockNumber)),null!==t.transactionIndex&&(t.transactionIndex=i.hexToNumber(t.transactionIndex)),t.cumulativeGasUsed=i.hexToNumber(t.cumulativeGasUsed),t.gasUsed=i.hexToNumber(t.gasUsed),r.isArray(t.logs)&&(t.logs=t.logs.map(c)),t.contractAddress&&(t.contractAddress=i.toChecksumAddress(t.contractAddress)),t},outputBlockFormatter:function(t){return t.gasLimit=i.hexToNumber(t.gasLimit),t.gasUsed=i.hexToNumber(t.gasUsed),t.size=i.hexToNumber(t.size),t.timestamp=i.hexToNumber(t.timestamp),null!==t.number&&(t.number=i.hexToNumber(t.number)),t.difficulty&&(t.difficulty=u(t.difficulty)),t.totalDifficulty&&(t.totalDifficulty=u(t.totalDifficulty)),r.isArray(t.transactions)&&t.transactions.forEach(function(t){if(!r.isString(t))return l(t)}),t.miner&&(t.miner=i.toChecksumAddress(t.miner)),t},outputLogFormatter:c,outputPostFormatter:function(t){return t.expiry=i.hexToNumber(t.expiry),t.sent=i.hexToNumber(t.sent),t.ttl=i.hexToNumber(t.ttl),t.workProved=i.hexToNumber(t.workProved),t.topics||(t.topics=[]),t.topics=t.topics.map(function(t){return i.toUtf8(t)}),t},outputSyncingFormatter:function(t){return t.startingBlock=i.hexToNumber(t.startingBlock),t.currentBlock=i.hexToNumber(t.currentBlock),t.highestBlock=i.hexToNumber(t.highestBlock),t.knownStates&&(t.knownStates=i.hexToNumber(t.knownStates),t.pulledStates=i.hexToNumber(t.pulledStates)),t}}},{"./config":2,underscore:1,"web3-eth-iban":11,"web3-utils":33}],5:[function(t,n,e){var r=t("./errors"),i=t("./formatters"),o=t("./config");n.exports={errors:r,formatters:i,config:o}},{"./config":2,"./errors":3,"./formatters":4}],6:[function(t,n,e){arguments[4][1][0].apply(e,arguments)},{dup:1}],7:[function(t,n,e){var r=t("./jsonrpc"),i=t("web3-core-helpers").errors,o=function(t){this.requestManager=t,this.requests=[]};o.prototype.add=function(t){this.requests.push(t)},o.prototype.execute=function(){var t=this.requests;this.requestManager.sendBatch(t,function(n,e){e=e||[],t.map(function(t,n){return e[n]||{}}).forEach(function(n,e){if(t[e].callback){if(n&&n.error)return t[e].callback(i.ErrorResponse(n));if(!r.isValidResponse(n))return t[e].callback(i.InvalidResponse(n));t[e].callback(null,t[e].format?t[e].format(n.result):n.result)}})})},n.exports=o},{"./jsonrpc":9,"web3-core-helpers":5}],8:[function(t,n,e){var r=null,i=Function("return this")();void 0!==i.ethereumProvider?r=i.ethereumProvider:void 0!==i.web3&&i.web3.currentProvider&&(i.web3.currentProvider.sendAsync&&(i.web3.currentProvider.send=i.web3.currentProvider.sendAsync,delete i.web3.currentProvider.sendAsync),!i.web3.currentProvider.on&&i.web3.currentProvider.connection&&"ipcProviderWrapper"===i.web3.currentProvider.connection.constructor.name&&(i.web3.currentProvider.on=function(t,n){if("function"!=typeof n)throw new Error("The second parameter callback must be a function.");switch(t){case"data":this.connection.on("data",function(t){var e="";t=t.toString();try{e=JSON.parse(t)}catch(e){return n(new Error("Couldn't parse response data"+t))}e.id||-1===e.method.indexOf("_subscription")||n(null,e)});break;default:this.connection.on(t,n)}}),r=i.web3.currentProvider),n.exports=r},{}],9:[function(t,n,e){var r={messageId:0};r.toPayload=function(t,n){if(!t)throw new Error('JSONRPC method should be specified for params: 
"'+JSON.stringify(n)+'"!');return r.messageId++,{jsonrpc:"2.0",id:r.messageId,method:t,params:n||[]}},r.isValidResponse=function(t){function n(t){return!(!t||t.error||"2.0"!==t.jsonrpc||"number"!=typeof t.id&&"string"!=typeof t.id||void 0===t.result)}return Array.isArray(t)?t.every(n):n(t)},r.toBatchPayload=function(t){return t.map(function(t){return r.toPayload(t.method,t.params)})},n.exports=r},{}],10:[function(t,n,e){!function(n,e){function r(t,n){if(!t)throw new Error(n||"Assertion failed")}function i(t,n){t.super_=n;var e=function(){};e.prototype=n.prototype,t.prototype=new e,t.prototype.constructor=t}function o(t,n,e){if(o.isBN(t))return t;this.negative=0,this.words=null,this.length=0,this.red=null,null!==t&&("le"!==n&&"be"!==n||(e=n,n=10),this._init(t||0,n||10,e||"be"))}function s(t,n,e){for(var r=0,i=Math.min(t.length,e),o=n;o<i;o++){var s=t.charCodeAt(o)-48;r<<=4,r|=s>=49&&s<=54?s-49+10:s>=17&&s<=22?s-17+10:15&s}return r}function u(t,n,e,r){for(var i=0,o=Math.min(t.length,e),s=n;s<o;s++){var u=t.charCodeAt(s)-48;i*=r,i+=u>=49?u-49+10:u>=17?u-17+10:u}return i}function a(t){for(var n=new Array(t.bitLength()),e=0;e<n.length;e++){var r=e/26|0,i=e%26;n[e]=(t.words[r]&1<<i)>>>i}return n}function h(t,n,e){e.negative=n.negative^t.negative;var r=t.length+n.length|0;e.length=r,r=r-1|0;var i=0|t.words[0],o=0|n.words[0],s=i*o,u=67108863&s,a=s/67108864|0;e.words[0]=u;for(var h=1;h<r;h++){for(var l=a>>>26,c=67108863&a,f=Math.min(h,n.length-1),d=Math.max(0,h-t.length+1);d<=f;d++){var p=h-d|0;l+=(s=(i=0|t.words[p])*(o=0|n.words[d])+c)/67108864|0,c=67108863&s}e.words[h]=0|c,a=0|l}return 0!==a?e.words[h]=0|a:e.length--,e.strip()}function l(t,n,e){e.negative=n.negative^t.negative,e.length=t.length+n.length;for(var r=0,i=0,o=0;o<e.length-1;o++){var s=i;i=0;for(var u=67108863&r,a=Math.min(o,n.length-1),h=Math.max(0,o-t.length+1);h<=a;h++){var l=o-h,c=(0|t.words[l])*(0|n.words[h]),f=67108863&c;u=67108863&(f=f+u|0),i+=(s=(s=s+(c/67108864|0)|0)+(f>>>26)|0)>>>26,s&=67108863}e.words[o]=u,r=s,s=i}return 0!==r?e.words[o]=r:e.length--,e.strip()}function c(t,n,e){return(new f).mulp(t,n,e)}function f(t,n){this.x=t,this.y=n}function d(t,n){this.name=t,this.p=new o(n,16),this.n=this.p.bitLength(),this.k=new o(1).iushln(this.n).isub(this.p),this.tmp=this._tmp()}function p(){d.call(this,"k256","ffffffff ffffffff ffffffff ffffffff ffffffff ffffffff fffffffe fffffc2f")}function m(){d.call(this,"p224","ffffffff ffffffff ffffffff ffffffff 00000000 00000000 00000001")}function v(){d.call(this,"p192","ffffffff ffffffff ffffffff fffffffe ffffffff ffffffff")}function g(){d.call(this,"25519","7fffffffffffffff ffffffffffffffff ffffffffffffffff ffffffffffffffed")}function y(t){if("string"==typeof t){var n=o._prime(t);this.m=n.p,this.prime=n}else r(t.gtn(1),"modulus must be greater than 1"),this.m=t,this.prime=null}function w(t){y.call(this,t),this.shift=this.m.bitLength(),this.shift%26!=0&&(this.shift+=26-this.shift%26),this.r=new o(1).iushln(this.shift),this.r2=this.imod(this.r.sqr()),this.rinv=this.r._invmp(this.m),this.minv=this.rinv.mul(this.r).isubn(1).div(this.m),this.minv=this.minv.umod(this.r),this.minv=this.r.sub(this.minv)}"object"===(void 0===n?"undefined":_typeof(n))?n.exports=o:e.BN=o,o.BN=o,o.wordSize=26;var b;try{b=t("buffer").Buffer}catch(t){}o.isBN=function(t){return t instanceof o||null!==t&&"object"===(void 0===t?"undefined":_typeof(t))&&t.constructor.wordSize===o.wordSize&&Array.isArray(t.words)},o.max=function(t,n){return t.cmp(n)>0?t:n},o.min=function(t,n){return 
t.cmp(n)<0?t:n},o.prototype._init=function(t,n,e){if("number"==typeof t)return this._initNumber(t,n,e);if("object"===(void 0===t?"undefined":_typeof(t)))return this._initArray(t,n,e);"hex"===n&&(n=16),r(n===(0|n)&&n>=2&&n<=36);var i=0;"-"===(t=t.toString().replace(/\s+/g,""))[0]&&i++,16===n?this._parseHex(t,i):this._parseBase(t,n,i),"-"===t[0]&&(this.negative=1),this.strip(),"le"===e&&this._initArray(this.toArray(),n,e)},o.prototype._initNumber=function(t,n,e){t<0&&(this.negative=1,t=-t),t<67108864?(this.words=[67108863&t],this.length=1):t<4503599627370496?(this.words=[67108863&t,t/67108864&67108863],this.length=2):(r(t<9007199254740992),this.words=[67108863&t,t/67108864&67108863,1],this.length=3),"le"===e&&this._initArray(this.toArray(),n,e)},o.prototype._initArray=function(t,n,e){if(r("number"==typeof t.length),t.length<=0)return this.words=[0],this.length=1,this;this.length=Math.ceil(t.length/3),this.words=new Array(this.length);for(var i=0;i<this.length;i++)this.words[i]=0;var o,s,u=0;if("be"===e)for(i=t.length-1,o=0;i>=0;i-=3)s=t[i]|t[i-1]<<8|t[i-2]<<16,this.words[o]|=s<<u&67108863,this.words[o+1]=s>>>26-u&67108863,(u+=24)>=26&&(u-=26,o++);else if("le"===e)for(i=0,o=0;i<t.length;i+=3)s=t[i]|t[i+1]<<8|t[i+2]<<16,this.words[o]|=s<<u&67108863,this.words[o+1]=s>>>26-u&67108863,(u+=24)>=26&&(u-=26,o++);return this.strip()},o.prototype._parseHex=function(t,n){this.length=Math.ceil((t.length-n)/6),this.words=new Array(this.length);for(var e=0;e<this.length;e++)this.words[e]=0;var r,i,o=0;for(e=t.length-6,r=0;e>=n;e-=6)i=s(t,e,e+6),this.words[r]|=i<<o&67108863,this.words[r+1]|=i>>>26-o&4194303,(o+=24)>=26&&(o-=26,r++);e+6!==n&&(i=s(t,n,e+6),this.words[r]|=i<<o&67108863,this.words[r+1]|=i>>>26-o&4194303),this.strip()},o.prototype._parseBase=function(t,n,e){this.words=[0],this.length=1;for(var r=0,i=1;i<=67108863;i*=n)r++;r--,i=i/n|0;for(var o=t.length-e,s=o%r,a=Math.min(o,o-s)+e,h=0,l=e;l<a;l+=r)h=u(t,l,l+r,n),this.imuln(i),this.words[0]+h<67108864?this.words[0]+=h:this._iaddn(h);if(0!==s){var c=1;for(h=u(t,l,t.length,n),l=0;l<s;l++)c*=n;this.imuln(c),this.words[0]+h<67108864?this.words[0]+=h:this._iaddn(h)}},o.prototype.copy=function(t){t.words=new Array(this.length);for(var n=0;n<this.length;n++)t.words[n]=this.words[n];t.length=this.length,t.negative=this.negative,t.red=this.red},o.prototype.clone=function(){var t=new o(null);return this.copy(t),t},o.prototype._expand=function(t){for(;this.length<t;)this.words[this.length++]=0;return this},o.prototype.strip=function(){for(;this.length>1&&0===this.words[this.length-1];)this.length--;return this._normSign()},o.prototype._normSign=function(){return 1===this.length&&0===this.words[0]&&(this.negative=0),this},o.prototype.inspect=function(){return(this.red?"<BN-R: ":"<BN: ")+this.toString(16)+">"};var 
M=["","0","00","000","0000","00000","000000","0000000","00000000","000000000","0000000000","00000000000","000000000000","0000000000000","00000000000000","000000000000000","0000000000000000","00000000000000000","000000000000000000","0000000000000000000","00000000000000000000","000000000000000000000","0000000000000000000000","00000000000000000000000","000000000000000000000000","0000000000000000000000000"],x=[0,0,25,16,12,11,10,9,8,8,7,7,7,7,6,6,6,6,6,6,6,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5],k=[0,0,33554432,43046721,16777216,48828125,60466176,40353607,16777216,43046721,1e7,19487171,35831808,62748517,7529536,11390625,16777216,24137569,34012224,47045881,64e6,4084101,5153632,6436343,7962624,9765625,11881376,14348907,17210368,20511149,243e5,28629151,33554432,39135393,45435424,52521875,60466176];o.prototype.toString=function(t,n){t=t||10,n=0|n||1;var e;if(16===t||"hex"===t){e="";for(var i=0,o=0,s=0;s<this.length;s++){var u=this.words[s],a=(16777215&(u<<i|o)).toString(16);e=0!==(o=u>>>24-i&16777215)||s!==this.length-1?M[6-a.length]+a+e:a+e,(i+=2)>=26&&(i-=26,s--)}for(0!==o&&(e=o.toString(16)+e);e.length%n!=0;)e="0"+e;return 0!==this.negative&&(e="-"+e),e}if(t===(0|t)&&t>=2&&t<=36){var h=x[t],l=k[t];e="";var c=this.clone();for(c.negative=0;!c.isZero();){var f=c.modn(l).toString(t);e=(c=c.idivn(l)).isZero()?f+e:M[h-f.length]+f+e}for(this.isZero()&&(e="0"+e);e.length%n!=0;)e="0"+e;return 0!==this.negative&&(e="-"+e),e}r(!1,"Base should be between 2 and 36")},o.prototype.toNumber=function(){var t=this.words[0];return 2===this.length?t+=67108864*this.words[1]:3===this.length&&1===this.words[2]?t+=4503599627370496+67108864*this.words[1]:this.length>2&&r(!1,"Number can only safely store up to 53 bits"),0!==this.negative?-t:t},o.prototype.toJSON=function(){return this.toString(16)},o.prototype.toBuffer=function(t,n){return r(void 0!==b),this.toArrayLike(b,t,n)},o.prototype.toArray=function(t,n){return this.toArrayLike(Array,t,n)},o.prototype.toArrayLike=function(t,n,e){var i=this.byteLength(),o=e||Math.max(1,i);r(i<=o,"byte array longer than desired length"),r(o>0,"Requested array length <= 0"),this.strip();var s,u,a="le"===n,h=new t(o),l=this.clone();if(a){for(u=0;!l.isZero();u++)s=l.andln(255),l.iushrn(8),h[u]=s;for(;u<o;u++)h[u]=0}else{for(u=0;u<o-i;u++)h[u]=0;for(u=0;!l.isZero();u++)s=l.andln(255),l.iushrn(8),h[o-u-1]=s}return h},Math.clz32?o.prototype._countBits=function(t){return 32-Math.clz32(t)}:o.prototype._countBits=function(t){var n=t,e=0;return n>=4096&&(e+=13,n>>>=13),n>=64&&(e+=7,n>>>=7),n>=8&&(e+=4,n>>>=4),n>=2&&(e+=2,n>>>=2),e+n},o.prototype._zeroBits=function(t){if(0===t)return 26;var n=t,e=0;return 0==(8191&n)&&(e+=13,n>>>=13),0==(127&n)&&(e+=7,n>>>=7),0==(15&n)&&(e+=4,n>>>=4),0==(3&n)&&(e+=2,n>>>=2),0==(1&n)&&e++,e},o.prototype.bitLength=function(){var t=this.words[this.length-1],n=this._countBits(t);return 26*(this.length-1)+n},o.prototype.zeroBits=function(){if(this.isZero())return 0;for(var t=0,n=0;n<this.length;n++){var e=this._zeroBits(this.words[n]);if(t+=e,26!==e)break}return t},o.prototype.byteLength=function(){return Math.ceil(this.bitLength()/8)},o.prototype.toTwos=function(t){return 0!==this.negative?this.abs().inotn(t).iaddn(1):this.clone()},o.prototype.fromTwos=function(t){return this.testn(t-1)?this.notn(t).iaddn(1).ineg():this.clone()},o.prototype.isNeg=function(){return 0!==this.negative},o.prototype.neg=function(){return this.clone().ineg()},o.prototype.ineg=function(){return 
this.isZero()||(this.negative^=1),this},o.prototype.iuor=function(t){for(;this.length<t.length;)this.words[this.length++]=0;for(var n=0;n<t.length;n++)this.words[n]=this.words[n]|t.words[n];return this.strip()},o.prototype.ior=function(t){return r(0==(this.negative|t.negative)),this.iuor(t)},o.prototype.or=function(t){return this.length>t.length?this.clone().ior(t):t.clone().ior(this)},o.prototype.uor=function(t){return this.length>t.length?this.clone().iuor(t):t.clone().iuor(this)},o.prototype.iuand=function(t){var n;n=this.length>t.length?t:this;for(var e=0;e<n.length;e++)this.words[e]=this.words[e]&t.words[e];return this.length=n.length,this.strip()},o.prototype.iand=function(t){return r(0==(this.negative|t.negative)),this.iuand(t)},o.prototype.and=function(t){return this.length>t.length?this.clone().iand(t):t.clone().iand(this)},o.prototype.uand=function(t){return this.length>t.length?this.clone().iuand(t):t.clone().iuand(this)},o.prototype.iuxor=function(t){var n,e;this.length>t.length?(n=this,e=t):(n=t,e=this);for(var r=0;r<e.length;r++)this.words[r]=n.words[r]^e.words[r];if(this!==n)for(;r<n.length;r++)this.words[r]=n.words[r];return this.length=n.length,this.strip()},o.prototype.ixor=function(t){return r(0==(this.negative|t.negative)),this.iuxor(t)},o.prototype.xor=function(t){return this.length>t.length?this.clone().ixor(t):t.clone().ixor(this)},o.prototype.uxor=function(t){return this.length>t.length?this.clone().iuxor(t):t.clone().iuxor(this)},o.prototype.inotn=function(t){r("number"==typeof t&&t>=0);var n=0|Math.ceil(t/26),e=t%26;this._expand(n),e>0&&n--;for(var i=0;i<n;i++)this.words[i]=67108863&~this.words[i];return e>0&&(this.words[i]=~this.words[i]&67108863>>26-e),this.strip()},o.prototype.notn=function(t){return this.clone().inotn(t)},o.prototype.setn=function(t,n){r("number"==typeof t&&t>=0);var e=t/26|0,i=t%26;return this._expand(e+1),this.words[e]=n?this.words[e]|1<<i:this.words[e]&~(1<<i),this.strip()},o.prototype.iadd=function(t){var n;if(0!==this.negative&&0===t.negative)return this.negative=0,n=this.isub(t),this.negative^=1,this._normSign();if(0===this.negative&&0!==t.negative)return t.negative=0,n=this.isub(t),t.negative=1,n._normSign();var e,r;this.length>t.length?(e=this,r=t):(e=t,r=this);for(var i=0,o=0;o<r.length;o++)n=(0|e.words[o])+(0|r.words[o])+i,this.words[o]=67108863&n,i=n>>>26;for(;0!==i&&o<e.length;o++)n=(0|e.words[o])+i,this.words[o]=67108863&n,i=n>>>26;if(this.length=e.length,0!==i)this.words[this.length]=i,this.length++;else if(e!==this)for(;o<e.length;o++)this.words[o]=e.words[o];return this},o.prototype.add=function(t){var n;return 0!==t.negative&&0===this.negative?(t.negative=0,n=this.sub(t),t.negative^=1,n):0===t.negative&&0!==this.negative?(this.negative=0,n=t.sub(this),this.negative=1,n):this.length>t.length?this.clone().iadd(t):t.clone().iadd(this)},o.prototype.isub=function(t){if(0!==t.negative){t.negative=0;var n=this.iadd(t);return t.negative=1,n._normSign()}if(0!==this.negative)return this.negative=0,this.iadd(t),this.negative=1,this._normSign();var e=this.cmp(t);if(0===e)return this.negative=0,this.length=1,this.words[0]=0,this;var r,i;e>0?(r=this,i=t):(r=t,i=this);for(var o=0,s=0;s<i.length;s++)o=(n=(0|r.words[s])-(0|i.words[s])+o)>>26,this.words[s]=67108863&n;for(;0!==o&&s<r.length;s++)o=(n=(0|r.words[s])+o)>>26,this.words[s]=67108863&n;if(0===o&&s<r.length&&r!==this)for(;s<r.length;s++)this.words[s]=r.words[s];return this.length=Math.max(this.length,s),r!==this&&(this.negative=1),this.strip()},o.prototype.sub=function(t){return 
this.clone().isub(t)};var _=function(t,n,e){var r,i,o,s=t.words,u=n.words,a=e.words,h=0,l=0|s[0],c=8191&l,f=l>>>13,d=0|s[1],p=8191&d,m=d>>>13,v=0|s[2],g=8191&v,y=v>>>13,w=0|s[3],b=8191&w,M=w>>>13,x=0|s[4],k=8191&x,_=x>>>13,S=0|s[5],A=8191&S,C=S>>>13,N=0|s[6],T=8191&N,E=N>>>13,j=0|s[7],I=8191&j,B=j>>>13,O=0|s[8],R=8191&O,P=O>>>13,L=0|s[9],F=8191&L,H=L>>>13,q=0|u[0],U=8191&q,z=q>>>13,W=0|u[1],D=8191&W,Z=W>>>13,J=0|u[2],$=8191&J,V=J>>>13,X=0|u[3],K=8191&X,G=X>>>13,Q=0|u[4],Y=8191&Q,tt=Q>>>13,nt=0|u[5],et=8191&nt,rt=nt>>>13,it=0|u[6],ot=8191&it,st=it>>>13,ut=0|u[7],at=8191&ut,ht=ut>>>13,lt=0|u[8],ct=8191&lt,ft=lt>>>13,dt=0|u[9],pt=8191&dt,mt=dt>>>13;e.negative=t.negative^n.negative,e.length=19;var vt=(h+(r=Math.imul(c,U))|0)+((8191&(i=(i=Math.imul(c,z))+Math.imul(f,U)|0))<<13)|0;h=((o=Math.imul(f,z))+(i>>>13)|0)+(vt>>>26)|0,vt&=67108863,r=Math.imul(p,U),i=(i=Math.imul(p,z))+Math.imul(m,U)|0,o=Math.imul(m,z);var gt=(h+(r=r+Math.imul(c,D)|0)|0)+((8191&(i=(i=i+Math.imul(c,Z)|0)+Math.imul(f,D)|0))<<13)|0;h=((o=o+Math.imul(f,Z)|0)+(i>>>13)|0)+(gt>>>26)|0,gt&=67108863,r=Math.imul(g,U),i=(i=Math.imul(g,z))+Math.imul(y,U)|0,o=Math.imul(y,z),r=r+Math.imul(p,D)|0,i=(i=i+Math.imul(p,Z)|0)+Math.imul(m,D)|0,o=o+Math.imul(m,Z)|0;var yt=(h+(r=r+Math.imul(c,$)|0)|0)+((8191&(i=(i=i+Math.imul(c,V)|0)+Math.imul(f,$)|0))<<13)|0;h=((o=o+Math.imul(f,V)|0)+(i>>>13)|0)+(yt>>>26)|0,yt&=67108863,r=Math.imul(b,U),i=(i=Math.imul(b,z))+Math.imul(M,U)|0,o=Math.imul(M,z),r=r+Math.imul(g,D)|0,i=(i=i+Math.imul(g,Z)|0)+Math.imul(y,D)|0,o=o+Math.imul(y,Z)|0,r=r+Math.imul(p,$)|0,i=(i=i+Math.imul(p,V)|0)+Math.imul(m,$)|0,o=o+Math.imul(m,V)|0;var wt=(h+(r=r+Math.imul(c,K)|0)|0)+((8191&(i=(i=i+Math.imul(c,G)|0)+Math.imul(f,K)|0))<<13)|0;h=((o=o+Math.imul(f,G)|0)+(i>>>13)|0)+(wt>>>26)|0,wt&=67108863,r=Math.imul(k,U),i=(i=Math.imul(k,z))+Math.imul(_,U)|0,o=Math.imul(_,z),r=r+Math.imul(b,D)|0,i=(i=i+Math.imul(b,Z)|0)+Math.imul(M,D)|0,o=o+Math.imul(M,Z)|0,r=r+Math.imul(g,$)|0,i=(i=i+Math.imul(g,V)|0)+Math.imul(y,$)|0,o=o+Math.imul(y,V)|0,r=r+Math.imul(p,K)|0,i=(i=i+Math.imul(p,G)|0)+Math.imul(m,K)|0,o=o+Math.imul(m,G)|0;var bt=(h+(r=r+Math.imul(c,Y)|0)|0)+((8191&(i=(i=i+Math.imul(c,tt)|0)+Math.imul(f,Y)|0))<<13)|0;h=((o=o+Math.imul(f,tt)|0)+(i>>>13)|0)+(bt>>>26)|0,bt&=67108863,r=Math.imul(A,U),i=(i=Math.imul(A,z))+Math.imul(C,U)|0,o=Math.imul(C,z),r=r+Math.imul(k,D)|0,i=(i=i+Math.imul(k,Z)|0)+Math.imul(_,D)|0,o=o+Math.imul(_,Z)|0,r=r+Math.imul(b,$)|0,i=(i=i+Math.imul(b,V)|0)+Math.imul(M,$)|0,o=o+Math.imul(M,V)|0,r=r+Math.imul(g,K)|0,i=(i=i+Math.imul(g,G)|0)+Math.imul(y,K)|0,o=o+Math.imul(y,G)|0,r=r+Math.imul(p,Y)|0,i=(i=i+Math.imul(p,tt)|0)+Math.imul(m,Y)|0,o=o+Math.imul(m,tt)|0;var Mt=(h+(r=r+Math.imul(c,et)|0)|0)+((8191&(i=(i=i+Math.imul(c,rt)|0)+Math.imul(f,et)|0))<<13)|0;h=((o=o+Math.imul(f,rt)|0)+(i>>>13)|0)+(Mt>>>26)|0,Mt&=67108863,r=Math.imul(T,U),i=(i=Math.imul(T,z))+Math.imul(E,U)|0,o=Math.imul(E,z),r=r+Math.imul(A,D)|0,i=(i=i+Math.imul(A,Z)|0)+Math.imul(C,D)|0,o=o+Math.imul(C,Z)|0,r=r+Math.imul(k,$)|0,i=(i=i+Math.imul(k,V)|0)+Math.imul(_,$)|0,o=o+Math.imul(_,V)|0,r=r+Math.imul(b,K)|0,i=(i=i+Math.imul(b,G)|0)+Math.imul(M,K)|0,o=o+Math.imul(M,G)|0,r=r+Math.imul(g,Y)|0,i=(i=i+Math.imul(g,tt)|0)+Math.imul(y,Y)|0,o=o+Math.imul(y,tt)|0,r=r+Math.imul(p,et)|0,i=(i=i+Math.imul(p,rt)|0)+Math.imul(m,et)|0,o=o+Math.imul(m,rt)|0;var
xt=(h+(r=r+Math.imul(c,ot)|0)|0)+((8191&(i=(i=i+Math.imul(c,st)|0)+Math.imul(f,ot)|0))<<13)|0;h=((o=o+Math.imul(f,st)|0)+(i>>>13)|0)+(xt>>>26)|0,xt&=67108863,r=Math.imul(I,U),i=(i=Math.imul(I,z))+Math.imul(B,U)|0,o=Math.imul(B,z),r=r+Math.imul(T,D)|0,i=(i=i+Math.imul(T,Z)|0)+Math.imul(E,D)|0,o=o+Math.imul(E,Z)|0,r=r+Math.imul(A,$)|0,i=(i=i+Math.imul(A,V)|0)+Math.imul(C,$)|0,o=o+Math.imul(C,V)|0,r=r+Math.imul(k,K)|0,i=(i=i+Math.imul(k,G)|0)+Math.imul(_,K)|0,o=o+Math.imul(_,G)|0,r=r+Math.imul(b,Y)|0,i=(i=i+Math.imul(b,tt)|0)+Math.imul(M,Y)|0,o=o+Math.imul(M,tt)|0,r=r+Math.imul(g,et)|0,i=(i=i+Math.imul(g,rt)|0)+Math.imul(y,et)|0,o=o+Math.imul(y,rt)|0,r=r+Math.imul(p,ot)|0,i=(i=i+Math.imul(p,st)|0)+Math.imul(m,ot)|0,o=o+Math.imul(m,st)|0;var kt=(h+(r=r+Math.imul(c,at)|0)|0)+((8191&(i=(i=i+Math.imul(c,ht)|0)+Math.imul(f,at)|0))<<13)|0;h=((o=o+Math.imul(f,ht)|0)+(i>>>13)|0)+(kt>>>26)|0,kt&=67108863,r=Math.imul(R,U),i=(i=Math.imul(R,z))+Math.imul(P,U)|0,o=Math.imul(P,z),r=r+Math.imul(I,D)|0,i=(i=i+Math.imul(I,Z)|0)+Math.imul(B,D)|0,o=o+Math.imul(B,Z)|0,r=r+Math.imul(T,$)|0,i=(i=i+Math.imul(T,V)|0)+Math.imul(E,$)|0,o=o+Math.imul(E,V)|0,r=r+Math.imul(A,K)|0,i=(i=i+Math.imul(A,G)|0)+Math.imul(C,K)|0,o=o+Math.imul(C,G)|0,r=r+Math.imul(k,Y)|0,i=(i=i+Math.imul(k,tt)|0)+Math.imul(_,Y)|0,o=o+Math.imul(_,tt)|0,r=r+Math.imul(b,et)|0,i=(i=i+Math.imul(b,rt)|0)+Math.imul(M,et)|0,o=o+Math.imul(M,rt)|0,r=r+Math.imul(g,ot)|0,i=(i=i+Math.imul(g,st)|0)+Math.imul(y,ot)|0,o=o+Math.imul(y,st)|0,r=r+Math.imul(p,at)|0,i=(i=i+Math.imul(p,ht)|0)+Math.imul(m,at)|0,o=o+Math.imul(m,ht)|0;var _t=(h+(r=r+Math.imul(c,ct)|0)|0)+((8191&(i=(i=i+Math.imul(c,ft)|0)+Math.imul(f,ct)|0))<<13)|0;h=((o=o+Math.imul(f,ft)|0)+(i>>>13)|0)+(_t>>>26)|0,_t&=67108863,r=Math.imul(F,U),i=(i=Math.imul(F,z))+Math.imul(H,U)|0,o=Math.imul(H,z),r=r+Math.imul(R,D)|0,i=(i=i+Math.imul(R,Z)|0)+Math.imul(P,D)|0,o=o+Math.imul(P,Z)|0,r=r+Math.imul(I,$)|0,i=(i=i+Math.imul(I,V)|0)+Math.imul(B,$)|0,o=o+Math.imul(B,V)|0,r=r+Math.imul(T,K)|0,i=(i=i+Math.imul(T,G)|0)+Math.imul(E,K)|0,o=o+Math.imul(E,G)|0,r=r+Math.imul(A,Y)|0,i=(i=i+Math.imul(A,tt)|0)+Math.imul(C,Y)|0,o=o+Math.imul(C,tt)|0,r=r+Math.imul(k,et)|0,i=(i=i+Math.imul(k,rt)|0)+Math.imul(_,et)|0,o=o+Math.imul(_,rt)|0,r=r+Math.imul(b,ot)|0,i=(i=i+Math.imul(b,st)|0)+Math.imul(M,ot)|0,o=o+Math.imul(M,st)|0,r=r+Math.imul(g,at)|0,i=(i=i+Math.imul(g,ht)|0)+Math.imul(y,at)|0,o=o+Math.imul(y,ht)|0,r=r+Math.imul(p,ct)|0,i=(i=i+Math.imul(p,ft)|0)+Math.imul(m,ct)|0,o=o+Math.imul(m,ft)|0;var St=(h+(r=r+Math.imul(c,pt)|0)|0)+((8191&(i=(i=i+Math.imul(c,mt)|0)+Math.imul(f,pt)|0))<<13)|0;h=((o=o+Math.imul(f,mt)|0)+(i>>>13)|0)+(St>>>26)|0,St&=67108863,r=Math.imul(F,D),i=(i=Math.imul(F,Z))+Math.imul(H,D)|0,o=Math.imul(H,Z),r=r+Math.imul(R,$)|0,i=(i=i+Math.imul(R,V)|0)+Math.imul(P,$)|0,o=o+Math.imul(P,V)|0,r=r+Math.imul(I,K)|0,i=(i=i+Math.imul(I,G)|0)+Math.imul(B,K)|0,o=o+Math.imul(B,G)|0,r=r+Math.imul(T,Y)|0,i=(i=i+Math.imul(T,tt)|0)+Math.imul(E,Y)|0,o=o+Math.imul(E,tt)|0,r=r+Math.imul(A,et)|0,i=(i=i+Math.imul(A,rt)|0)+Math.imul(C,et)|0,o=o+Math.imul(C,rt)|0,r=r+Math.imul(k,ot)|0,i=(i=i+Math.imul(k,st)|0)+Math.imul(_,ot)|0,o=o+Math.imul(_,st)|0,r=r+Math.imul(b,at)|0,i=(i=i+Math.imul(b,ht)|0)+Math.imul(M,at)|0,o=o+Math.imul(M,ht)|0,r=r+Math.imul(g,ct)|0,i=(i=i+Math.imul(g,ft)|0)+Math.imul(y,ct)|0,o=o+Math.imul(y,ft)|0;var 
At=(h+(r=r+Math.imul(p,pt)|0)|0)+((8191&(i=(i=i+Math.imul(p,mt)|0)+Math.imul(m,pt)|0))<<13)|0;h=((o=o+Math.imul(m,mt)|0)+(i>>>13)|0)+(At>>>26)|0,At&=67108863,r=Math.imul(F,$),i=(i=Math.imul(F,V))+Math.imul(H,$)|0,o=Math.imul(H,V),r=r+Math.imul(R,K)|0,i=(i=i+Math.imul(R,G)|0)+Math.imul(P,K)|0,o=o+Math.imul(P,G)|0,r=r+Math.imul(I,Y)|0,i=(i=i+Math.imul(I,tt)|0)+Math.imul(B,Y)|0,o=o+Math.imul(B,tt)|0,r=r+Math.imul(T,et)|0,i=(i=i+Math.imul(T,rt)|0)+Math.imul(E,et)|0,o=o+Math.imul(E,rt)|0,r=r+Math.imul(A,ot)|0,i=(i=i+Math.imul(A,st)|0)+Math.imul(C,ot)|0,o=o+Math.imul(C,st)|0,r=r+Math.imul(k,at)|0,i=(i=i+Math.imul(k,ht)|0)+Math.imul(_,at)|0,o=o+Math.imul(_,ht)|0,r=r+Math.imul(b,ct)|0,i=(i=i+Math.imul(b,ft)|0)+Math.imul(M,ct)|0,o=o+Math.imul(M,ft)|0;var Ct=(h+(r=r+Math.imul(g,pt)|0)|0)+((8191&(i=(i=i+Math.imul(g,mt)|0)+Math.imul(y,pt)|0))<<13)|0;h=((o=o+Math.imul(y,mt)|0)+(i>>>13)|0)+(Ct>>>26)|0,Ct&=67108863,r=Math.imul(F,K),i=(i=Math.imul(F,G))+Math.imul(H,K)|0,o=Math.imul(H,G),r=r+Math.imul(R,Y)|0,i=(i=i+Math.imul(R,tt)|0)+Math.imul(P,Y)|0,o=o+Math.imul(P,tt)|0,r=r+Math.imul(I,et)|0,i=(i=i+Math.imul(I,rt)|0)+Math.imul(B,et)|0,o=o+Math.imul(B,rt)|0,r=r+Math.imul(T,ot)|0,i=(i=i+Math.imul(T,st)|0)+Math.imul(E,ot)|0,o=o+Math.imul(E,st)|0,r=r+Math.imul(A,at)|0,i=(i=i+Math.imul(A,ht)|0)+Math.imul(C,at)|0,o=o+Math.imul(C,ht)|0,r=r+Math.imul(k,ct)|0,i=(i=i+Math.imul(k,ft)|0)+Math.imul(_,ct)|0,o=o+Math.imul(_,ft)|0;var Nt=(h+(r=r+Math.imul(b,pt)|0)|0)+((8191&(i=(i=i+Math.imul(b,mt)|0)+Math.imul(M,pt)|0))<<13)|0;h=((o=o+Math.imul(M,mt)|0)+(i>>>13)|0)+(Nt>>>26)|0,Nt&=67108863,r=Math.imul(F,Y),i=(i=Math.imul(F,tt))+Math.imul(H,Y)|0,o=Math.imul(H,tt),r=r+Math.imul(R,et)|0,i=(i=i+Math.imul(R,rt)|0)+Math.imul(P,et)|0,o=o+Math.imul(P,rt)|0,r=r+Math.imul(I,ot)|0,i=(i=i+Math.imul(I,st)|0)+Math.imul(B,ot)|0,o=o+Math.imul(B,st)|0,r=r+Math.imul(T,at)|0,i=(i=i+Math.imul(T,ht)|0)+Math.imul(E,at)|0,o=o+Math.imul(E,ht)|0,r=r+Math.imul(A,ct)|0,i=(i=i+Math.imul(A,ft)|0)+Math.imul(C,ct)|0,o=o+Math.imul(C,ft)|0;var Tt=(h+(r=r+Math.imul(k,pt)|0)|0)+((8191&(i=(i=i+Math.imul(k,mt)|0)+Math.imul(_,pt)|0))<<13)|0;h=((o=o+Math.imul(_,mt)|0)+(i>>>13)|0)+(Tt>>>26)|0,Tt&=67108863,r=Math.imul(F,et),i=(i=Math.imul(F,rt))+Math.imul(H,et)|0,o=Math.imul(H,rt),r=r+Math.imul(R,ot)|0,i=(i=i+Math.imul(R,st)|0)+Math.imul(P,ot)|0,o=o+Math.imul(P,st)|0,r=r+Math.imul(I,at)|0,i=(i=i+Math.imul(I,ht)|0)+Math.imul(B,at)|0,o=o+Math.imul(B,ht)|0,r=r+Math.imul(T,ct)|0,i=(i=i+Math.imul(T,ft)|0)+Math.imul(E,ct)|0,o=o+Math.imul(E,ft)|0;var Et=(h+(r=r+Math.imul(A,pt)|0)|0)+((8191&(i=(i=i+Math.imul(A,mt)|0)+Math.imul(C,pt)|0))<<13)|0;h=((o=o+Math.imul(C,mt)|0)+(i>>>13)|0)+(Et>>>26)|0,Et&=67108863,r=Math.imul(F,ot),i=(i=Math.imul(F,st))+Math.imul(H,ot)|0,o=Math.imul(H,st),r=r+Math.imul(R,at)|0,i=(i=i+Math.imul(R,ht)|0)+Math.imul(P,at)|0,o=o+Math.imul(P,ht)|0,r=r+Math.imul(I,ct)|0,i=(i=i+Math.imul(I,ft)|0)+Math.imul(B,ct)|0,o=o+Math.imul(B,ft)|0;var jt=(h+(r=r+Math.imul(T,pt)|0)|0)+((8191&(i=(i=i+Math.imul(T,mt)|0)+Math.imul(E,pt)|0))<<13)|0;h=((o=o+Math.imul(E,mt)|0)+(i>>>13)|0)+(jt>>>26)|0,jt&=67108863,r=Math.imul(F,at),i=(i=Math.imul(F,ht))+Math.imul(H,at)|0,o=Math.imul(H,ht),r=r+Math.imul(R,ct)|0,i=(i=i+Math.imul(R,ft)|0)+Math.imul(P,ct)|0,o=o+Math.imul(P,ft)|0;var It=(h+(r=r+Math.imul(I,pt)|0)|0)+((8191&(i=(i=i+Math.imul(I,mt)|0)+Math.imul(B,pt)|0))<<13)|0;h=((o=o+Math.imul(B,mt)|0)+(i>>>13)|0)+(It>>>26)|0,It&=67108863,r=Math.imul(F,ct),i=(i=Math.imul(F,ft))+Math.imul(H,ct)|0,o=Math.imul(H,ft);var 
Bt=(h+(r=r+Math.imul(R,pt)|0)|0)+((8191&(i=(i=i+Math.imul(R,mt)|0)+Math.imul(P,pt)|0))<<13)|0;h=((o=o+Math.imul(P,mt)|0)+(i>>>13)|0)+(Bt>>>26)|0,Bt&=67108863;var Ot=(h+(r=Math.imul(F,pt))|0)+((8191&(i=(i=Math.imul(F,mt))+Math.imul(H,pt)|0))<<13)|0;return h=((o=Math.imul(H,mt))+(i>>>13)|0)+(Ot>>>26)|0,Ot&=67108863,a[0]=vt,a[1]=gt,a[2]=yt,a[3]=wt,a[4]=bt,a[5]=Mt,a[6]=xt,a[7]=kt,a[8]=_t,a[9]=St,a[10]=At,a[11]=Ct,a[12]=Nt,a[13]=Tt,a[14]=Et,a[15]=jt,a[16]=It,a[17]=Bt,a[18]=Ot,0!==h&&(a[19]=h,e.length++),e};Math.imul||(_=h),o.prototype.mulTo=function(t,n){var e=this.length+t.length;return 10===this.length&&10===t.length?_(this,t,n):e<63?h(this,t,n):e<1024?l(this,t,n):c(this,t,n)},f.prototype.makeRBT=function(t){for(var n=new Array(t),e=o.prototype._countBits(t)-1,r=0;r<t;r++)n[r]=this.revBin(r,e,t);return n},f.prototype.revBin=function(t,n,e){if(0===t||t===e-1)return t;for(var r=0,i=0;i<n;i++)r|=(1&t)<<n-i-1,t>>=1;return r},f.prototype.permute=function(t,n,e,r,i,o){for(var s=0;s<o;s++)r[s]=n[t[s]],i[s]=e[t[s]]},f.prototype.transform=function(t,n,e,r,i,o){this.permute(o,t,n,e,r,i);for(var s=1;s<i;s<<=1)for(var u=s<<1,a=Math.cos(2*Math.PI/u),h=Math.sin(2*Math.PI/u),l=0;l<i;l+=u)for(var c=a,f=h,d=0;d<s;d++){var p=e[l+d],m=r[l+d],v=e[l+d+s],g=r[l+d+s],y=c*v-f*g;g=c*g+f*v,v=y,e[l+d]=p+v,r[l+d]=m+g,e[l+d+s]=p-v,r[l+d+s]=m-g,d!==u&&(y=a*c-h*f,f=a*f+h*c,c=y)}},f.prototype.guessLen13b=function(t,n){var e=1|Math.max(n,t),r=1&e,i=0;for(e=e/2|0;e;e>>>=1)i++;return 1<<i+1+r},f.prototype.conjugate=function(t,n,e){if(!(e<=1))for(var r=0;r<e/2;r++){var i=t[r];t[r]=t[e-r-1],t[e-r-1]=i,i=n[r],n[r]=-n[e-r-1],n[e-r-1]=-i}},f.prototype.normalize13b=function(t,n){for(var e=0,r=0;r<n/2;r++){var i=8192*Math.round(t[2*r+1]/n)+Math.round(t[2*r]/n)+e;t[r]=67108863&i,e=i<67108864?0:i/67108864|0}return t},f.prototype.convert13b=function(t,n,e,i){for(var o=0,s=0;s<n;s++)o+=0|t[s],e[2*s]=8191&o,o>>>=13,e[2*s+1]=8191&o,o>>>=13;for(s=2*n;s<i;++s)e[s]=0;r(0===o),r(0==(-8192&o))},f.prototype.stub=function(t){for(var n=new Array(t),e=0;e<t;e++)n[e]=0;return n},f.prototype.mulp=function(t,n,e){var r=2*this.guessLen13b(t.length,n.length),i=this.makeRBT(r),o=this.stub(r),s=new Array(r),u=new Array(r),a=new Array(r),h=new Array(r),l=new Array(r),c=new Array(r),f=e.words;f.length=r,this.convert13b(t.words,t.length,s,r),this.convert13b(n.words,n.length,h,r),this.transform(s,o,u,a,r,i),this.transform(h,o,l,c,r,i);for(var d=0;d<r;d++){var p=u[d]*l[d]-a[d]*c[d];a[d]=u[d]*c[d]+a[d]*l[d],u[d]=p}return this.conjugate(u,a,r),this.transform(u,a,f,o,r,i),this.conjugate(f,o,r),this.normalize13b(f,r),e.negative=t.negative^n.negative,e.length=t.length+n.length,e.strip()},o.prototype.mul=function(t){var n=new o(null);return n.words=new Array(this.length+t.length),this.mulTo(t,n)},o.prototype.mulf=function(t){var n=new o(null);return n.words=new Array(this.length+t.length),c(this,t,n)},o.prototype.imul=function(t){return this.clone().mulTo(t,this)},o.prototype.imuln=function(t){r("number"==typeof t),r(t<67108864);for(var n=0,e=0;e<this.length;e++){var i=(0|this.words[e])*t,o=(67108863&i)+(67108863&n);n>>=26,n+=i/67108864|0,n+=o>>>26,this.words[e]=67108863&o}return 0!==n&&(this.words[e]=n,this.length++),this},o.prototype.muln=function(t){return this.clone().imuln(t)},o.prototype.sqr=function(){return this.mul(this)},o.prototype.isqr=function(){return this.imul(this.clone())},o.prototype.pow=function(t){var n=a(t);if(0===n.length)return new o(1);for(var e=this,r=0;r<n.length&&0===n[r];r++,e=e.sqr());if(++r<n.length)for(var 
i=e.sqr();r<n.length;r++,i=i.sqr())0!==n[r]&&(e=e.mul(i));return e},o.prototype.iushln=function(t){r("number"==typeof t&&t>=0);var n,e=t%26,i=(t-e)/26,o=67108863>>>26-e<<26-e;if(0!==e){var s=0;for(n=0;n<this.length;n++){var u=this.words[n]&o,a=(0|this.words[n])-u<<e;this.words[n]=a|s,s=u>>>26-e}s&&(this.words[n]=s,this.length++)}if(0!==i){for(n=this.length-1;n>=0;n--)this.words[n+i]=this.words[n];for(n=0;n<i;n++)this.words[n]=0;this.length+=i}return this.strip()},o.prototype.ishln=function(t){return r(0===this.negative),this.iushln(t)},o.prototype.iushrn=function(t,n,e){r("number"==typeof t&&t>=0);var i;i=n?(n-n%26)/26:0;var o=t%26,s=Math.min((t-o)/26,this.length),u=67108863^67108863>>>o<<o,a=e;if(i-=s,i=Math.max(0,i),a){for(var h=0;h<s;h++)a.words[h]=this.words[h];a.length=s}if(0===s);else if(this.length>s)for(this.length-=s,h=0;h<this.length;h++)this.words[h]=this.words[h+s];else this.words[0]=0,this.length=1;var l=0;for(h=this.length-1;h>=0&&(0!==l||h>=i);h--){var c=0|this.words[h];this.words[h]=l<<26-o|c>>>o,l=c&u}return a&&0!==l&&(a.words[a.length++]=l),0===this.length&&(this.words[0]=0,this.length=1),this.strip()},o.prototype.ishrn=function(t,n,e){return r(0===this.negative),this.iushrn(t,n,e)},o.prototype.shln=function(t){return this.clone().ishln(t)},o.prototype.ushln=function(t){return this.clone().iushln(t)},o.prototype.shrn=function(t){return this.clone().ishrn(t)},o.prototype.ushrn=function(t){return this.clone().iushrn(t)},o.prototype.testn=function(t){r("number"==typeof t&&t>=0);var n=t%26,e=(t-n)/26,i=1<<n;return!(this.length<=e)&&!!(this.words[e]&i)},o.prototype.imaskn=function(t){r("number"==typeof t&&t>=0);var n=t%26,e=(t-n)/26;if(r(0===this.negative,"imaskn works only with positive numbers"),this.length<=e)return this;if(0!==n&&e++,this.length=Math.min(e,this.length),0!==n){var i=67108863^67108863>>>n<<n;this.words[this.length-1]&=i}return this.strip()},o.prototype.maskn=function(t){return this.clone().imaskn(t)},o.prototype.iaddn=function(t){return r("number"==typeof t),r(t<67108864),t<0?this.isubn(-t):0!==this.negative?1===this.length&&(0|this.words[0])<t?(this.words[0]=t-(0|this.words[0]),this.negative=0,this):(this.negative=0,this.isubn(t),this.negative=1,this):this._iaddn(t)},o.prototype._iaddn=function(t){this.words[0]+=t;for(var n=0;n<this.length&&this.words[n]>=67108864;n++)this.words[n]-=67108864,n===this.length-1?this.words[n+1]=1:this.words[n+1]++;return this.length=Math.max(this.length,n+1),this},o.prototype.isubn=function(t){if(r("number"==typeof t),r(t<67108864),t<0)return this.iaddn(-t);if(0!==this.negative)return this.negative=0,this.iaddn(t),this.negative=1,this;if(this.words[0]-=t,1===this.length&&this.words[0]<0)this.words[0]=-this.words[0],this.negative=1;else for(var n=0;n<this.length&&this.words[n]<0;n++)this.words[n]+=67108864,this.words[n+1]-=1;return this.strip()},o.prototype.addn=function(t){return this.clone().iaddn(t)},o.prototype.subn=function(t){return this.clone().isubn(t)},o.prototype.iabs=function(){return this.negative=0,this},o.prototype.abs=function(){return this.clone().iabs()},o.prototype._ishlnsubmul=function(t,n,e){var i,o=t.length+e;this._expand(o);var s,u=0;for(i=0;i<t.length;i++){s=(0|this.words[i+e])+u;var a=(0|t.words[i])*n;u=((s-=67108863&a)>>26)-(a/67108864|0),this.words[i+e]=67108863&s}for(;i<this.length-e;i++)u=(s=(0|this.words[i+e])+u)>>26,this.words[i+e]=67108863&s;if(0===u)return this.strip();for(r(-1===u),u=0,i=0;i<this.length;i++)u=(s=-(0|this.words[i])+u)>>26,this.words[i]=67108863&s;return 
this.negative=1,this.strip()},o.prototype._wordDiv=function(t,n){var e=this.length-t.length,r=this.clone(),i=t,s=0|i.words[i.length-1];0!==(e=26-this._countBits(s))&&(i=i.ushln(e),r.iushln(e),s=0|i.words[i.length-1]);var u,a=r.length-i.length;if("mod"!==n){(u=new o(null)).length=a+1,u.words=new Array(u.length);for(var h=0;h<u.length;h++)u.words[h]=0}var l=r.clone()._ishlnsubmul(i,1,a);0===l.negative&&(r=l,u&&(u.words[a]=1));for(var c=a-1;c>=0;c--){var f=67108864*(0|r.words[i.length+c])+(0|r.words[i.length+c-1]);for(f=Math.min(f/s|0,67108863),r._ishlnsubmul(i,f,c);0!==r.negative;)f--,r.negative=0,r._ishlnsubmul(i,1,c),r.isZero()||(r.negative^=1);u&&(u.words[c]=f)}return u&&u.strip(),r.strip(),"div"!==n&&0!==e&&r.iushrn(e),{div:u||null,mod:r}},o.prototype.divmod=function(t,n,e){if(r(!t.isZero()),this.isZero())return{div:new o(0),mod:new o(0)};var i,s,u;return 0!==this.negative&&0===t.negative?(u=this.neg().divmod(t,n),"mod"!==n&&(i=u.div.neg()),"div"!==n&&(s=u.mod.neg(),e&&0!==s.negative&&s.iadd(t)),{div:i,mod:s}):0===this.negative&&0!==t.negative?(u=this.divmod(t.neg(),n),"mod"!==n&&(i=u.div.neg()),{div:i,mod:u.mod}):0!=(this.negative&t.negative)?(u=this.neg().divmod(t.neg(),n),"div"!==n&&(s=u.mod.neg(),e&&0!==s.negative&&s.isub(t)),{div:u.div,mod:s}):t.length>this.length||this.cmp(t)<0?{div:new o(0),mod:this}:1===t.length?"div"===n?{div:this.divn(t.words[0]),mod:null}:"mod"===n?{div:null,mod:new o(this.modn(t.words[0]))}:{div:this.divn(t.words[0]),mod:new o(this.modn(t.words[0]))}:this._wordDiv(t,n)},o.prototype.div=function(t){return this.divmod(t,"div",!1).div},o.prototype.mod=function(t){return this.divmod(t,"mod",!1).mod},o.prototype.umod=function(t){return this.divmod(t,"mod",!0).mod},o.prototype.divRound=function(t){var n=this.divmod(t);if(n.mod.isZero())return n.div;var e=0!==n.div.negative?n.mod.isub(t):n.mod,r=t.ushrn(1),i=t.andln(1),o=e.cmp(r);return o<0||1===i&&0===o?n.div:0!==n.div.negative?n.div.isubn(1):n.div.iaddn(1)},o.prototype.modn=function(t){r(t<=67108863);for(var n=(1<<26)%t,e=0,i=this.length-1;i>=0;i--)e=(n*e+(0|this.words[i]))%t;return e},o.prototype.idivn=function(t){r(t<=67108863);for(var n=0,e=this.length-1;e>=0;e--){var i=(0|this.words[e])+67108864*n;this.words[e]=i/t|0,n=i%t}return this.strip()},o.prototype.divn=function(t){return this.clone().idivn(t)},o.prototype.egcd=function(t){r(0===t.negative),r(!t.isZero());var n=this,e=t.clone();n=0!==n.negative?n.umod(t):n.clone();for(var i=new o(1),s=new o(0),u=new o(0),a=new o(1),h=0;n.isEven()&&e.isEven();)n.iushrn(1),e.iushrn(1),++h;for(var l=e.clone(),c=n.clone();!n.isZero();){for(var f=0,d=1;0==(n.words[0]&d)&&f<26;++f,d<<=1);if(f>0)for(n.iushrn(f);f-- >0;)(i.isOdd()||s.isOdd())&&(i.iadd(l),s.isub(c)),i.iushrn(1),s.iushrn(1);for(var p=0,m=1;0==(e.words[0]&m)&&p<26;++p,m<<=1);if(p>0)for(e.iushrn(p);p-- >0;)(u.isOdd()||a.isOdd())&&(u.iadd(l),a.isub(c)),u.iushrn(1),a.iushrn(1);n.cmp(e)>=0?(n.isub(e),i.isub(u),s.isub(a)):(e.isub(n),u.isub(i),a.isub(s))}return{a:u,b:a,gcd:e.iushln(h)}},o.prototype._invmp=function(t){r(0===t.negative),r(!t.isZero());var n=this,e=t.clone();n=0!==n.negative?n.umod(t):n.clone();for(var i=new o(1),s=new o(0),u=e.clone();n.cmpn(1)>0&&e.cmpn(1)>0;){for(var a=0,h=1;0==(n.words[0]&h)&&a<26;++a,h<<=1);if(a>0)for(n.iushrn(a);a-- >0;)i.isOdd()&&i.iadd(u),i.iushrn(1);for(var l=0,c=1;0==(e.words[0]&c)&&l<26;++l,c<<=1);if(l>0)for(e.iushrn(l);l-- >0;)s.isOdd()&&s.iadd(u),s.iushrn(1);n.cmp(e)>=0?(n.isub(e),i.isub(s)):(e.isub(n),s.isub(i))}var 
f;return(f=0===n.cmpn(1)?i:s).cmpn(0)<0&&f.iadd(t),f},o.prototype.gcd=function(t){if(this.isZero())return t.abs();if(t.isZero())return this.abs();var n=this.clone(),e=t.clone();n.negative=0,e.negative=0;for(var r=0;n.isEven()&&e.isEven();r++)n.iushrn(1),e.iushrn(1);for(;;){for(;n.isEven();)n.iushrn(1);for(;e.isEven();)e.iushrn(1);var i=n.cmp(e);if(i<0){var o=n;n=e,e=o}else if(0===i||0===e.cmpn(1))break;n.isub(e)}return e.iushln(r)},o.prototype.invm=function(t){return this.egcd(t).a.umod(t)},o.prototype.isEven=function(){return 0==(1&this.words[0])},o.prototype.isOdd=function(){return 1==(1&this.words[0])},o.prototype.andln=function(t){return this.words[0]&t},o.prototype.bincn=function(t){r("number"==typeof t);var n=t%26,e=(t-n)/26,i=1<<n;if(this.length<=e)return this._expand(e+1),this.words[e]|=i,this;for(var o=i,s=e;0!==o&&s<this.length;s++){var u=0|this.words[s];o=(u+=o)>>>26,u&=67108863,this.words[s]=u}return 0!==o&&(this.words[s]=o,this.length++),this},o.prototype.isZero=function(){return 1===this.length&&0===this.words[0]},o.prototype.cmpn=function(t){var n=t<0;if(0!==this.negative&&!n)return-1;if(0===this.negative&&n)return 1;this.strip();var e;if(this.length>1)e=1;else{n&&(t=-t),r(t<=67108863,"Number is too big");var i=0|this.words[0];e=i===t?0:i<t?-1:1}return 0!==this.negative?0|-e:e},o.prototype.cmp=function(t){if(0!==this.negative&&0===t.negative)return-1;if(0===this.negative&&0!==t.negative)return 1;var n=this.ucmp(t);return 0!==this.negative?0|-n:n},o.prototype.ucmp=function(t){if(this.length>t.length)return 1;if(this.length<t.length)return-1;for(var n=0,e=this.length-1;e>=0;e--){var r=0|this.words[e],i=0|t.words[e];if(r!==i){r<i?n=-1:r>i&&(n=1);break}}return n},o.prototype.gtn=function(t){return 1===this.cmpn(t)},o.prototype.gt=function(t){return 1===this.cmp(t)},o.prototype.gten=function(t){return this.cmpn(t)>=0},o.prototype.gte=function(t){return this.cmp(t)>=0},o.prototype.ltn=function(t){return-1===this.cmpn(t)},o.prototype.lt=function(t){return-1===this.cmp(t)},o.prototype.lten=function(t){return this.cmpn(t)<=0},o.prototype.lte=function(t){return this.cmp(t)<=0},o.prototype.eqn=function(t){return 0===this.cmpn(t)},o.prototype.eq=function(t){return 0===this.cmp(t)},o.red=function(t){return new y(t)},o.prototype.toRed=function(t){return r(!this.red,"Already a number in reduction context"),r(0===this.negative,"red works only with positives"),t.convertTo(this)._forceRed(t)},o.prototype.fromRed=function(){return r(this.red,"fromRed works only with numbers in reduction context"),this.red.convertFrom(this)},o.prototype._forceRed=function(t){return this.red=t,this},o.prototype.forceRed=function(t){return r(!this.red,"Already a number in reduction context"),this._forceRed(t)},o.prototype.redAdd=function(t){return r(this.red,"redAdd works only with red numbers"),this.red.add(this,t)},o.prototype.redIAdd=function(t){return r(this.red,"redIAdd works only with red numbers"),this.red.iadd(this,t)},o.prototype.redSub=function(t){return r(this.red,"redSub works only with red numbers"),this.red.sub(this,t)},o.prototype.redISub=function(t){return r(this.red,"redISub works only with red numbers"),this.red.isub(this,t)},o.prototype.redShl=function(t){return r(this.red,"redShl works only with red numbers"),this.red.shl(this,t)},o.prototype.redMul=function(t){return r(this.red,"redMul works only with red numbers"),this.red._verify2(this,t),this.red.mul(this,t)},o.prototype.redIMul=function(t){return r(this.red,"redMul works only with red 
numbers"),this.red._verify2(this,t),this.red.imul(this,t)},o.prototype.redSqr=function(){return r(this.red,"redSqr works only with red numbers"),this.red._verify1(this),this.red.sqr(this)},o.prototype.redISqr=function(){return r(this.red,"redISqr works only with red numbers"),this.red._verify1(this),this.red.isqr(this)},o.prototype.redSqrt=function(){return r(this.red,"redSqrt works only with red numbers"),this.red._verify1(this),this.red.sqrt(this)},o.prototype.redInvm=function(){return r(this.red,"redInvm works only with red numbers"),this.red._verify1(this),this.red.invm(this)},o.prototype.redNeg=function(){return r(this.red,"redNeg works only with red numbers"),this.red._verify1(this),this.red.neg(this)},o.prototype.redPow=function(t){return r(this.red&&!t.red,"redPow(normalNum)"),this.red._verify1(this),this.red.pow(this,t)};var S={k256:null,p224:null,p192:null,p25519:null};d.prototype._tmp=function(){var t=new o(null);return t.words=new Array(Math.ceil(this.n/13)),t},d.prototype.ireduce=function(t){var n,e=t;do{this.split(e,this.tmp),n=(e=(e=this.imulK(e)).iadd(this.tmp)).bitLength()}while(n>this.n);var r=n<this.n?-1:e.ucmp(this.p);return 0===r?(e.words[0]=0,e.length=1):r>0?e.isub(this.p):e.strip(),e},d.prototype.split=function(t,n){t.iushrn(this.n,0,n)},d.prototype.imulK=function(t){return t.imul(this.k)},i(p,d),p.prototype.split=function(t,n){for(var e=Math.min(t.length,9),r=0;r<e;r++)n.words[r]=t.words[r];if(n.length=e,t.length<=9)return t.words[0]=0,void(t.length=1);var i=t.words[9];for(n.words[n.length++]=4194303&i,r=10;r<t.length;r++){var o=0|t.words[r];t.words[r-10]=(4194303&o)<<4|i>>>22,i=o}i>>>=22,t.words[r-10]=i,0===i&&t.length>10?t.length-=10:t.length-=9},p.prototype.imulK=function(t){t.words[t.length]=0,t.words[t.length+1]=0,t.length+=2;for(var n=0,e=0;e<t.length;e++){var r=0|t.words[e];n+=977*r,t.words[e]=67108863&n,n=64*r+(n/67108864|0)}return 0===t.words[t.length-1]&&(t.length--,0===t.words[t.length-1]&&t.length--),t},i(m,d),i(v,d),i(g,d),g.prototype.imulK=function(t){for(var n=0,e=0;e<t.length;e++){var r=19*(0|t.words[e])+n,i=67108863&r;r>>>=26,t.words[e]=i,n=r}return 0!==n&&(t.words[t.length++]=n),t},o._prime=function(t){if(S[t])return S[t];var n;if("k256"===t)n=new p;else if("p224"===t)n=new m;else if("p192"===t)n=new v;else{if("p25519"!==t)throw new Error("Unknown prime "+t);n=new g}return S[t]=n,n},y.prototype._verify1=function(t){r(0===t.negative,"red works only with positives"),r(t.red,"red works only with red numbers")},y.prototype._verify2=function(t,n){r(0==(t.negative|n.negative),"red works only with positives"),r(t.red&&t.red===n.red,"red works only with red numbers")},y.prototype.imod=function(t){return this.prime?this.prime.ireduce(t)._forceRed(this):t.umod(this.m)._forceRed(this)},y.prototype.neg=function(t){return t.isZero()?t.clone():this.m.sub(t)._forceRed(this)},y.prototype.add=function(t,n){this._verify2(t,n);var e=t.add(n);return e.cmp(this.m)>=0&&e.isub(this.m),e._forceRed(this)},y.prototype.iadd=function(t,n){this._verify2(t,n);var e=t.iadd(n);return e.cmp(this.m)>=0&&e.isub(this.m),e},y.prototype.sub=function(t,n){this._verify2(t,n);var e=t.sub(n);return e.cmpn(0)<0&&e.iadd(this.m),e._forceRed(this)},y.prototype.isub=function(t,n){this._verify2(t,n);var e=t.isub(n);return e.cmpn(0)<0&&e.iadd(this.m),e},y.prototype.shl=function(t,n){return this._verify1(t),this.imod(t.ushln(n))},y.prototype.imul=function(t,n){return this._verify2(t,n),this.imod(t.imul(n))},y.prototype.mul=function(t,n){return 
this._verify2(t,n),this.imod(t.mul(n))},y.prototype.isqr=function(t){return this.imul(t,t.clone())},y.prototype.sqr=function(t){return this.mul(t,t)},y.prototype.sqrt=function(t){if(t.isZero())return t.clone();var n=this.m.andln(3);if(r(n%2==1),3===n){var e=this.m.add(new o(1)).iushrn(2);return this.pow(t,e)}for(var i=this.m.subn(1),s=0;!i.isZero()&&0===i.andln(1);)s++,i.iushrn(1);r(!i.isZero());var u=new o(1).toRed(this),a=u.redNeg(),h=this.m.subn(1).iushrn(1),l=this.m.bitLength();for(l=new o(2*l*l).toRed(this);0!==this.pow(l,h).cmp(a);)l.redIAdd(a);for(var c=this.pow(l,i),f=this.pow(t,i.addn(1).iushrn(1)),d=this.pow(t,i),p=s;0!==d.cmp(u);){for(var m=d,v=0;0!==m.cmp(u);v++)m=m.redSqr();r(v<p);var g=this.pow(c,new o(1).iushln(p-v-1));f=f.redMul(g),c=g.redSqr(),d=d.redMul(c),p=v}return f},y.prototype.invm=function(t){var n=t._invmp(this.m);return 0!==n.negative?(n.negative=0,this.imod(n).redNeg()):this.imod(n)},y.prototype.pow=function(t,n){if(n.isZero())return new o(1);if(0===n.cmpn(1))return t.clone();var e=new Array(16);e[0]=new o(1).toRed(this),e[1]=t;for(var r=2;r<e.length;r++)e[r]=this.mul(e[r-1],t);var i=e[0],s=0,u=0,a=n.bitLength()%26;for(0===a&&(a=26),r=n.length-1;r>=0;r--){for(var h=n.words[r],l=a-1;l>=0;l--){var c=h>>l&1;i!==e[0]&&(i=this.sqr(i)),0!==c||0!==s?(s<<=1,s|=c,(4===++u||0===r&&0===l)&&(i=this.mul(i,e[s]),u=0,s=0)):u=0}a=26}return i},y.prototype.convertTo=function(t){var n=t.umod(this.m);return n===t?n.clone():n},y.prototype.convertFrom=function(t){var n=t.clone();return n.red=null,n},o.mont=function(t){return new w(t)},i(w,y),w.prototype.convertTo=function(t){return this.imod(t.ushln(this.shift))},w.prototype.convertFrom=function(t){var n=this.imod(t.mul(this.rinv));return n.red=null,n},w.prototype.imul=function(t,n){if(t.isZero()||n.isZero())return t.words[0]=0,t.length=1,t;var e=t.imul(n),r=e.maskn(this.shift).mul(this.minv).imaskn(this.shift).mul(this.m),i=e.isub(r).iushrn(this.shift),o=i;return i.cmp(this.m)>=0?o=i.isub(this.m):i.cmpn(0)<0&&(o=i.iadd(this.m)),o._forceRed(this)},w.prototype.mul=function(t,n){if(t.isZero()||n.isZero())return new o(0)._forceRed(this);var e=t.mul(n),r=e.maskn(this.shift).mul(this.minv).imaskn(this.shift).mul(this.m),i=e.isub(r).iushrn(this.shift),s=i;return i.cmp(this.m)>=0?s=i.isub(this.m):i.cmpn(0)<0&&(s=i.iadd(this.m)),s._forceRed(this)},w.prototype.invm=function(t){return this.imod(t._invmp(this.m).mul(this.r2))._forceRed(this)}}(void 0===n||n,this)},{}],11:[function(t,n,e){var r=t("web3-utils"),i=t("bn.js"),o=function(t,n){for(var e=t;e.length<2*n;)e="0"+e;return e},s=function(t){var n="A".charCodeAt(0),e="Z".charCodeAt(0);return t=t.toUpperCase(),(t=t.substr(4)+t.substr(0,4)).split("").map(function(t){var r=t.charCodeAt(0);return r>=n&&r<=e?r-n+10:t}).join("")},u=function(t){for(var n,e=t;e.length>2;)n=e.slice(0,9),e=parseInt(n,10)%97+e.slice(n.length);return parseInt(e,10)%97},a=function(t){this._iban=t};a.toAddress=function(t){if(!(t=new a(t)).isDirect())throw new Error("IBAN is indirect and can't be converted");return t.toAddress()},a.toIban=function(t){return a.fromAddress(t).toString()},a.fromAddress=function(t){if(!r.isAddress(t))throw new Error("Provided address is not a valid address: "+t);t=t.replace("0x","").replace("0X","");var n=new i(t,16).toString(36),e=o(n,15);return a.fromBban(e.toUpperCase())},a.fromBban=function(t){var n=("0"+(98-u(s("XE00"+t)))).slice(-2);return new a("XE"+n+t)},a.createIndirect=function(t){return a.fromBban("ETH"+t.institution+t.identifier)},a.isValid=function(t){return new 
a(t).isValid()},a.prototype.isValid=function(){return/^XE[0-9]{2}(ETH[0-9A-Z]{13}|[0-9A-Z]{30,31})$/.test(this._iban)&&1===u(s(this._iban))},a.prototype.isDirect=function(){return 34===this._iban.length||35===this._iban.length},a.prototype.isIndirect=function(){return 20===this._iban.length},a.prototype.checksum=function(){return this._iban.substr(2,2)},a.prototype.institution=function(){return this.isIndirect()?this._iban.substr(7,4):""},a.prototype.client=function(){return this.isIndirect()?this._iban.substr(11):""},a.prototype.toAddress=function(){if(this.isDirect()){var t=this._iban.substr(4),n=new i(t,36);return r.toChecksumAddress(n.toString(16,20))}return""},a.prototype.toString=function(){return this._iban},n.exports=a},{"bn.js":10,"web3-utils":33}],12:[function(t,n,e){n.exports=XMLHttpRequest},{}],13:[function(t,n,e){var r=t("web3-core-helpers").errors,i=t("xhr2"),o=function(t,n){this.host=t||"http://localhost:8545",this.timeout=n||0,this.connected=!1};o.prototype.send=function(t,n){var e=this,o=new i;o.open("POST",this.host,!0),o.setRequestHeader("Content-Type","application/json"),o.onreadystatechange=function(){if(4===o.readyState&&1!==o.timeout){var t=o.responseText,i=null;try{t=JSON.parse(t)}catch(t){i=r.InvalidResponse(o.responseText)}e.connected=!0,n(i,t)}},o.ontimeout=function(){e.connected=!1,n(r.ConnectionTimeout(this.timeout))};try{o.send(JSON.stringify(t))}catch(t){this.connected=!1,n(r.InvalidConnection(this.host))}},n.exports=o},{"web3-core-helpers":5,xhr2:12}],14:[function(t,n,e){!function(t,r,i,o,s,u){function a(t,n){return function(){return t.call(this,n.apply(this,arguments))}}function h(t){return function(n){return n[t]}}function l(t,n){return n.apply(u,t)}function c(t){var n=t.length-1,e=i.prototype.slice;if(0==n)return function(){return t.call(this,e.call(arguments))};if(1==n)return function(){return t.call(this,arguments[0],e.call(arguments,1))};var r=i(t.length);return function(){for(var i=0;i<n;i++)r[i]=arguments[i];return r[n]=e.call(arguments,n),t.apply(this,r)}}function f(t){return function(n,e){return t(e,n)}}function d(t,n){return function(e){return t(e)&&n(e)}}function p(){}function m(){return!0}function v(t){return function(){return t}}function g(t,n){return n&&n.constructor===t}function y(t){return t!==u}function w(t,n){return n instanceof r&&A(function(t){return t in n},t)}function b(t,n){return[t,n]}function M(t){return N(t.reduce(f(b),G))}function x(t){return _(function(t,n){return t.unshift(n),t},[],t)}function k(t,n){return n?b(t(Q(n)),k(t,Y(n))):G}function _(t,n,e){return e?t(_(t,n,Y(e)),Q(e)):n}function S(t,n,e){function r(t,e){return t?n(Q(t))?(e(Q(t)),Y(t)):b(Q(t),r(Y(t),e)):G}return r(t,e||p)}function A(t,n){return!n||t(Q(n))&&A(t,Y(n))}function C(t,n){t&&(Q(t).apply(null,n),C(Y(t),n))}function N(t){function n(t,e){return t?n(Y(t),b(Q(t),e)):e}return n(t,G)}function T(t,n){return n&&(t(Q(n))?Q(n):T(t,Y(n)))}function E(t){function n(){var t=0;F!==u&&F.length>d&&(e("Max buffer length exceeded: textNode"),t=Math.max(t,F.length)),H.length>d&&(e("Max buffer length exceeded: numberNode"),t=Math.max(t,H.length)),L=d-t+V}function e(t){F!==u&&(l(F),c(),F=u),i=o(t+"\nLn: "+K+"\nCol: "+X+"\nChr: "+s),f(U(u,u,i))}function r(t){return"\r"==t||"\n"==t||" "==t||"\t"==t}var 
i,s,a,h=t(yt).emit,l=t(wt).emit,c=t(bt).emit,f=t(ct).emit,d=65536,p=/[\\"\n]/g,m=0,v=m++,g=m++,y=m++,w=m++,b=m++,M=m++,x=m++,k=m++,_=m++,S=m++,A=m++,C=m++,N=m++,T=m++,E=m++,j=m++,I=m++,B=m++,O=m++,R=m++,P=m,L=d,F=u,H="",q=!1,z=!1,W=v,D=[],Z=null,J=0,$=0,V=0,X=0,K=1;t(mt).on(function(t){if(!i){if(z)return e("Cannot write after close");var o=0;for(s=t[0];s&&(a=s,s=t[o++]);)switch(V++,"\n"==s?(K++,X=0):X++,W){case v:if("{"===s)W=y;else if("["===s)W=b;else if(!r(s))return e("Non-whitespace before {[.");continue;case k:case y:if(r(s))continue;if(W===k)D.push(_);else{if("}"===s){l({}),c(),W=D.pop()||g;continue}D.push(w)}if('"'!==s)return e('Malformed object key should start with " ');W=x;continue;case _:case w:if(r(s))continue;if(":"===s)W===w?(D.push(w),F!==u&&(l({}),h(F),F=u),$++):F!==u&&(h(F),F=u),W=g;else if("}"===s)F!==u&&(l(F),c(),F=u),c(),$--,W=D.pop()||g;else{if(","!==s)return e("Bad object");W===w&&D.push(w),F!==u&&(l(F),c(),F=u),W=k}continue;case b:case g:if(r(s))continue;if(W===b){if(l([]),$++,W=g,"]"===s){c(),$--,W=D.pop()||g;continue}D.push(M)}if('"'===s)W=x;else if("{"===s)W=y;else if("["===s)W=b;else if("t"===s)W=S;else if("f"===s)W=N;else if("n"===s)W=I;else if("-"===s)H+=s;else if("0"===s)H+=s,W=P;else{if(-1==="123456789".indexOf(s))return e("Bad value");H+=s,W=P}continue;case M:if(","===s)D.push(M),F!==u&&(l(F),c(),F=u),W=g;else{if("]"!==s){if(r(s))continue;return e("Bad array")}F!==u&&(l(F),c(),F=u),c(),$--,W=D.pop()||g}continue;case x:F===u&&(F="");var f=o-1;t:for(;;){for(;J>0;)if(Z+=s,s=t.charAt(o++),4===J?(F+=String.fromCharCode(parseInt(Z,16)),J=0,f=o-1):J++,!s)break t;if('"'===s&&!q){W=D.pop()||g,F+=t.substring(f,o-1);break}if(!("\\"!==s||q||(q=!0,F+=t.substring(f,o-1),s=t.charAt(o++))))break;if(q){if(q=!1,"n"===s?F+="\n":"r"===s?F+="\r":"t"===s?F+="\t":"f"===s?F+="\f":"b"===s?F+="\b":"u"===s?(J=1,Z=""):F+=s,s=t.charAt(o++),f=o-1,s)continue;break}p.lastIndex=o;var d=p.exec(t);if(!d){o=t.length+1,F+=t.substring(f,o-1);break}if(o=d.index+1,!(s=t.charAt(d.index))){F+=t.substring(f,o-1);break}}continue;case S:if(!s)continue;if("r"!==s)return e("Invalid true started with t"+s);W=A;continue;case A:if(!s)continue;if("u"!==s)return e("Invalid true started with tr"+s);W=C;continue;case C:if(!s)continue;if("e"!==s)return e("Invalid true started with tru"+s);l(!0),c(),W=D.pop()||g;continue;case N:if(!s)continue;if("a"!==s)return e("Invalid false started with f"+s);W=T;continue;case T:if(!s)continue;if("l"!==s)return e("Invalid false started with fa"+s);W=E;continue;case E:if(!s)continue;if("s"!==s)return e("Invalid false started with fal"+s);W=j;continue;case j:if(!s)continue;if("e"!==s)return e("Invalid false started with fals"+s);l(!1),c(),W=D.pop()||g;continue;case I:if(!s)continue;if("u"!==s)return e("Invalid null started with n"+s);W=B;continue;case B:if(!s)continue;if("l"!==s)return e("Invalid null started with nu"+s);W=O;continue;case O:if(!s)continue;if("l"!==s)return e("Invalid null started with nul"+s);l(null),c(),W=D.pop()||g;continue;case R:if("."!==s)return e("Leading zero not followed by .");H+=s,W=P;continue;case P:if(-1!=="0123456789".indexOf(s))H+=s;else if("."===s){if(-1!==H.indexOf("."))return e("Invalid number has two dots");H+=s}else if("e"===s||"E"===s){if(-1!==H.indexOf("e")||-1!==H.indexOf("E"))return e("Invalid number has two exponential");H+=s}else if("+"===s||"-"===s){if("e"!==a&&"E"!==a)return e("Invalid symbol in number");H+=s}else H&&(l(parseFloat(H)),c(),H=""),o--,W=D.pop()||g;continue;default:return e("Unknown state: 
"+W)}V>=L&&n()}}),t(vt).on(function(){if(W==v)return l({}),c(),void(z=!0);W===g&&0===$||e("Unexpected end"),F!==u&&(l(F),c(),F=u),z=!0})}function j(t,n){var e,r={};for(var i in n)t(i).on(function(t){return function(n){e=t(e,n)}}(n[i]),r);t(ht).on(function(t){var n=Q(e),r=et(n),i=Y(e);i&&(rt(Q(i))[r]=t)}),t(lt).on(function(){var t=Q(e),n=et(t),r=Y(e);r&&delete rt(Q(r))[n]}),t(gt).on(function(){for(var e in n)t(e).un(r)})}function I(t){var n={};return t&&t.split("\r\n").forEach(function(t){var e=t.indexOf(": ");n[t.substring(0,e)]=t.substring(e+2)}),n}function B(t,n){function e(t){return{"http:":80,"https:":443}[t]}function r(n){return n.port||e(n.protocol||t.protocol)}return!!(n.protocol&&n.protocol!=t.protocol||n.host&&n.host!=t.host||n.host&&r(n)!=r(t))}function O(t){var n=/(\w+:)?(?:\/\/)([\w.-]+)?(?::(\d+))?\/?/.exec(t)||[];return{protocol:n[1]||"",host:n[2]||"",port:n[3]||""}}function R(){return new XMLHttpRequest}function P(n,e,r,i,o,s,a){function h(){var t=e.responseText,n=t.substr(f);n&&l(n),f=X(t)}var l=n(mt).emit,c=n(ct).emit,f=0,d=!0;n(gt).on(function(){e.onreadystatechange=null,e.abort()}),"onprogress"in e&&(e.onprogress=h),e.onreadystatechange=function(){function t(){try{d&&n(pt).emit(e.status,I(e.getAllResponseHeaders())),d=!1}catch(t){}}switch(e.readyState){case 2:case 3:return t();case 4:t(),2==String(e.status)[0]?(h(),n(vt).emit()):c(U(e.status,e.responseText))}};try{e.open(r,i,!0);for(var p in s)e.setRequestHeader(p,s[p]);B(t.location,O(i))||e.setRequestHeader("X-Requested-With","XMLHttpRequest"),e.withCredentials=a,e.send(o)}catch(n){t.setTimeout($(c,U(u,u,n)),0)}}function L(t,n){return{key:t,node:n}}function F(t){function n(t,n){var e=rt(Q(t));return g(i,e)?r(t,X(e),n):t}function e(t,n,e){rt(Q(t))[n]=e}function r(t,n,r){t&&e(t,n,r);var i=b(L(n,r),t);return o(i),i}var o=t(ut).emit,s=t(at).emit,u=t(dt).emit,a=t(ft).emit,h={};return h[wt]=function(t,i){if(!t)return u(i),r(t,it,i);var o=n(t,i),s=Y(o),a=et(Q(o));return e(s,a,i),b(L(a,i),s)},h[bt]=function(t){return s(t),Y(t)||a(rt(Q(t)))},h[yt]=r,h}function H(t,n,e){function r(t){return function(n){return n.id==t}}var i,o;return{on:function(e,r){var s={listener:e,id:r||e};return n&&n.emit(t,e,s.id),i=b(s,i),o=b(e,o),this},emit:function(){C(o,arguments)},un:function(n){var s;i=S(i,r(n),function(t){s=t}),s&&(o=S(o,function(t){return t==s.listener}),e&&e.emit(t,s.listener,s.id))},listeners:function(){return o},hasListener:function(t){return y(T(t?r(t):m,i))}}}function q(){function t(t){return e[t]=H(t,r,i)}function n(n){return e[n]||t(n)}var e={},r=t("newListener"),i=t("removeListener");return["emit","on","un"].forEach(function(t){n[t]=c(function(e,r){l(r,n(e)[t])})}),n}function U(t,n,e){try{var r=s.parse(n)}catch(t){}return{statusCode:t,body:n,jsonBody:r,thrown:e}}function z(t,n){function e(t,n,e){var r=N(e);t(n,x(Y(k(et,r))),x(k(rt,r)))}function r(n,r,i){var o=t(n).emit;r.on(function(t){var n=i(t);!1!==n&&e(o,rt(n),t)},n),t("removeListener").on(function(e){e==n&&(t(e).listeners()||r.un(n))})}var i={node:t(at),path:t(ut)};t("newListener").on(function(t){var e=/(node|path):(.*)/.exec(t);if(e){var o=i[e[1]];o.hasListener(t)||r(t,o,n(e[2]))}})}function W(t,n){function e(t,n,e){e=e||n;var i=r(n);return t.on(function(){var n=!1;h.forget=function(){n=!0},l(arguments,i),delete h.forget,n&&t.un(e)},e),h}function r(t){return function(){try{return t.apply(h,arguments)}catch(t){setTimeout(function(){throw t})}}}function i(n,e){return t(n+":"+e)}function o(t){return function(){var 
n=t.apply(this,arguments);y(n)&&(n==J.drop?m():g(n))}}function s(t,n,r){var s;s="node"==t?o(r):r,e(i(t,n),s,r)}function u(t,n){for(var e in n)s(t,e,n[e])}function a(t,n,e){return K(n)?s(t,n,e):u(t,n),h}var h,f=/^(node|path):./,d=t(ft),m=t(lt).emit,g=t(ht).emit,w=c(function(n,r){if(h[n])l(r,h[n]);else{var i=t(n),o=r[0];f.test(n)?e(i,o):i.on(o)}return h});return t(dt).on(function(t){h.root=v(t)}),t(pt).on(function(t,n){h.header=function(t){return t?n[t]:n}}),h={on:w,addListener:w,removeListener:function(n,e,r){if("done"==n)d.un(e);else if("node"==n||"path"==n)t.un(n+":"+e,r);else{var i=e;t(n).un(i)}return h},emit:t.emit,node:$(a,"node"),path:$(a,"path"),done:$(e,d),start:$(function(n,e){return t(n).on(r(e),e),h},pt),fail:t(ct).on,abort:t(gt).emit,header:p,root:p,source:n}}function D(t,n,e,r,i){var o=q();return n&&P(o,R(),t,n,e,r,i),E(o),j(o,F(o)),z(o,ot),W(o,n)}function Z(t,n,e,r,i,o,u){return i=i?s.parse(s.stringify(i)):{},r?K(r)||(r=s.stringify(r),i["Content-Type"]=i["Content-Type"]||"application/json"):r=null,t(e||"GET",function(t,n){return!1===n&&(-1==t.indexOf("?")?t+="?":t+="&",t+="_="+(new Date).getTime()),t}(n,u),r,i,o||!1)}function J(t){var n=tt("resume","pause","pipe"),e=$(w,n);return t?e(t)||K(t)?Z(D,t):Z(D,t.url,t.method,t.body,t.headers,t.withCredentials,t.cached):D()}var $=c(function(t,n){var e=n.length;return c(function(r){for(var i=0;i<r.length;i++)n[e+i]=r[i];return n.length=e+r.length,t.apply(this,n)})}),V=(c(function(t){function n(t,n){return[l(t,n)]}var e=M(t);return c(function(t){return _(n,t,e)[0]})}),c(function(t){return c(function(n){for(var e,r=0;r<X(t);r++)if(e=l(n,t[r]))return e})})),X=h("length"),K=$(g,String),G=null,Q=h(0),Y=h(1),tt=c(M),nt=function(){var t=function(t){return t.exec.bind(t)},n=c(function(n){return n.unshift(/^/),t(RegExp(n.map(h("source")).join("")))}),e=/(\$?)/,r=/()/,i=/\["([^"]+)"\]/,o=/\[(\d+|\*)\]/,s=/{([\w ]*?)}/,u=/(?:{([\w ]*?)})?/,a=n(e,/([\w-_]+|\*)/,u),l=n(e,i,u),f=n(e,o,u),d=n(e,r,s),p=n(/\.\./),m=n(/\./),v=n(e,/!/),g=n(/$/);return function(t){return t(V(a,l,f,d),p,m,v,g)}}(),et=h("key"),rt=h("node"),it={},ot=nt(function(t,n,e,r,i){function s(t,n){return!!n[g]?d(t,Q):t}function u(t){return t==m?m:d(function(t){return x(t)!=it},a(t,Y))}function h(){return function(t){return x(t)==it}}function l(t,n,e){return _(function(t,n){return n(t,e)},n,t)}function c(t,n,e,r,i){var o=t(e);if(o){var s=l(n,r,o);return i(e.substr(X(o[0])),s)}}function f(t,n){return $(c,t,n)}function p(t,n){return n}function v(t,n){return S(t,n,t?v:p)}var g=1,y=2,b=3,x=a(et,Q),k=a(rt,Q),S=V(f(t,tt(s,function(t,n){var e=n[b];return e?d(a($(w,M(e.split(/\W+/))),k),t):t},function(t,n){var e=n[y];return d(e&&"*"!=e?function(t){return x(t)==e}:m,t)},u)),f(n,tt(function(t){if(t==m)return m;var n=h(),e=t,r=u(function(t){return i(t)}),i=V(n,e,r);return i})),f(e,tt()),f(r,tt(s,h)),f(i,tt(function(t){return function(n){var e=t(n);return!0===e?Q(n):e}})),function(t){throw o('"'+t+'" could not be tokenised')});return function(t){try{return v(t,m)}catch(n){throw o('Could not compile "'+t+'" because '+n.message)}}}),st=1,ut=st++,at=st++,ht=st++,lt=st++,ct="fail",ft=st++,dt=st++,pt="start",mt="data",vt="end",gt=st++,yt=st++,wt=st++,bt=st++;J.drop=function(){return J.drop},"object"===(void 0===e?"undefined":_typeof(e))?n.exports=J:t.oboe=J}(function(){try{return window}catch(t){return self}}(),Object,Array,Error,JSON)},{}],15:[function(t,n,e){arguments[4][1][0].apply(e,arguments)},{dup:1}],16:[function(t,n,e){var 
r=t("underscore"),i=t("web3-core-helpers").errors,o=t("oboe"),s=function(t,n){var e=this;this.responseCallbacks={},this.notificationCallbacks=[],this.path=t,this.connection=n.connect({path:this.path}),this.addDefaultEvents();var i=function(t){var n=null;r.isArray(t)?t.forEach(function(t){e.responseCallbacks[t.id]&&(n=t.id)}):n=t.id,n||-1===t.method.indexOf("_subscription")?e.responseCallbacks[n]&&(e.responseCallbacks[n](null,t),delete e.responseCallbacks[n]):e.notificationCallbacks.forEach(function(n){r.isFunction(n)&&n(null,t)})};"Socket"===n.constructor.name?o(this.connection).done(i):this.connection.on("data",function(t){e._parseResponse(t.toString()).forEach(i)})};s.prototype.addDefaultEvents=function(){var t=this;this.connection.on("connect",function(){}),this.connection.on("error",function(){t._timeout()}),this.connection.on("end",function(){t._timeout(),t.notificationCallbacks.forEach(function(t){r.isFunction(t)&&t(new Error("IPC socket connection closed"))})}),this.connection.on("timeout",function(){t._timeout()})},s.prototype._parseResponse=function(t){var n=this,e=[];return t.replace(/\}[\n\r]?\{/g,"}|--|{").replace(/\}\][\n\r]?\[\{/g,"}]|--|[{").replace(/\}[\n\r]?\[\{/g,"}|--|[{").replace(/\}\][\n\r]?\{/g,"}]|--|{").split("|--|").forEach(function(t){n.lastChunk&&(t=n.lastChunk+t);var r=null;try{r=JSON.parse(t)}catch(e){return n.lastChunk=t,clearTimeout(n.lastChunkTimeout),void(n.lastChunkTimeout=setTimeout(function(){throw n._timeout(),i.InvalidResponse(t)},15e3))}clearTimeout(n.lastChunkTimeout),n.lastChunk=null,r&&e.push(r)}),e},s.prototype._addResponseCallback=function(t,n){var e=t.id||t[0].id,r=t.method||t[0].method;this.responseCallbacks[e]=n,this.responseCallbacks[e].method=r},s.prototype._timeout=function(){for(var t in this.responseCallbacks)this.responseCallbacks.hasOwnProperty(t)&&(this.responseCallbacks[t](i.InvalidConnection("on IPC")),delete this.responseCallbacks[t])},s.prototype.reconnect=function(){this.connection.connect({path:this.path})},s.prototype.send=function(t,n){this.connection.writable||this.connection.connect({path:this.path}),this.connection.write(JSON.stringify(t)),this._addResponseCallback(t,n)},s.prototype.on=function(t,n){if("function"!=typeof n)throw new Error("The second parameter callback must be a function.");switch(t){case"data":this.notificationCallbacks.push(n);break;default:this.connection.on(t,n)}},s.prototype.once=function(t,n){if("function"!=typeof n)throw new Error("The second parameter callback must be a function.");this.connection.once(t,n)},s.prototype.removeListener=function(t,n){var e=this;switch(t){case"data":this.notificationCallbacks.forEach(function(t,r){t===n&&e.notificationCallbacks.splice(r,1)});break;default:this.connection.removeListener(t,n)}},s.prototype.removeAllListeners=function(t){switch(t){case"data":this.notificationCallbacks=[];break;default:this.connection.removeAllListeners(t)}},s.prototype.reset=function(){this._timeout(),this.notificationCallbacks=[],this.connection.removeAllListeners("error"),this.connection.removeAllListeners("end"),this.connection.removeAllListeners("timeout"),this.addDefaultEvents()},n.exports=s},{oboe:14,underscore:15,"web3-core-helpers":5}],17:[function(t,n,e){arguments[4][1][0].apply(e,arguments)},{dup:1}],18:[function(t,n,e){var r=function(){return this}(),i=r.WebSocket||r.MozWebSocket,o=t("./version");n.exports={w3cwebsocket:i?function(t,n){return n?new i(t,n):new 
i(t)}:null,version:o}},{"./version":19}],19:[function(t,n,e){n.exports=t("../package.json").version},{"../package.json":20}],20:[function(t,n,e){n.exports={_args:[[{raw:"websocket@^1.0.24",scope:null,escapedName:"websocket",name:"websocket",rawSpec:"^1.0.24",spec:">=1.0.24 <2.0.0",type:"range"},"/Users/frozeman/Sites/_ethereum/web3/packages/web3-providers-ws"]],_from:"websocket@>=1.0.24 <2.0.0",_id:"[email protected]",_inCache:!0,_location:"/websocket",_nodeVersion:"7.3.0",_npmOperationalInternal:{host:"packages-12-west.internal.npmjs.com",tmp:"tmp/websocket-1.0.24.tgz_1482977757939_0.1858439394272864"},_npmUser:{name:"theturtle32",email:"[email protected]"},_npmVersion:"3.10.10",_phantomChildren:{},_requested:{raw:"websocket@^1.0.24",scope:null,escapedName:"websocket",name:"websocket",rawSpec:"^1.0.24",spec:">=1.0.24 <2.0.0",type:"range"},_requiredBy:["#USER","/"],_resolved:"https://registry.npmjs.org/websocket/-/websocket-1.0.24.tgz",_shasum:"74903e75f2545b6b2e1de1425bc1c905917a1890",_shrinkwrap:null,_spec:"websocket@^1.0.24",_where:"/Users/frozeman/Sites/_ethereum/web3/packages/web3-providers-ws",author:{name:"Brian McKelvey",email:"[email protected]",url:"https://www.worlize.com/"},browser:"lib/browser.js",bugs:{url:"https://github.com/theturtle32/WebSocket-Node/issues"},config:{verbose:!1},contributors:[{name:"Iñaki Baz Castillo",email:"[email protected]",url:"http://dev.sipdoc.net"}],dependencies:{debug:"^2.2.0",nan:"^2.3.3","typedarray-to-buffer":"^3.1.2",yaeti:"^0.0.6"},description:"Websocket Client & Server Library implementing the WebSocket protocol as specified in RFC 6455.",devDependencies:{"buffer-equal":"^1.0.0",faucet:"^0.0.1",gulp:"git+https://github.com/gulpjs/gulp.git#4.0","gulp-jshint":"^2.0.4",jshint:"^2.0.0","jshint-stylish":"^2.2.1",tape:"^4.0.1"},directories:{lib:"./lib"},dist:{shasum:"74903e75f2545b6b2e1de1425bc1c905917a1890",tarball:"https://registry.npmjs.org/websocket/-/websocket-1.0.24.tgz"},engines:{node:">=0.8.0"},gitHead:"0e15f9445953927c39ce84a232cb7dd6e3adf12e",homepage:"https://github.com/theturtle32/WebSocket-Node",keywords:["websocket","websockets","socket","networking","comet","push","RFC-6455","realtime","server","client"],license:"Apache-2.0",main:"index",maintainers:[{name:"theturtle32",email:"[email protected]"}],name:"websocket",optionalDependencies:{},readme:"ERROR: No README data found!",repository:{type:"git",url:"git+https://github.com/theturtle32/WebSocket-Node.git"},scripts:{gulp:"gulp",install:"(node-gyp rebuild 2> builderror.log) || (exit 0)",test:"faucet test/unit"},version:"1.0.24"}},{}],21:[function(t,n,e){var r=t("underscore"),i=t("web3-core-helpers").errors;if("undefined"!=typeof global)var o=t("websocket").w3cwebsocket;var s=function(t){var n=this;this.responseCallbacks={},this.notificationCallbacks=[],this.path=t,this.connection=new o(t),this.addDefaultEvents(),this.connection.onmessage=function(t){var e="string"==typeof t.data?t.data:"";n._parseResponse(e).forEach(function(t){var e=null;r.isArray(t)?t.forEach(function(t){n.responseCallbacks[t.id]&&(e=t.id)}):e=t.id,e||-1===t.method.indexOf("_subscription")?n.responseCallbacks[e]&&(n.responseCallbacks[e](null,t),delete n.responseCallbacks[e]):n.notificationCallbacks.forEach(function(n){r.isFunction(n)&&n(null,t)})})}};s.prototype.addDefaultEvents=function(){var t=this;this.connection.onerror=function(){t._timeout()},this.connection.onclose=function(n){t._timeout();var 
e=t.notificationCallbacks;t.reset(),e.forEach(function(t){r.isFunction(t)&&t(n)})}},s.prototype._parseResponse=function(t){var n=this,e=[];return t.replace(/\}[\n\r]?\{/g,"}|--|{").replace(/\}\][\n\r]?\[\{/g,"}]|--|[{").replace(/\}[\n\r]?\[\{/g,"}|--|[{").replace(/\}\][\n\r]?\{/g,"}]|--|{").split("|--|").forEach(function(t){n.lastChunk&&(t=n.lastChunk+t);var r=null;try{r=JSON.parse(t)}catch(e){return n.lastChunk=t,clearTimeout(n.lastChunkTimeout),void(n.lastChunkTimeout=setTimeout(function(){throw n._timeout(),i.InvalidResponse(t)},15e3))}clearTimeout(n.lastChunkTimeout),n.lastChunk=null,r&&e.push(r)}),e},s.prototype._addResponseCallback=function(t,n){var e=t.id||t[0].id,r=t.method||t[0].method;this.responseCallbacks[e]=n,this.responseCallbacks[e].method=r},s.prototype._timeout=function(){for(var t in this.responseCallbacks)this.responseCallbacks.hasOwnProperty(t)&&(this.responseCallbacks[t](i.InvalidConnection("on IPC")),delete this.responseCallbacks[t])},s.prototype.send=function(t,n){this.connection.send(JSON.stringify(t)),this._addResponseCallback(t,n)},s.prototype.on=function(t,n){if("function"!=typeof n)throw new Error("The second parameter callback must be a function.");switch(t){case"data":this.notificationCallbacks.push(n);break;case"connect":this.connection.onopen=n;break;case"end":this.connection.onclose=n;break;case"error":this.connection.onerror=n}},s.prototype.removeListener=function(t,n){var e=this;switch(t){case"data":this.notificationCallbacks.forEach(function(t,r){t===n&&e.notificationCallbacks.splice(r,1)})}},s.prototype.removeAllListeners=function(t){switch(t){case"data":this.notificationCallbacks=[];break;case"connect":this.connection.onopen=null;break;case"end":this.connection.onclose=null;break;case"error":this.connection.onerror=null}},s.prototype.reset=function(){this._timeout(),this.notificationCallbacks=[],this.addDefaultEvents()},n.exports=s},{underscore:17,"web3-core-helpers":5,websocket:18}],22:[function(t,n,e){arguments[4][10][0].apply(e,arguments)},{dup:10}],23:[function(t,n,e){function r(t){var n=t?t.toLowerCase():"ether",e=h[n];if("string"!=typeof e)throw new Error("[ethjs-unit] the unit provided "+t+" doesn't exists, please use the one of the following units "+JSON.stringify(h,null,2));return new o(e,10)}function i(t){if("string"==typeof t){if(!t.match(/^-?[0-9.]+$/))throw new Error("while converting number to string, invalid number value '"+t+"', should be a number matching (^-?[0-9.]+).");return t}if("number"==typeof t)return String(t);if("object"===(void 0===t?"undefined":_typeof(t))&&t.toString&&(t.toTwos||t.dividedToIntegerBy))return t.toPrecision?String(t.toPrecision()):t.toString(10);throw new Error("while converting number to string, invalid number value '"+t+"' type "+(void 0===t?"undefined":_typeof(t))+".")}var o=t("bn.js"),s=t("number-to-bn"),u=new o(0),a=new o(-1),h={noether:"0",wei:"1",kwei:"1000",Kwei:"1000",babbage:"1000",femtoether:"1000",mwei:"1000000",Mwei:"1000000",lovelace:"1000000",picoether:"1000000",gwei:"1000000000",Gwei:"1000000000",shannon:"1000000000",nanoether:"1000000000",nano:"1000000000",szabo:"1000000000000",microether:"1000000000000",micro:"1000000000000",finney:"1000000000000000",milliether:"1000000000000000",milli:"1000000000000000",ether:"1000000000000000000",kether:"1000000000000000000000",grand:"1000000000000000000000",mether:"1000000000000000000000000",gether:"1000000000000000000000000000",tether:"1000000000000000000000000000000"};n.exports={unitMap:h,numberToString:i,getValueOfUnit:r,fromWei:function(t,n,e){var 
i=s(t),o=i.lt(u),l=r(n),c=h[n].length-1||1,f=e||{};o&&(i=i.mul(a));for(var d=i.mod(l).toString(10);d.length<c;)d="0"+d;f.pad||(d=d.match(/^([0-9]*[1-9]|0)(0*)/)[1]);var p=i.div(l).toString(10);f.commify&&(p=p.replace(/\B(?=(\d{3})+(?!\d))/g,","));var m=p+("0"==d?"":"."+d);return o&&(m="-"+m),m},toWei:function(t,n){var e=i(t),s=r(n),u=h[n].length-1||1,l="-"===e.substring(0,1);if(l&&(e=e.substring(1)),"."===e)throw new Error("[ethjs-unit] while converting number "+t+" to wei, invalid value");var c=e.split(".");if(c.length>2)throw new Error("[ethjs-unit] while converting number "+t+" to wei, too many decimal points");var f=c[0],d=c[1];if(f||(f="0"),d||(d="0"),d.length>u)throw new Error("[ethjs-unit] while converting number "+t+" to wei, too many decimal places");for(;d.length<u;)d+="0";f=new o(f),d=new o(d);var p=f.mul(s).add(d);return l&&(p=p.mul(a)),new o(p.toString(10),10)}}},{"bn.js":22,"number-to-bn":26}],24:[function(t,n,e){n.exports=function(t){if("string"!=typeof t)throw new Error("[is-hex-prefixed] value must be type 'string', is currently type "+(void 0===t?"undefined":_typeof(t))+", while checking isHexPrefixed.");return"0x"===t.slice(0,2)}},{}],25:[function(t,n,e){!function(){function t(t,n,e){this.blocks=[],this.s=[],this.padding=n,this.outputBits=e,this.reset=!0,this.block=0,this.start=0,this.blockCount=1600-(t<<1)>>5,this.byteCount=this.blockCount<<2,this.outputBlocks=e>>5,this.extraBytes=(31&e)>>3;for(var r=0;r<50;++r)this.s[r]=0}var e="object"===("undefined"==typeof window?"undefined":_typeof(window))?window:{};!e.JS_SHA3_NO_NODE_JS&&"object"===("undefined"==typeof process?"undefined":_typeof(process))&&process.versions&&process.versions.node&&(e=global);for(var r=!e.JS_SHA3_NO_COMMON_JS&&"object"===(void 0===n?"undefined":_typeof(n))&&n.exports,i="0123456789abcdef".split(""),o=[0,8,16,24],s=[1,0,32898,0,32906,2147483648,2147516416,2147483648,32907,0,2147483649,0,2147516545,2147483648,32777,2147483648,138,0,136,0,2147516425,0,2147483658,0,2147516555,0,139,2147483648,32905,2147483648,32771,2147483648,32770,2147483648,128,2147483648,32778,0,2147483658,2147483648,2147516545,2147483648,32896,2147483648,2147483649,0,2147516424,2147483648],u=[224,256,384,512],a=["hex","buffer","arrayBuffer","array"],h=function(n,e,r){return function(i){return new t(n,e,n).update(i)[r]()}},l=function(n,e,r){return function(i,o){return new t(n,e,o).update(i)[r]()}},c=function(n,e){var r=h(n,e,"hex");r.create=function(){return new t(n,e,n)},r.update=function(t){return r.create().update(t)};for(var i=0;i<a.length;++i){var o=a[i];r[o]=h(n,e,o)}return r},f=[{name:"keccak",padding:[1,256,65536,16777216],bits:u,createMethod:c},{name:"sha3",padding:[6,1536,393216,100663296],bits:u,createMethod:c},{name:"shake",padding:[31,7936,2031616,520093696],bits:[128,256],createMethod:function(n,e){var r=l(n,e,"hex");r.create=function(r){return new t(n,e,r)},r.update=function(t,n){return r.create(n).update(t)};for(var i=0;i<a.length;++i){var o=a[i];r[o]=l(n,e,o)}return r}}],d={},p=[],m=0;m<f.length;++m)for(var v=f[m],g=v.bits,y=0;y<g.length;++y){var w=v.name+"_"+g[y];p.push(w),d[w]=v.createMethod(g[y],v.padding)}t.prototype.update=function(t){var n="string"!=typeof t;n&&t.constructor===ArrayBuffer&&(t=new Uint8Array(t));for(var e,r,i=t.length,s=this.blocks,u=this.byteCount,a=this.blockCount,h=0,l=this.s;h<i;){if(this.reset)for(this.reset=!1,s[0]=this.block,e=1;e<a+1;++e)s[e]=0;if(n)for(e=this.start;h<i&&e<u;++h)s[e>>2]|=t[h]<<o[3&e++];else 
for(e=this.start;h<i&&e<u;++h)(r=t.charCodeAt(h))<128?s[e>>2]|=r<<o[3&e++]:r<2048?(s[e>>2]|=(192|r>>6)<<o[3&e++],s[e>>2]|=(128|63&r)<<o[3&e++]):r<55296||r>=57344?(s[e>>2]|=(224|r>>12)<<o[3&e++],s[e>>2]|=(128|r>>6&63)<<o[3&e++],s[e>>2]|=(128|63&r)<<o[3&e++]):(r=65536+((1023&r)<<10|1023&t.charCodeAt(++h)),s[e>>2]|=(240|r>>18)<<o[3&e++],s[e>>2]|=(128|r>>12&63)<<o[3&e++],s[e>>2]|=(128|r>>6&63)<<o[3&e++],s[e>>2]|=(128|63&r)<<o[3&e++]);if(this.lastByteIndex=e,e>=u){for(this.start=e-u,this.block=s[a],e=0;e<a;++e)l[e]^=s[e];b(l),this.reset=!0}else this.start=e}return this},t.prototype.finalize=function(){var t=this.blocks,n=this.lastByteIndex,e=this.blockCount,r=this.s;if(t[n>>2]|=this.padding[3&n],this.lastByteIndex===this.byteCount)for(t[0]=t[e],n=1;n<e+1;++n)t[n]=0;for(t[e-1]|=2147483648,n=0;n<e;++n)r[n]^=t[n];b(r)},t.prototype.toString=t.prototype.hex=function(){this.finalize();for(var t,n=this.blockCount,e=this.s,r=this.outputBlocks,o=this.extraBytes,s=0,u=0,a="";u<r;){for(s=0;s<n&&u<r;++s,++u)t=e[s],a+=i[t>>4&15]+i[15&t]+i[t>>12&15]+i[t>>8&15]+i[t>>20&15]+i[t>>16&15]+i[t>>28&15]+i[t>>24&15];u%n==0&&(b(e),s=0)}return o&&(t=e[s],o>0&&(a+=i[t>>4&15]+i[15&t]),o>1&&(a+=i[t>>12&15]+i[t>>8&15]),o>2&&(a+=i[t>>20&15]+i[t>>16&15])),a},t.prototype.arrayBuffer=function(){this.finalize();var t,n=this.blockCount,e=this.s,r=this.outputBlocks,i=this.extraBytes,o=0,s=0,u=this.outputBits>>3;t=i?new ArrayBuffer(r+1<<2):new ArrayBuffer(u);for(var a=new Uint32Array(t);s<r;){for(o=0;o<n&&s<r;++o,++s)a[s]=e[o];s%n==0&&b(e)}return i&&(a[o]=e[o],t=t.slice(0,u)),t},t.prototype.buffer=t.prototype.arrayBuffer,t.prototype.digest=t.prototype.array=function(){this.finalize();for(var t,n,e=this.blockCount,r=this.s,i=this.outputBlocks,o=this.extraBytes,s=0,u=0,a=[];u<i;){for(s=0;s<e&&u<i;++s,++u)t=u<<2,n=r[s],a[t]=255&n,a[t+1]=n>>8&255,a[t+2]=n>>16&255,a[t+3]=n>>24&255;u%e==0&&b(r)}return o&&(t=u<<2,n=r[s],o>0&&(a[t]=255&n),o>1&&(a[t+1]=n>>8&255),o>2&&(a[t+2]=n>>16&255)),a};var b=function(t){var 
n,e,r,i,o,u,a,h,l,c,f,d,p,m,v,g,y,w,b,M,x,k,_,S,A,C,N,T,E,j,I,B,O,R,P,L,F,H,q,U,z,W,D,Z,J,$,V,X,K,G,Q,Y,tt,nt,et,rt,it,ot,st,ut,at,ht,lt;for(r=0;r<48;r+=2)i=t[0]^t[10]^t[20]^t[30]^t[40],o=t[1]^t[11]^t[21]^t[31]^t[41],u=t[2]^t[12]^t[22]^t[32]^t[42],a=t[3]^t[13]^t[23]^t[33]^t[43],h=t[4]^t[14]^t[24]^t[34]^t[44],l=t[5]^t[15]^t[25]^t[35]^t[45],c=t[6]^t[16]^t[26]^t[36]^t[46],f=t[7]^t[17]^t[27]^t[37]^t[47],n=(d=t[8]^t[18]^t[28]^t[38]^t[48])^(u<<1|a>>>31),e=(p=t[9]^t[19]^t[29]^t[39]^t[49])^(a<<1|u>>>31),t[0]^=n,t[1]^=e,t[10]^=n,t[11]^=e,t[20]^=n,t[21]^=e,t[30]^=n,t[31]^=e,t[40]^=n,t[41]^=e,n=i^(h<<1|l>>>31),e=o^(l<<1|h>>>31),t[2]^=n,t[3]^=e,t[12]^=n,t[13]^=e,t[22]^=n,t[23]^=e,t[32]^=n,t[33]^=e,t[42]^=n,t[43]^=e,n=u^(c<<1|f>>>31),e=a^(f<<1|c>>>31),t[4]^=n,t[5]^=e,t[14]^=n,t[15]^=e,t[24]^=n,t[25]^=e,t[34]^=n,t[35]^=e,t[44]^=n,t[45]^=e,n=h^(d<<1|p>>>31),e=l^(p<<1|d>>>31),t[6]^=n,t[7]^=e,t[16]^=n,t[17]^=e,t[26]^=n,t[27]^=e,t[36]^=n,t[37]^=e,t[46]^=n,t[47]^=e,n=c^(i<<1|o>>>31),e=f^(o<<1|i>>>31),t[8]^=n,t[9]^=e,t[18]^=n,t[19]^=e,t[28]^=n,t[29]^=e,t[38]^=n,t[39]^=e,t[48]^=n,t[49]^=e,m=t[0],v=t[1],$=t[11]<<4|t[10]>>>28,V=t[10]<<4|t[11]>>>28,T=t[20]<<3|t[21]>>>29,E=t[21]<<3|t[20]>>>29,ut=t[31]<<9|t[30]>>>23,at=t[30]<<9|t[31]>>>23,W=t[40]<<18|t[41]>>>14,D=t[41]<<18|t[40]>>>14,R=t[2]<<1|t[3]>>>31,P=t[3]<<1|t[2]>>>31,g=t[13]<<12|t[12]>>>20,y=t[12]<<12|t[13]>>>20,X=t[22]<<10|t[23]>>>22,K=t[23]<<10|t[22]>>>22,j=t[33]<<13|t[32]>>>19,I=t[32]<<13|t[33]>>>19,ht=t[42]<<2|t[43]>>>30,lt=t[43]<<2|t[42]>>>30,nt=t[5]<<30|t[4]>>>2,et=t[4]<<30|t[5]>>>2,L=t[14]<<6|t[15]>>>26,F=t[15]<<6|t[14]>>>26,w=t[25]<<11|t[24]>>>21,b=t[24]<<11|t[25]>>>21,G=t[34]<<15|t[35]>>>17,Q=t[35]<<15|t[34]>>>17,B=t[45]<<29|t[44]>>>3,O=t[44]<<29|t[45]>>>3,S=t[6]<<28|t[7]>>>4,A=t[7]<<28|t[6]>>>4,rt=t[17]<<23|t[16]>>>9,it=t[16]<<23|t[17]>>>9,H=t[26]<<25|t[27]>>>7,q=t[27]<<25|t[26]>>>7,M=t[36]<<21|t[37]>>>11,x=t[37]<<21|t[36]>>>11,Y=t[47]<<24|t[46]>>>8,tt=t[46]<<24|t[47]>>>8,Z=t[8]<<27|t[9]>>>5,J=t[9]<<27|t[8]>>>5,C=t[18]<<20|t[19]>>>12,N=t[19]<<20|t[18]>>>12,ot=t[29]<<7|t[28]>>>25,st=t[28]<<7|t[29]>>>25,U=t[38]<<8|t[39]>>>24,z=t[39]<<8|t[38]>>>24,k=t[48]<<14|t[49]>>>18,_=t[49]<<14|t[48]>>>18,t[0]=m^~g&w,t[1]=v^~y&b,t[10]=S^~C&T,t[11]=A^~N&E,t[20]=R^~L&H,t[21]=P^~F&q,t[30]=Z^~$&X,t[31]=J^~V&K,t[40]=nt^~rt&ot,t[41]=et^~it&st,t[2]=g^~w&M,t[3]=y^~b&x,t[12]=C^~T&j,t[13]=N^~E&I,t[22]=L^~H&U,t[23]=F^~q&z,t[32]=$^~X&G,t[33]=V^~K&Q,t[42]=rt^~ot&ut,t[43]=it^~st&at,t[4]=w^~M&k,t[5]=b^~x&_,t[14]=T^~j&B,t[15]=E^~I&O,t[24]=H^~U&W,t[25]=q^~z&D,t[34]=X^~G&Y,t[35]=K^~Q&tt,t[44]=ot^~ut&ht,t[45]=st^~at&lt,t[6]=M^~k&m,t[7]=x^~_&v,t[16]=j^~B&S,t[17]=I^~O&A,t[26]=U^~W&R,t[27]=z^~D&P,t[36]=G^~Y&Z,t[37]=Q^~tt&J,t[46]=ut^~ht&nt,t[47]=at^~lt&et,t[8]=k^~m&g,t[9]=_^~v&y,t[18]=B^~S&C,t[19]=O^~A&N,t[28]=W^~R&L,t[29]=D^~P&F,t[38]=Y^~Z&$,t[39]=tt^~J&V,t[48]=ht^~nt&rt,t[49]=lt^~et&it,t[0]^=s[r],t[1]^=s[r+1]};if(r)n.exports=d;else for(m=0;m<p.length;++m)e[p[m]]=d[p[m]]}()},{}],26:[function(t,n,e){var r=t("bn.js"),i=t("strip-hex-prefix");n.exports=function(t){if("string"==typeof t||"number"==typeof t){var n=new r(1),e=String(t).toLowerCase().trim(),o="0x"===e.substr(0,2)||"-0x"===e.substr(0,3),s=i(e);if("-"===s.substr(0,1)&&(s=i(s.slice(1)),n=new r(-1,10)),!(s=""===s?"0":s).match(/^-?[0-9]+$/)&&s.match(/^[0-9A-Fa-f]+$/)||s.match(/^[a-fA-F]+$/)||!0===o&&s.match(/^[0-9A-Fa-f]+$/))return new r(s,16).mul(n);if((s.match(/^-?[0-9]+$/)||""===s)&&!1===o)return new r(s,10).mul(n)}else if("object"===(void 
0===t?"undefined":_typeof(t))&&t.toString&&!t.pop&&!t.push&&t.toString(10).match(/^-?[0-9]+$/)&&(t.mul||t.dividedToIntegerBy))return new r(t.toString(10),10);throw new Error("[number-to-bn] while converting number "+JSON.stringify(t)+" to BN.js instance, error: invalid number value. Value must be an integer, hex string, BN or BigNumber instance. Note, decimals are not supported.")}},{"bn.js":22,"strip-hex-prefix":30}],27:[function(t,n,e){n.exports=window.crypto},{}],28:[function(t,n,e){n.exports=t("crypto")},{crypto:27}],29:[function(t,n,e){n.exports=function(n,e){var r=t("./crypto.js"),i="function"==typeof e;if(n>65536){if(!i)throw new Error("Requested too many random bytes.");e(new Error("Requested too many random bytes."))}if(void 0!==r&&r.randomBytes){if(!i)return"0x"+r.randomBytes(n).toString("hex");r.randomBytes(n,function(t,n){t?e(a):e(null,"0x"+n.toString("hex"))})}else{var o;if(void 0!==r?o=r:"undefined"!=typeof msCrypto&&(o=msCrypto),o&&o.getRandomValues){var s=o.getRandomValues(new Uint8Array(n)),u="0x"+Array.from(s).map(function(t){return t.toString(16)}).join("");if(!i)return u;e(null,u)}else{var a=new Error('No "crypto" object available. This Browser doesn\'t support generating secure random bytes.');if(!i)throw a;e(a)}}}},{"./crypto.js":28}],30:[function(t,n,e){var r=t("is-hex-prefixed");n.exports=function(t){return"string"!=typeof t?t:r(t)?t.slice(2):t}},{"is-hex-prefixed":24}],31:[function(t,n,e){arguments[4][1][0].apply(e,arguments)},{dup:1}],32:[function(t,n,e){!function(t){function r(t){for(var n,e,r=[],i=0,o=t.length;i<o;)(n=t.charCodeAt(i++))>=55296&&n<=56319&&i<o?56320==(64512&(e=t.charCodeAt(i++)))?r.push(((1023&n)<<10)+(1023&e)+65536):(r.push(n),i--):r.push(n);return r}function i(t){for(var n,e=t.length,r=-1,i="";++r<e;)(n=t[r])>65535&&(i+=v((n-=65536)>>>10&1023|55296),n=56320|1023&n),i+=v(n);return i}function o(t){if(t>=55296&&t<=57343)throw Error("Lone surrogate U+"+t.toString(16).toUpperCase()+" is not a scalar value")}function s(t,n){return v(t>>n&63|128)}function u(t){if(0==(4294967168&t))return v(t);var n="";return 0==(4294965248&t)?n=v(t>>6&31|192):0==(4294901760&t)?(o(t),n=v(t>>12&15|224),n+=s(t,6)):0==(4292870144&t)&&(n=v(t>>18&7|240),n+=s(t,12),n+=s(t,6)),n+=v(63&t|128)}function a(){if(m>=p)throw Error("Invalid byte index");var t=255&d[m];if(m++,128==(192&t))return 63&t;throw Error("Invalid continuation byte")}function h(){var t,n,e,r,i;if(m>p)throw Error("Invalid byte index");if(m==p)return!1;if(t=255&d[m],m++,0==(128&t))return t;if(192==(224&t)){if(n=a(),(i=(31&t)<<6|n)>=128)return i;throw Error("Invalid continuation byte")}if(224==(240&t)){if(n=a(),e=a(),(i=(15&t)<<12|n<<6|e)>=2048)return o(i),i;throw Error("Invalid continuation byte")}if(240==(248&t)&&(n=a(),e=a(),r=a(),(i=(7&t)<<18|n<<12|e<<6|r)>=65536&&i<=1114111))return i;throw Error("Invalid UTF-8 detected")}var l="object"==(void 0===e?"undefined":_typeof(e))&&e,c="object"==(void 0===n?"undefined":_typeof(n))&&n&&n.exports==l&&n,f="object"==("undefined"==typeof global?"undefined":_typeof(global))&&global;f.global!==f&&f.window!==f||(t=f);var d,p,m,v=String.fromCharCode,g={version:"2.1.2",encode:function(t){for(var n=r(t),e=n.length,i=-1,o="";++i<e;)o+=u(n[i]);return o},decode:function(t){d=r(t),p=d.length,m=0;for(var n,e=[];!1!==(n=h());)e.push(n);return i(e)}};if(l&&!l.nodeType)if(c)c.exports=g;else{var y={}.hasOwnProperty;for(var w in g)y.call(g,w)&&(l[w]=g[w])}else t.utf8=g}(this)},{}],33:[function(t,n,e){var 
r=t("underscore"),i=t("ethjs-unit"),o=t("./utils.js"),s=t("./soliditySha3.js"),u=t("randomhex"),a=function(t){if(!o.isHex(t))throw new Error("The parameter must be a valid HEX string.");var n="",e=0,r=t.length;for("0x"===t.substring(0,2)&&(e=2);e<r;e+=2){var i=parseInt(t.substr(e,2),16);n+=String.fromCharCode(i)}return n},h=function(t){for(var n="",e=0;e<t.length;e++){var r=t.charCodeAt(e).toString(16);n+=r.length<2?"0"+r:r}return"0x"+n},l=function(t){if(t=t?t.toLowerCase():"ether",!i.unitMap[t])throw new Error('This unit "'+t+"\" doesn't exist, please use the one of the following units"+JSON.stringify(i.unitMap,null,2));return t};n.exports={_fireError:function(t,n,e,i){return!r.isObject(t)||t instanceof Error||!t.data||((r.isObject(t.data)||r.isArray(t.data))&&(t.data=JSON.stringify(t.data,null,2)),t=t.message+"\n"+t.data),r.isString(t)&&(t=new Error(t)),r.isFunction(i)&&i(t),r.isFunction(e)&&(n&&r.isFunction(n.listeners)&&n.listeners("error").length&&r.isFunction(n.suppressUnhandledRejections)&&n.suppressUnhandledRejections(),e(t)),n&&r.isFunction(n.emit)&&(n.emit("error",t),n.removeAllListeners()),n},_jsonInterfaceMethodToString:function(t){if(r.isObject(t)&&t.name&&-1!==t.name.indexOf("("))return t.name;var n=t.inputs.map(function(t){return t.type}).join(",");return t.name+"("+n+")"},randomHex:u,_:r,BN:o.BN,isBN:o.isBN,isBigNumber:o.isBigNumber,isHex:o.isHex,sha3:o.sha3,keccak256:o.sha3,soliditySha3:s,isAddress:o.isAddress,checkAddressChecksum:o.checkAddressChecksum,toChecksumAddress:function(t){if(void 0===t)return"";if(!/^(0x)?[0-9a-f]{40}$/i.test(t))throw new Error('Given address "'+t+'" is not a valid Ethereum address.');t=t.toLowerCase().replace(/^0x/i,"");for(var n=o.sha3(t).replace(/^0x/i,""),e="0x",r=0;r<t.length;r++)parseInt(n[r],16)>7?e+=t[r].toUpperCase():e+=t[r];return e},toHex:o.toHex,toBN:o.toBN,bytesToHex:o.bytesToHex,hexToBytes:o.hexToBytes,hexToNumberString:o.hexToNumberString,hexToNumber:o.hexToNumber,toDecimal:o.hexToNumber,numberToHex:o.numberToHex,fromDecimal:o.numberToHex,hexToUtf8:o.hexToUtf8,hexToString:o.hexToUtf8,toUtf8:o.hexToUtf8,utf8ToHex:o.utf8ToHex,stringToHex:o.utf8ToHex,fromUtf8:o.utf8ToHex,hexToAscii:a,toAscii:a,asciiToHex:h,fromAscii:h,unitMap:i.unitMap,toWei:function(t,n){return n=l(n),o.isBN(t)?i.toWei(t,n):i.toWei(t,n).toString(10)},fromWei:function(t,n){return n=l(n),o.isBN(t)?i.fromWei(t,n):i.fromWei(t,n).toString(10)},padLeft:o.leftPad,leftPad:o.leftPad,padRight:o.rightPad,rightPad:o.rightPad}},{"./soliditySha3.js":34,"./utils.js":35,"ethjs-unit":23,randomhex:29,underscore:31}],34:[function(t,n,e){var r=t("underscore"),i=t("bn.js"),o=t("./utils.js"),s=function(t){return t.startsWith("int[")?"int256"+t.slice(3):"int"===t?"int256":t.startsWith("uint[")?"uint256"+t.slice(4):"uint"===t?"uint256":t.startsWith("fixed[")?"fixed128x128"+t.slice(5):"fixed"===t?"fixed128x128":t.startsWith("ufixed[")?"ufixed128x128"+t.slice(6):"ufixed"===t?"ufixed128x128":t},u=function(t){var n=/^\D+(\d+).*$/.exec(t);return n?parseInt(n[1],10):null},a=function(t){var n=/^\D+\d*\[(\d+)\]$/.exec(t);return n?parseInt(n[1],10):null},h=function(t){var n=void 0===t?"undefined":_typeof(t);if("string"===n)return o.isHex(t)?new i(t.replace(/0x/i,""),16):new i(t,10);if("number"===n)return new i(t);if(o.isBigNumber(t))return new i(t.toString(10));if(o.isBN(t))return t;throw new Error(t+" is not a number")},l=function(t,n,e){var r,a;if("bytes"===(t=s(t))){if(n.replace(/^0x/i,"").length%2!=0)throw new Error("Invalid bytes characters "+n.length);return n}if("string"===t)return 
o.utf8ToHex(n);if("bool"===t)return n?"01":"00";if(t.startsWith("address")){if(r=e?64:40,!o.isAddress(n))throw new Error(n+" is not a valid address, or the checksum is invalid.");return o.leftPad(n.toLowerCase(),r)}if(r=u(t),t.startsWith("bytes")){if(!r)throw new Error("bytes[] not yet supported in solidity");if(e&&(r=32),r<1||r>32||r<n.replace(/^0x/i,"").length/2)throw new Error("Invalid bytes"+r+" for "+n);return o.rightPad(n,2*r)}if(t.startsWith("uint")){if(r%8||r<8||r>256)throw new Error("Invalid uint"+r+" size");if((a=h(n)).bitLength()>r)throw new Error("Supplied uint exceeds width: "+r+" vs "+a.bitLength());if(a.lt(new i(0)))throw new Error("Supplied uint "+a.toString()+" is negative");return r?o.leftPad(a.toString("hex"),r/8*2):a}if(t.startsWith("int")){if(r%8||r<8||r>256)throw new Error("Invalid int"+r+" size");if((a=h(n)).bitLength()>r)throw new Error("Supplied int exceeds width: "+r+" vs "+a.bitLength());return a.lt(new i(0))?a.toTwos(r).toString("hex"):r?o.leftPad(a.toString("hex"),r/8*2):a}throw new Error("Unsupported or invalid type: "+t)},c=function(t){if(r.isArray(t))throw new Error("Autodetection of array types is not supported.");var n,e,s="";if(r.isObject(t)&&(t.hasOwnProperty("v")||t.hasOwnProperty("t")||t.hasOwnProperty("value")||t.hasOwnProperty("type"))?(n=t.t||t.type,s=t.v||t.value):(n=o.toHex(t,!0),s=o.toHex(t),n.startsWith("int")||n.startsWith("uint")||(n="bytes")),!n.startsWith("int")&&!n.startsWith("uint")||"string"!=typeof s||/^(-)?0x/i.test(s)||(s=new i(s)),r.isArray(s)){if((e=a(n))&&s.length!==e)throw new Error(n+" is not matching the given array "+JSON.stringify(s));e=s.length}return r.isArray(s)?s.map(function(t){return l(n,t,e).toString("hex").replace("0x","")}).join(""):l(n,s,e).toString("hex").replace("0x","")};n.exports=function(){var t=Array.prototype.slice.call(arguments),n=r.map(t,c);return o.sha3("0x"+n.join(""))}},{"./utils.js":35,"bn.js":22,underscore:31}],35:[function(t,n,e){var r=t("underscore"),i=t("bn.js"),o=t("number-to-bn"),s=t("utf8"),u=t("js-sha3"),a=function(t){return t instanceof i||t&&t.constructor&&"BN"===t.constructor.name},h=function(t){return t&&t.constructor&&"BigNumber"===t.constructor.name},l=function(t){try{return o.apply(null,arguments)}catch(n){throw new Error(n+' Given value: "'+t+'"')}},c=function(t){return!!/^(0x)?[0-9a-f]{40}$/i.test(t)&&(!(!/^(0x|0X)?[0-9a-f]{40}$/.test(t)&&!/^(0x|0X)?[0-9A-F]{40}$/.test(t))||f(t))},f=function(t){t=t.replace(/^0x/i,"");for(var n=g(t.toLowerCase()).replace(/^0x/i,""),e=0;e<40;e++)if(parseInt(n[e],16)>7&&t[e].toUpperCase()!==t[e]||parseInt(n[e],16)<=7&&t[e].toLowerCase()!==t[e])return!1;return!0},d=function(t){var n="";t=(t=(t=(t=(t=s.encode(t)).replace(/^(?:\u0000)*/,"")).split("").reverse().join("")).replace(/^(?:\u0000)*/,"")).split("").reverse().join("");for(var e=0;e<t.length;e++){var r=t.charCodeAt(e).toString(16);n+=r.length<2?"0"+r:r}return"0x"+n},p=function(t){if(!isFinite(t)&&!r.isString(t))return t;var n=l(t),e=n.toString(16);return n.lt(new i(0))?"-0x"+e.substr(1):"0x"+e},m=function(t){if(t=t.toString(16),!v(t))throw new Error('Given value "'+t+'" is not a valid hex string.');t=t.replace(/^0x/i,"");for(var n=[],e=0;e<t.length;e+=2)n.push(parseInt(t.substr(e,2),16));return n},v=function(t){return(r.isString(t)||r.isNumber(t))&&/^(-)?0x[0-9a-f]+$/i.test(t)},g=function(t){v(t)&&/^0x/i.test(t.toString())&&(t=m(t));var 
n="0x"+u.keccak_256(t);return"0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"===n?null:n};g.jsSha3=u,n.exports={BN:i,isBN:a,isBigNumber:h,toBN:l,isAddress:c,isBloom:function(t){return!(!/^(0x)?[0-9a-f]{512}$/i.test(t)||!/^(0x)?[0-9a-f]{512}$/.test(t)&&!/^(0x)?[0-9A-F]{512}$/.test(t))},isTopic:function(t){return!(!/^(0x)?[0-9a-f]{64}$/i.test(t)||!/^(0x)?[0-9a-f]{64}$/.test(t)&&!/^(0x)?[0-9A-F]{64}$/.test(t))},checkAddressChecksum:f,utf8ToHex:d,hexToUtf8:function(t){if(!v(t))throw new Error('The parameter "'+t+'" must be a valid HEX string.');for(var n="",e=0,r=(t=(t=(t=(t=(t=t.replace(/^0x/i,"")).replace(/^(?:00)*/,"")).split("").reverse().join("")).replace(/^(?:00)*/,"")).split("").reverse().join("")).length,i=0;i<r;i+=2)e=parseInt(t.substr(i,2),16),n+=String.fromCharCode(e);return s.decode(n)},hexToNumber:function(t){return t?l(t).toNumber():t},hexToNumberString:function(t){return t?l(t).toString(10):t},numberToHex:p,toHex:function(t,n){if(c(t))return n?"address":"0x"+t.toLowerCase().replace(/^0x/i,"");if(r.isBoolean(t))return n?"bool":t?"0x01":"0x00";if(r.isObject(t)&&!h(t)&&!a(t))return n?"string":d(JSON.stringify(t));if(r.isString(t)){if(0===t.indexOf("-0x")||0===t.indexOf("-0X"))return n?"int256":p(t);if(0===t.indexOf("0x")||0===t.indexOf("0X"))return n?"bytes":t;if(!isFinite(t))return n?"string":d(t)}return n?t<0?"int256":"uint256":p(t)},hexToBytes:m,bytesToHex:function(t){for(var n=[],e=0;e<t.length;e++)n.push((t[e]>>>4).toString(16)),n.push((15&t[e]).toString(16));return"0x"+n.join("")},isHex:v,leftPad:function(t,n,e){var r=/^0x/i.test(t)||"number"==typeof t,i=n-(t=t.toString(16).replace(/^0x/i,"")).length+1>=0?n-t.length+1:0;return(r?"0x":"")+new Array(i).join(e||"0")+t},rightPad:function(t,n,e){var r=/^0x/i.test(t)||"number"==typeof t,i=n-(t=t.toString(16).replace(/^0x/i,"")).length+1>=0?n-t.length+1:0;return(r?"0x":"")+t+new Array(i).join(e||"0")},sha3:g}},{"bn.js":22,"js-sha3":25,"number-to-bn":26,underscore:31,utf8:32}],BN:[function(t,n,e){arguments[4][10][0].apply(e,arguments)},{dup:10}],RequestManager:[function(t,n,e){var r=t("underscore"),i=t("web3-core-helpers").errors,o=t("./jsonrpc.js"),s=t("./batch.js"),u=t("./givenProvider.js"),a=function t(n){this.provider=null,this.providers=t.providers,this.setProvider(n),this.subscriptions={}};a.givenProvider=u,a.providers={WebsocketProvider:t("web3-providers-ws"),HttpProvider:t("web3-providers-http"),IpcProvider:t("web3-providers-ipc")},a.prototype.send=function(t,n){if(n=n||function(){},!this.provider)return n(i.InvalidProvider());var e=o.toPayload(t.method,t.params);this.provider.send(e,function(t,r){return r.id&&e.id!==r.id?n(new Error('Wrong response id "'+r.id+'" (expected: "'+e.id+'") in '+JSON.stringify(e))):t?n(t):r&&r.error?n(i.ErrorResponse(r)):o.isValidResponse(r)?void n(null,r.result):n(i.InvalidResponse(r))})},a.prototype.sendBatch=function(t,n){if(!this.provider)return n(i.InvalidProvider());var e=o.toBatchPayload(t);this.provider.send(e,function(t,e){return t?n(t):r.isArray(e)?void n(null,e):n(i.InvalidResponse(e))})},a.prototype.addSubscription=function(t,n,e,r){if(!this.provider.on)throw new Error("The provider doesn't support subscriptions: "+this.provider.constructor.name);this.subscriptions[t]={callback:r,type:e,name:n}},a.prototype.removeSubscription=function(t,n){var e=this;this.subscriptions[t]&&(this.send({method:this.subscriptions[t].type+"_unsubscribe",params:[t]},n),delete e.subscriptions[t])},a.prototype.setProvider=function(t,n){var e=this;if(t&&"string"==typeof 
t&&this.providers)if(/^http:\/\//i.test(t))t=new this.providers.HttpProvider(t);else if(/^ws:\/\//i.test(t))t=new this.providers.WebsocketProvider(t);else if(t&&"object"===(void 0===n?"undefined":_typeof(n))&&"function"==typeof n.connect)t=new this.providers.IpcProvider(t,n);else if(t)throw new Error("Can't autodetect provider for \""+t+'"');this.provider&&this.clearSubscriptions(),this.provider=t,this.provider&&this.provider.on&&this.provider.on("data",function(t,n){t?Object.keys(e.subscriptions).forEach(function(n){e.subscriptions[n].callback&&e.subscriptions[n].callback(t)}):e.subscriptions[n.params.subscription]&&e.subscriptions[n.params.subscription].callback&&e.subscriptions[n.params.subscription].callback(null,n.params.result)})},a.prototype.clearSubscriptions=function(t){var n=this;Object.keys(this.subscriptions).forEach(function(e){t&&"syncing"===n.subscriptions[e].name||n.removeSubscription(e)}),this.provider.reset&&this.provider.reset()},n.exports={Manager:a,BatchManager:s}},{"./batch.js":7,"./givenProvider.js":8,"./jsonrpc.js":9,underscore:6,"web3-core-helpers":5,"web3-providers-http":13,"web3-providers-ipc":16,"web3-providers-ws":21}]},{},["RequestManager"])("RequestManager")}); | ||
elasticsearch.py | # Copyright (c) 2019-2020 SAP SE or an SAP affiliate company. All rights reserved. This file is
# licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import functools
import os
import json
import elasticsearch
import ci.util
import concourse.util
import model.elasticsearch
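# Helpers for building an Elasticsearch client (and per-build metadata) from the
# CI config set of the surrounding Concourse environment.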
def default_client_if_available():
if not ci.util._running_on_ci():
return None
cfg_factory = ci.util.ctx().cfg_factory()
cfg_set = cfg_factory.cfg_set(ci.util.current_config_set_name())
es_config = cfg_set.elasticsearch()
return from_cfg(elasticsearch_cfg=es_config)
def from_cfg(
elasticsearch_cfg:model.elasticsearch.ElasticSearchConfig
):
return ElasticSearchClient(
elasticsearch=_from_cfg(elasticsearch_cfg=elasticsearch_cfg)
)
def _from_cfg(
elasticsearch_cfg:model.elasticsearch.ElasticSearchConfig
):
credentials = elasticsearch_cfg.credentials()
return elasticsearch.Elasticsearch(
elasticsearch_cfg.endpoints(),
http_auth=(credentials.username(), credentials.passwd()),
)
@functools.lru_cache()
def _metadata_dict():
# XXX mv to concourse package; deduplicate with notify step
if not ci.util._running_on_ci():
return {}
build = concourse.util.find_own_running_build()
pipeline_metadata = concourse.util.get_pipeline_metadata()
config_set = ci.util.ctx().cfg_factory().cfg_set(pipeline_metadata.current_config_set_name)
concourse_cfg = config_set.concourse()
meta_dict = {
'build-id': build.id(),
'build-name': build.build_number(),
'build-job-name': pipeline_metadata.job_name,
'build-team-name': pipeline_metadata.team_name,
'build-pipeline-name': pipeline_metadata.pipeline_name,
'atc-external-url': concourse_cfg.external_url(),
}
# XXX deduplicate; mv to concourse package
meta_dict['concourse_url'] = ci.util.urljoin(
meta_dict['atc-external-url'],
'teams',
meta_dict['build-team-name'],
'pipelines',
meta_dict['build-pipeline-name'],
'jobs',
meta_dict['build-job-name'],
'builds',
meta_dict['build-name'],
)
# XXX do not hard-code env variables
meta_dict['effective_version'] = os.environ.get('EFFECTIVE_VERSION')
meta_dict['component_name'] = os.environ.get('COMPONENT_NAME')
meta_dict['creation_date'] = datetime.datetime.now().isoformat()
return meta_dict
class ElasticSearchClient:
def __init__(
self,
elasticsearch: elasticsearch.Elasticsearch,
):
self._api = elasticsearch
def store_document(
self,
index: str,
body: dict,
inject_metadata=True,
*args,
**kwargs,
):
ci.util.check_type(index, str)
ci.util.check_type(body, dict)
if 'doc_type' in kwargs:
raise ValueError(
'''
doc_type attribute has been deprecated - see:
https://www.elastic.co/guide/en/elasticsearch/reference/6.0/removal-of-types.html
'''
)
if inject_metadata and _metadata_dict():
md = _metadata_dict()
body['cc_meta'] = md
return self._api.index(
index=index,
doc_type='_doc',
body=body,
*args,
**kwargs,
)
def store_documents(
self,
index: str,
body: [dict],
inject_metadata=True,
*args,
**kwargs,
):
# Bulk-loading uses a special format: A json specifying index name and doc-type |
# The index json does not change for bulk-loading into a single index.
index_json = json.dumps({
'index': {
'_index': index,
'_type': '_doc'
}
})
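# A minimal sketch of the resulting newline-delimited bulk body for two
# documents (the index name "logs" and the fields are made-up
# placeholders, not values from this codebase):
#   {"index": {"_index": "logs", "_type": "_doc"}}
#   {"field": "value-1"}
#   {"index": {"_index": "logs", "_type": "_doc"}}
#   {"field": "value-2"}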
return self.store_bulk(
body='\n'.join([f'{index_json}\n{json.dumps(d)}' for d in body]),
inject_metadata=inject_metadata,
*args,
**kwargs,
)
def store_bulk(
self,
body: str,
inject_metadata=True,
*args,
**kwargs,
):
ci.util.check_type(body, str)
if inject_metadata and _metadata_dict():
def inject_meta(line):
parsed = json.loads(line)
if 'index' not in parsed:
parsed['cc_meta'] = md
return json.dumps(parsed)
return line
md = _metadata_dict()
patched_body = '\n'.join([inject_meta(line) for line in body.splitlines()])
body = patched_body
return self._api.bulk(
body=body,
*args,
**kwargs,
) | # (always _doc) followed by the actual document json. These pairs (one for each document)
# are then converted to newline delimited json |
itertools_repeat_map.py | #!/usr/bin/env python3
"""Using repeat() and map() |
for i in map(lambda x, y: (x, y, x * y), repeat(2), range(5)):
print('{:d} * {:d} = {:d}'.format(*i)) | """
#end_pymotw_header
from itertools import * |
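# The map() call above pairs the constant 2 with each value in range(5),
# so the script prints "2 * 0 = 0" through "2 * 4 = 8".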
help.rs | use cargo::util::{CliResult, CliError, Config};
#[derive(Deserialize)]
pub struct Options;
pub const USAGE: &'static str = "
Get some help with a cargo command.
Usage:
cargo help <command> | ";
pub fn execute(_: Options, _: &mut Config) -> CliResult {
// This is a dummy command just so that `cargo help help` works.
// The actual delegation of help flag to subcommands is handled by the
// cargo command.
Err(CliError::new("help command should not be executed directly".into(), 101))
} | cargo help -h | --help
Options:
-h, --help Print this message |
bootstrap.go | package bootstrap
import (
"context"
"fmt"
"strings"
"github.com/go-gorp/gorp"
"github.com/ovh/cds/engine/api/action"
"github.com/ovh/cds/engine/api/environment"
"github.com/ovh/cds/engine/api/group"
"github.com/ovh/cds/engine/api/integration"
"github.com/ovh/cds/engine/api/workflow"
"github.com/ovh/cds/sdk"
) | if err != nil {
return sdk.WithStack(err)
}
defer tx.Rollback() // nolint
if err := group.CreateDefaultGroup(tx, sdk.SharedInfraGroupName); err != nil {
return sdk.WrapError(err, "Cannot setup default %s group", sdk.SharedInfraGroupName)
}
if strings.TrimSpace(defaultValues.DefaultGroupName) != "" {
if err := group.CreateDefaultGroup(tx, defaultValues.DefaultGroupName); err != nil {
return sdk.WrapError(err, "Cannot setup default %s group", defaultValues.DefaultGroupName)
}
}
if err := group.InitializeDefaultGroupName(tx, defaultValues.DefaultGroupName); err != nil {
return sdk.WrapError(err, "Cannot InitializeDefaultGroupName")
}
if err := action.CreateBuiltinActions(tx); err != nil {
return sdk.WrapError(err, "Cannot setup builtin actions")
}
if err := environment.CreateBuiltinEnvironments(tx); err != nil {
return sdk.WrapError(err, "Cannot setup builtin environments")
}
if err := tx.Commit(); err != nil {
return sdk.WithStack(err)
}
if err := workflow.CreateBuiltinWorkflowHookModels(DBFunc()); err != nil {
return fmt.Errorf("cannot setup builtin workflow hook models: %v", err)
}
if err := workflow.CreateBuiltinWorkflowOutgoingHookModels(DBFunc()); err != nil {
return fmt.Errorf("cannot setup builtin workflow outgoing hook models: %v", err)
}
if err := integration.CreateBuiltinModels(DBFunc()); err != nil {
return fmt.Errorf("cannot setup integrations: %v", err)
}
return nil
} |
// InitiliazeDB initializes the database
func InitiliazeDB(ctx context.Context, defaultValues sdk.DefaultValues, DBFunc func() *gorp.DbMap) error {
tx, err := DBFunc().Begin() |
ddoscustompolicies.go | package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// DdosCustomPoliciesClient is the network Client
type DdosCustomPoliciesClient struct {
BaseClient
}
// NewDdosCustomPoliciesClient creates an instance of the DdosCustomPoliciesClient client.
func NewDdosCustomPoliciesClient(subscriptionID string) DdosCustomPoliciesClient {
return NewDdosCustomPoliciesClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
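// A minimal usage sketch of this client (the subscription id, resource
// group and policy names are placeholders; error handling elided):
//   client := NewDdosCustomPoliciesClient("<subscription-id>")
//   future, err := client.CreateOrUpdate(ctx, "my-rg", "my-policy", DdosCustomPolicy{})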
// NewDdosCustomPoliciesClientWithBaseURI creates an instance of the DdosCustomPoliciesClient client.
func NewDdosCustomPoliciesClientWithBaseURI(baseURI string, subscriptionID string) DdosCustomPoliciesClient |
// CreateOrUpdate creates or updates a DDoS custom policy.
// Parameters:
// resourceGroupName - the name of the resource group.
// ddosCustomPolicyName - the name of the DDoS custom policy.
// parameters - parameters supplied to the create or update operation.
func (client DdosCustomPoliciesClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, ddosCustomPolicyName string, parameters DdosCustomPolicy) (result DdosCustomPoliciesCreateOrUpdateFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/DdosCustomPoliciesClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, ddosCustomPolicyName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "CreateOrUpdate", result.Response(), "Failure sending request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client DdosCustomPoliciesClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, ddosCustomPolicyName string, parameters DdosCustomPolicy) (*http.Request, error) {
pathParameters := map[string]interface{}{
"ddosCustomPolicyName": autorest.Encode("path", ddosCustomPolicyName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-11-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
parameters.Etag = nil
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client DdosCustomPoliciesClient) CreateOrUpdateSender(req *http.Request) (future DdosCustomPoliciesCreateOrUpdateFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client DdosCustomPoliciesClient) CreateOrUpdateResponder(resp *http.Response) (result DdosCustomPolicy, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified DDoS custom policy.
// Parameters:
// resourceGroupName - the name of the resource group.
// ddosCustomPolicyName - the name of the DDoS custom policy.
func (client DdosCustomPoliciesClient) Delete(ctx context.Context, resourceGroupName string, ddosCustomPolicyName string) (result DdosCustomPoliciesDeleteFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/DdosCustomPoliciesClient.Delete")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.DeletePreparer(ctx, resourceGroupName, ddosCustomPolicyName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "Delete", result.Response(), "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client DdosCustomPoliciesClient) DeletePreparer(ctx context.Context, resourceGroupName string, ddosCustomPolicyName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"ddosCustomPolicyName": autorest.Encode("path", ddosCustomPolicyName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-11-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client DdosCustomPoliciesClient) DeleteSender(req *http.Request) (future DdosCustomPoliciesDeleteFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client DdosCustomPoliciesClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets information about the specified DDoS custom policy.
// Parameters:
// resourceGroupName - the name of the resource group.
// ddosCustomPolicyName - the name of the DDoS custom policy.
func (client DdosCustomPoliciesClient) Get(ctx context.Context, resourceGroupName string, ddosCustomPolicyName string) (result DdosCustomPolicy, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/DdosCustomPoliciesClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, resourceGroupName, ddosCustomPolicyName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client DdosCustomPoliciesClient) GetPreparer(ctx context.Context, resourceGroupName string, ddosCustomPolicyName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"ddosCustomPolicyName": autorest.Encode("path", ddosCustomPolicyName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-11-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client DdosCustomPoliciesClient) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client DdosCustomPoliciesClient) GetResponder(resp *http.Response) (result DdosCustomPolicy, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// UpdateTags updates the tags of a DDoS custom policy.
// Parameters:
// resourceGroupName - the name of the resource group.
// ddosCustomPolicyName - the name of the DDoS custom policy.
// parameters - parameters supplied to the update DDoS custom policy resource tags.
func (client DdosCustomPoliciesClient) UpdateTags(ctx context.Context, resourceGroupName string, ddosCustomPolicyName string, parameters TagsObject) (result DdosCustomPoliciesUpdateTagsFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/DdosCustomPoliciesClient.UpdateTags")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.UpdateTagsPreparer(ctx, resourceGroupName, ddosCustomPolicyName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "UpdateTags", nil, "Failure preparing request")
return
}
result, err = client.UpdateTagsSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.DdosCustomPoliciesClient", "UpdateTags", result.Response(), "Failure sending request")
return
}
return
}
// UpdateTagsPreparer prepares the UpdateTags request.
func (client DdosCustomPoliciesClient) UpdateTagsPreparer(ctx context.Context, resourceGroupName string, ddosCustomPolicyName string, parameters TagsObject) (*http.Request, error) {
pathParameters := map[string]interface{}{
"ddosCustomPolicyName": autorest.Encode("path", ddosCustomPolicyName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2018-11-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPatch(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ddosCustomPolicies/{ddosCustomPolicyName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// UpdateTagsSender sends the UpdateTags request. The method will close the
// http.Response Body if it receives an error.
func (client DdosCustomPoliciesClient) UpdateTagsSender(req *http.Request) (future DdosCustomPoliciesUpdateTagsFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// UpdateTagsResponder handles the response to the UpdateTags request. The method always
// closes the http.Response Body.
func (client DdosCustomPoliciesClient) UpdateTagsResponder(resp *http.Response) (result DdosCustomPolicy, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
| {
return DdosCustomPoliciesClient{NewWithBaseURI(baseURI, subscriptionID)}
} |
index.js | import express from 'express'
import featuresController from '../controllers/features'
import productsController from '../controllers/products'
const api = express.Router()
api.post('/feature', featuresController.createFeature) // ✔
api.get('/features', featuresController.getFeatures) // ✔
api.get('/feature/:id', featuresController.getFeature) // ✔ | api.put('/feature/:id', featuresController.updateFeature) // ✔
api.delete('/feature/:id', featuresController.deleteFeature) // ✔
api.post('/product', productsController.createProduct) // ✔
api.get('/products', productsController.getProducts) // ✔
api.get('/product/:id', productsController.getProduct) // ✔
api.get('/product/:id/feature', productsController.getProductFeatures) // ✔
api.put('/product/:id', productsController.updateProduct) // ✔
api.delete('/product/:id', productsController.deleteProduct) // ✔
export default api | api.get('/feature/:id/product', featuresController.getFeatureWithProduct) // ✔ |
testUtils.tsx | import { mount, ReactWrapper } from 'enzyme';
import React from 'react';
import { Provider } from 'react-redux';
import { compose, createStore, combineReducers } from 'redux';
import {
reducer as formReducer,
SubmissionError,
reduxForm,
change,
getFormValues,
} from 'redux-form';
export const withTestStore = (store) => (WrappedComponent) =>
(
<Provider store={store}>
<WrappedComponent />
</Provider>
);
export const withReduxForm = (WrappedComponent) => {
const reducer = combineReducers({ form: formReducer });
const store = createStore(reducer);
return withTestStore(store)(WrappedComponent);
};
export const withTestForm = (WrappedComponent) =>
reduxForm({ form: 'testForm' })(WrappedComponent);
export const mountTestForm: (component: React.ComponentType) => ReactWrapper =
compose(mount, withReduxForm, withTestForm);
export const errorOnSubmit = (error) => {
const formError = new SubmissionError(error);
let onSubmit;
const promise = new Promise<void>((resolve) => { | onSubmit = () => {
resolve();
throw formError;
};
});
return { onSubmit, promise };
};
export const setFieldValue = (wrapper, field, value) =>
wrapper.props().store.dispatch(change('testForm', field, value));
export const getTestFormValues = (wrapper) =>
getFormValues('testForm')(wrapper.props().store.getState()); | |
QuestionJobRunnerComponent.py | import requests
import datetime
import configparser
import json
import copy
from circuits import Component, handler
from events.JobCompleteEvent import JobCompleteEvent
from events.EntityPreprocessedEvent import EntityPreprocessedEvent
class QuestionJobRunnerComponent(Component):
config = configparser.ConfigParser()
with open('./components/jobRunner/api_config.json', 'r') as apiConfig:
config = json.load(apiConfig)
@handler("EntityPreprocessedEvent")
def handleEntityPreprocessedEvent(self, context):
if context.intent == 'question':
self.handleQuestionRequest(context)
def handleQuestionRequest(self, context):
response = requests.get(self.config["DUCKDUCKGO_API"]["URL"]["QUESTION"], params= {
'q': context.message,
'format': 'json' | formattedResponse = response.json()
if formattedResponse["Answer"] != "":
context.result["answer"] = formattedResponse["Answer"]
context.result["source"] = "DuckDuckGo"
elif formattedResponse["Definition"] != "":
context.result["answer"] = formattedResponse["Definition"]
context.result["source"] = formattedResponse["DefinitionSource"]
elif formattedResponse["AbstractText"] != "":
context.result["answer"] = formattedResponse["AbstractText"]
context.result["source"] = formattedResponse["AbstractSource"]
else:
context.intent = 'search-general'
self.fire(EntityPreprocessedEvent(context))
if context.intent == 'question':
self.fire(JobCompleteEvent(copy.deepcopy(context))) | }) |
views.py | from rest_framework.viewsets import ModelViewSet
from .models import Profile, Group
from .serializers import ProfileSerializers, GroupSerializers
from rest_framework.response import Response
from rest_framework.decorators import action
from itertools import chain
class ProfileViewSet(ModelViewSet):
serializer_class = ProfileSerializers
queryset = Profile.objects.all()
filterset_fields = ['id_dispositivo',]
class | (ModelViewSet):
serializer_class = GroupSerializers
queryset = Group.objects.all()
filterset_fields = ['token', ]
@action(methods=['post'], detail=True)
def adicionar_profile(self, request, pk):
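# merge the group's existing profiles with the posted ids and persist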
profiles = request.data['ids']
group = Group.objects.get(id=pk)
old_profiles = group.profiles.all()
all_profiles = chain(old_profiles, profiles)
group.profiles.set(all_profiles)
group.save()
serializer = self.get_serializer(group)
return Response(serializer.data)
@action(methods=['delete'], detail=True)
def retirar_profile(self, request, pk):
profiles = request.data['ids']
group = Group.objects.get(id=pk)
for id in profiles:
group.profiles.remove(id)
group.save()
serializer = self.get_serializer(group)
return Response(serializer.data)
| GroupViewSet |
RadioButton.js | import React from 'react';
import classNames from 'classnames';
const {any, bool, number, string} = React.PropTypes;
function noop() {}
export class | extends React.Component {
static propTypes = {
className: any,
checked: bool,
disabled: bool,
tabIndex: number,
name: string,
id: string
};
static defaultProps = {
checked: false,
disabled: false
};
state = {focused: false};
onFocus = event => this.setState({focused: true});
onBlur = event => this.setState({focused: false});
componentDidMount() {
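// link the hidden input and its label via a shared id (falling back to
// a random one) and forward focus from the wrapper div to the input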
const {button, input, label} = this.refs;
input.id = label.htmlFor = this.props.id || Math.random();
button.addEventListener('focus', event => input.focus());
}
render() {
const {id, checked, disabled, tabIndex, name, className, ...rest} = this.props;
return (
<div {...rest}
ref="button"
tabIndex="-1"
className={classNames('radio-button', className, {checked, disabled, focused: this.state.focused})}>
<input ref="input"
type="radio"
name={name}
disabled={disabled}
checked={checked}
onChange={noop}
onFocus={this.onFocus}
onBlur={this.onBlur}
tabIndex={tabIndex}/>
<label ref="label">
{this.props.children}
</label>
</div>
);
}
}
| RadioButton |
dataFetches.js | import { csv } from 'd3-fetch';
import { group, groups } from 'd3-array';
import countyDict from '../data/county_dict.json';
import fipsExceptions from '../util/fipsExceptions';
//this function calculates the number of new cases/deaths for each place
//d is datum, i is index of datum in values array, measure is either "totalCases" or "totalDeaths"
const calculateNew = (d, i, arr, measure) => {
if (i === 0) {
//if the first entry for the location, new cases are simply the number of cases
return d[measure];
} else {
//current day's figure minus previous day's figure
const differenceFromPrevious = d[measure] - arr[i - 1][measure];
//if there are more or the same, return new cases/deaths
if (Math.sign(differenceFromPrevious) !== -1) {
return differenceFromPrevious;
//if there are fewer cases/deaths than the day before, return 0 new cases/deaths
} else if (Math.sign(differenceFromPrevious) === -1) {
return 0;
}
}
}
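//e.g. a location reporting totalCases [5, 8, 7] yields newCases [5, 3, 0]:
//8 - 5 = 3 new cases, and the drop from 8 to 7 is clamped to 0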
//add newCases and newDeaths and push to a new array
const createGroupedMap = (oldEntriesMap) => {
//creating new map which will be returned at the end
const groupedMap = new Map();
oldEntriesMap.forEach((municipalityData, municipality) => {
//data which will contain previous data, plus newCases and newDeaths
const newData = [];
municipalityData.forEach((d, i) => {
const newCases = calculateNew(d, i, municipalityData, 'totalCases');
const newDeaths = calculateNew(d, i, municipalityData, 'totalDeaths');
const newEntry = {
...d,
newCases,
newDeaths,
};
//push the new entry to the newData array
newData.push(newEntry);
});
//set the current municipality (key) to the newData array (value)
groupedMap.set(municipality, newData);
});
//return the final grouped map
return groupedMap;
}
export const fetchCountryNyt = async () => {
try {
const url = 'https://raw.githubusercontent.com/nytimes/covid-19-data/master/us.csv';
const countryRes = await csv(url, d => {
//renaming and removing old keys
const entry = {
...d,
totalCases: +d.cases,
totalDeaths: +d.deaths
};
const { cases, deaths, ...formatted } = entry;
return formatted;
});
const withNew = countryRes.map((d, i) => {
const newCases = calculateNew(d, i, countryRes, 'totalCases');
const newDeaths = calculateNew(d, i, countryRes, 'totalDeaths');
return {
...d,
newCases,
newDeaths,
};
});
return withNew;
} catch (e) {
console.error(e);
}
}
export const fetchStateNyt = async () => {
try {
const url = 'https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-states.csv';
const stateRes = await csv(url, d => {
//renaming and removing old keys
const entry = {
...d,
totalCases: +d.cases,
totalDeaths: +d.deaths
};
const { cases, deaths, ...formatted } = entry;
return formatted;
});
const groupByState = group(stateRes, d => d.state);
return createGroupedMap(groupByState);
} catch (e) {
console.error(e);
}
}
export const fetchCountyNyt = async () => {
const url = 'https://raw.githubusercontent.com/nytimes/covid-19-data/master/us-counties.csv';
try {
const countyRes = await csv(url, d => {
//renaming and removing old keys, adding coordinates
const entry = {
...d,
totalCases: +d.cases,
totalDeaths: +d.deaths,
coordinates: countyDict[d.fips]
};
const { cases, deaths, ...formatted } = entry;
//return custom coordinates and fips code
const returnCoordinates = (coordinates, fipsException ) => {
return {
...formatted,
fips: fipsException,
coordinates
}
}
//localities with no fips code but are included in data
if (d.county === 'New York City') {
return returnCoordinates([-74.006, 40.713], fipsExceptions.nyc);
} else if (d.county === 'Kansas City' && d.state === 'Missouri') {
return returnCoordinates([-94.579, 39.100], fipsExceptions.kc)
} else if (d.state === 'Puerto Rico') {
return returnCoordinates([-66.430, 18.222], fipsExceptions.pr);
} else if (d.state === 'Guam') {
return returnCoordinates([144.794, 13.444], fipsExceptions.guam);
} else if (d.state === 'Virgin Islands') { | }
else {
return formatted;
}
});
const haveFips = countyRes.filter(d => d.fips);
const groupByFips = group(haveFips, d => d.fips);
return createGroupedMap(groupByFips);
} catch (e) {
console.error(e);
}
} | return returnCoordinates([-64.896, 18.336], fipsExceptions.vi);
} else if (d.state === 'Northern Mariana Islands') {
return returnCoordinates([145.674, 15.098], fipsExceptions.nmi); |
lib.rs | // This file is part of Webb.
// Copyright (C) 2021 Webb Technologies Inc.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # Linkable-tree Module
//!
//! A module for constructing, modifying and inspecting linkable trees.
//!
//! ## Overview
//!
//! The Linkable-tree module provides functionality for the following:
//!
//! * Creating new linkable trees
//! * Inserting new leafs to a specified tree
//! * Adding an edge to a specified tree
//! * Updating an edge to a specified tree
//! * Inspecting a tree's state
//!
//! The supported dispatchable functions are documented in the [`Call`] enum.
//!
//! ### Terminology
//!
//! * **EdgeList**: A map of trees and chain ids to their edge metadata.
//!
//! ### Goals
//!
//! The Linkable-tree in Webb is designed to make the following possible:
//!
//! * Store edges of neighboring anchors’ merkle roots
//! * Store historical data about neighboring merkle roots
//!
//! ## LinkableTreeInterface Interface
//!
//! `create`: Creates a new linkable tree.
//! `insert_in_order`: Inserts new leaf to the tree specified by provided id.
//! `add_edge`: Adds an edge to tree specified by provided id.
//! `update_edge`: Updates an edge to tree specified by provided id.
//!
//! ## LinkableTreeInspector Interface
//!
//! `get_chain_id`: Gets the chain id of the current chain.
//! `is_known_root`: Checks if a merkle root is in a tree's cached history or returns
//! `TreeDoesntExist`.
//! `ensure_known_root`: Ensure that passed root is in history.
//! `get_root`: Gets the merkle root for a tree or returns `TreeDoesntExist`.
//! `get_neighbor_roots`: Gets the neighboring merkle roots for a tree or returns `TreeDoesntExist`.
//! `is_known_neighbor_root`: Checks if a merkle root is in a tree's cached history or returns
//! `TreeDoesntExist`.
//! `ensure_known_neighbor_roots`: Checks if each root from passed root array is in tree's cached
//! history or returns `InvalidNeighborWithdrawRoot`.
//! `ensure_known_neighbor_root`: Checks if a merkle root is in a tree's cached history or returns
//! `InvalidNeighborWithdrawRoot`.
//! `has_edge`: Check if this linked tree has this edge.
//! `ensure_max_edges`: Check if passed number of roots is the same as max allowed edges or returns
//! `InvalidMerkleRoots`.
// Ensure we're `no_std` when compiling for Wasm.
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(test)]
pub mod mock;
#[cfg(test)]
mod tests;
mod benchmarking;
pub mod types;
pub mod weights;
use codec::{Decode, Encode};
use frame_support::{ensure, pallet_prelude::DispatchError, traits::Get};
use sp_runtime::traits::{AtLeast32Bit, One, Saturating, Zero};
use sp_std::prelude::*;
use types::*;
use webb_primitives::{
traits::{linkable_tree::*, merkle_tree::*},
utils::compute_chain_id_type,
ElementTrait,
};
pub use weights::WeightInfo;
pub use pallet::*;
#[frame_support::pallet]
pub mod pallet {
use super::*;
use frame_support::{dispatch::DispatchResultWithPostInfo, pallet_prelude::*};
use frame_system::pallet_prelude::*;
#[pallet::pallet]
#[pallet::generate_store(pub(super) trait Store)]
#[pallet::without_storage_info]
pub struct Pallet<T, I = ()>(_);
#[pallet::config]
/// The module configuration trait.
pub trait Config<I: 'static = ()>: frame_system::Config + pallet_mt::Config<I> {
/// The overarching event type.
type Event: From<Event<Self, I>> + IsType<<Self as frame_system::Config>::Event>;
/// ChainID for anchor edges
type ChainId: Encode + Decode + Parameter + AtLeast32Bit + Default + Copy;
/// ChainID type for this chain
#[pallet::constant]
type ChainType: Get<[u8; 2]>;
// Getter of id of the current chain
#[pallet::constant]
type ChainIdentifier: Get<Self::ChainId>;
/// The tree
type Tree: TreeInterface<Self::AccountId, Self::TreeId, Self::Element>
+ TreeInspector<Self::AccountId, Self::TreeId, Self::Element>;
/// The pruning length for neighbor root histories
#[pallet::constant]
type HistoryLength: Get<Self::RootIndex>;
/// Weight info for pallet
type WeightInfo: WeightInfo;
}
/// The map of trees to the maximum number of anchor edges they can have
#[pallet::storage]
#[pallet::getter(fn max_edges)]
pub type MaxEdges<T: Config<I>, I: 'static = ()> =
StorageMap<_, Blake2_128Concat, T::TreeId, u32, ValueQuery>;
/// The map of trees and chain ids to their edge metadata
#[pallet::storage]
#[pallet::getter(fn edge_list)]
pub type EdgeList<T: Config<I>, I: 'static = ()> = StorageDoubleMap<
_,
Blake2_128Concat,
T::TreeId,
Blake2_128Concat,
T::ChainId,
EdgeMetadata<T::ChainId, T::Element, T::LeafIndex>,
ValueQuery,
>;
/// A helper map for denoting whether an tree is bridged to given chain
#[pallet::storage]
#[pallet::getter(fn linkable_tree_has_edge)]
pub type LinkableTreeHasEdge<T: Config<I>, I: 'static = ()> =
StorageMap<_, Blake2_128Concat, (T::TreeId, T::ChainId), bool, ValueQuery>;
/// The map of (tree, chain id) pairs to their latest recorded merkle root
#[pallet::storage]
#[pallet::getter(fn neighbor_roots)]
pub type NeighborRoots<T: Config<I>, I: 'static = ()> = StorageDoubleMap<
_,
Blake2_128Concat,
(T::TreeId, T::ChainId),
Blake2_128Concat,
T::RootIndex,
T::Element,
>;
/// The next neighbor root index to store the merkle root update record
#[pallet::storage]
#[pallet::getter(fn curr_neighbor_root_index)]
pub type CurrentNeighborRootIndex<T: Config<I>, I: 'static = ()> =
StorageMap<_, Blake2_128Concat, (T::TreeId, T::ChainId), T::RootIndex, ValueQuery>;
#[pallet::event]
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T: Config<I>, I: 'static = ()> {
/// New tree created
LinkableTreeCreation { tree_id: T::TreeId },
}
#[pallet::error]
pub enum Error<T, I = ()> {
// Root is not found in history
UnknownRoot,
/// Invalid Merkle Roots
InvalidMerkleRoots,
/// Invalid neighbor root passed in withdrawal
/// (neighbor root is not in neighbor history)
InvalidNeighborWithdrawRoot,
/// Anchor is at maximum number of edges for the given tree
TooManyEdges,
/// Edge already exists
EdgeAlreadyExists,
/// Edge does not exist
EdgeDoesntExists,
}
#[pallet::hooks]
impl<T: Config<I>, I: 'static> Hooks<BlockNumberFor<T>> for Pallet<T, I> {}
#[pallet::call]
impl<T: Config<I>, I: 'static> Pallet<T, I> {
#[pallet::weight(<T as Config<I>>::WeightInfo::create(*depth as u32, *max_edges))]
pub fn create(
origin: OriginFor<T>,
max_edges: u32,
depth: u8,
) -> DispatchResultWithPostInfo {
ensure_root(origin)?;
let tree_id = <Self as LinkableTreeInterface<_>>::create(None, max_edges, depth)?;
Self::deposit_event(Event::LinkableTreeCreation { tree_id });
Ok(().into())
}
}
}
pub struct LinkableTreeConfigration<T: Config<I>, I: 'static>(
core::marker::PhantomData<T>,
core::marker::PhantomData<I>,
);
impl<T: Config<I>, I: 'static> LinkableTreeConfig for LinkableTreeConfigration<T, I> {
type AccountId = T::AccountId;
type ChainId = T::ChainId;
type Element = T::Element;
type LeafIndex = T::LeafIndex;
type TreeId = T::TreeId;
}
impl<T: Config<I>, I: 'static> LinkableTreeInterface<LinkableTreeConfigration<T, I>>
for Pallet<T, I>
{
fn create(
creator: Option<T::AccountId>,
max_edges: u32,
depth: u8,
) -> Result<T::TreeId, DispatchError> {
let id = T::Tree::create(creator, depth)?;
MaxEdges::<T, I>::insert(id, max_edges);
Ok(id)
}
fn insert_in_order(id: T::TreeId, leaf: T::Element) -> Result<T::Element, DispatchError> {
T::Tree::insert_in_order(id, leaf)
}
fn ad | id: T::TreeId,
src_chain_id: T::ChainId,
root: T::Element,
latest_leaf_index: T::LeafIndex,
target: T::Element,
) -> Result<(), DispatchError> {
// ensure edge doesn't exists
ensure!(
!EdgeList::<T, I>::contains_key(id, src_chain_id),
Error::<T, I>::EdgeAlreadyExists
);
// ensure anchor isn't at maximum edges
let max_edges: u32 = Self::max_edges(id);
let curr_length = EdgeList::<T, I>::iter_prefix_values(id).into_iter().count();
ensure!(max_edges > curr_length as u32, Error::<T, I>::TooManyEdges);
// craft edge
let e_meta = EdgeMetadata { src_chain_id, root, latest_leaf_index, target };
// update historical neighbor list for this edge's root
let neighbor_root_inx = CurrentNeighborRootIndex::<T, I>::get((id, src_chain_id));
CurrentNeighborRootIndex::<T, I>::insert(
(id, src_chain_id),
(neighbor_root_inx + T::RootIndex::one()) % T::HistoryLength::get(),
);
NeighborRoots::<T, I>::insert((id, src_chain_id), neighbor_root_inx, root);
// Append new edge to the end of the edge list for the given tree
EdgeList::<T, I>::insert(id, src_chain_id, e_meta);
Ok(())
}
fn update_edge(
id: T::TreeId,
src_chain_id: T::ChainId,
root: T::Element,
latest_leaf_index: T::LeafIndex,
target: T::Element,
) -> Result<(), DispatchError> {
ensure!(EdgeList::<T, I>::contains_key(id, src_chain_id), Error::<T, I>::EdgeDoesntExists);
let e_meta = EdgeMetadata { src_chain_id, root, latest_leaf_index, target };
let neighbor_root_inx = (CurrentNeighborRootIndex::<T, I>::get((id, src_chain_id)) +
T::RootIndex::one()) %
T::HistoryLength::get();
CurrentNeighborRootIndex::<T, I>::insert((id, src_chain_id), neighbor_root_inx);
NeighborRoots::<T, I>::insert((id, src_chain_id), neighbor_root_inx, root);
EdgeList::<T, I>::insert(id, src_chain_id, e_meta);
Ok(())
}
}
impl<T: Config<I>, I: 'static> LinkableTreeInspector<LinkableTreeConfigration<T, I>>
for Pallet<T, I>
{
fn get_chain_id() -> T::ChainId {
T::ChainIdentifier::get()
}
fn get_chain_id_type() -> T::ChainId {
T::ChainId::try_from(compute_chain_id_type(T::ChainIdentifier::get(), T::ChainType::get()))
.unwrap_or_default()
}
fn get_chain_type() -> [u8; 2] {
T::ChainType::get()
}
fn get_root(id: T::TreeId) -> Result<T::Element, DispatchError> {
T::Tree::get_root(id)
}
fn is_known_root(id: T::TreeId, root: T::Element) -> Result<bool, DispatchError> {
T::Tree::is_known_root(id, root)
}
fn ensure_known_root(id: T::TreeId, root: T::Element) -> Result<(), DispatchError> {
let known_root = Self::is_known_root(id, root)?;
ensure!(known_root, Error::<T, I>::UnknownRoot);
Ok(())
}
fn get_neighbor_roots(tree_id: T::TreeId) -> Result<Vec<T::Element>, DispatchError> {
let edges = Self::get_neighbor_edges(tree_id)?;
let roots = edges.iter().map(|e| e.root).collect::<Vec<_>>();
Ok(roots)
}
fn is_known_neighbor_root(
tree_id: T::TreeId,
src_chain_id: T::ChainId,
target_root: T::Element,
) -> Result<bool, DispatchError> {
// If the src chain is default (empty edge) ensure that the provided target is the default
// root. This is to allow users to prove against partial edge lists that aren't at
// capacity, but prevent them from providing their own fake edges / roots.
if src_chain_id == T::ChainId::default() {
return Ok(target_root == T::Tree::get_default_root(tree_id)?)
}
let get_next_inx = |inx: T::RootIndex| {
if inx.is_zero() {
T::HistoryLength::get().saturating_sub(One::one())
} else {
inx.saturating_sub(One::one())
}
};
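// Walk the circular history buffer backwards from the current index;
// e.g. with HistoryLength = 30 and a current index of 3, the probe
// order is 3, 2, 1, 0, 29, 28, ... down to 4.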
let curr_root_inx = CurrentNeighborRootIndex::<T, I>::get((tree_id, src_chain_id));
let mut historical_root =
NeighborRoots::<T, I>::get((tree_id, src_chain_id), curr_root_inx)
.unwrap_or_else(|| T::Element::from_bytes(&[0; 32]));
if target_root == historical_root {
return Ok(true)
}
let mut i = get_next_inx(curr_root_inx);
while i != curr_root_inx {
historical_root = NeighborRoots::<T, I>::get((tree_id, src_chain_id), i)
.unwrap_or_else(|| T::Element::from_bytes(&[0; 32]));
if target_root == historical_root {
return Ok(true)
}
if i == Zero::zero() {
i = T::HistoryLength::get();
}
i -= One::one();
}
Ok(false)
}
fn has_edge(id: T::TreeId, src_chain_id: T::ChainId) -> bool {
EdgeList::<T, I>::contains_key(id, src_chain_id)
}
fn ensure_max_edges(id: T::TreeId, num_roots: usize) -> Result<(), DispatchError> {
let m = MaxEdges::<T, I>::get(id) as usize;
ensure!(num_roots == m + 1, Error::<T, I>::InvalidMerkleRoots);
Ok(())
}
fn ensure_known_neighbor_roots(
id: T::TreeId,
neighbor_roots: &Vec<T::Element>,
) -> Result<(), DispatchError> {
let max_edges = MaxEdges::<T, I>::get(id);
ensure!(
neighbor_roots.len() as u32 == max_edges,
Error::<T, I>::InvalidNeighborWithdrawRoot
);
let edges = Self::get_neighbor_edges(id)?;
for (i, edge_metadata) in edges.iter().enumerate() {
Self::ensure_known_neighbor_root(id, edge_metadata.src_chain_id, neighbor_roots[i])?;
}
Ok(())
}
fn ensure_known_neighbor_root(
id: T::TreeId,
src_chain_id: T::ChainId,
target: T::Element,
) -> Result<(), DispatchError> {
let is_known = Self::is_known_neighbor_root(id, src_chain_id, target)?;
ensure!(is_known, Error::<T, I>::InvalidNeighborWithdrawRoot);
Ok(())
}
}
impl<T: Config<I>, I: 'static> Pallet<T, I> {
pub fn get_neighbor_edges(
tree_id: T::TreeId,
) -> Result<Vec<EdgeMetadata<T::ChainId, T::Element, T::LeafIndex>>, DispatchError> {
let mut edges = EdgeList::<T, I>::iter_prefix_values(tree_id)
.into_iter()
.collect::<Vec<EdgeMetadata<_, _, _>>>();
// Add missing and default edges
let max_edges = MaxEdges::<T, I>::get(tree_id);
let default_root = T::Tree::get_default_root(tree_id)?;
while max_edges as usize > edges.len() {
edges.push(EdgeMetadata {
src_chain_id: T::ChainId::default(),
root: default_root,
latest_leaf_index: T::LeafIndex::default(),
target: T::Element::from_bytes(&[0; 32]),
});
}
Ok(edges)
}
}
| d_edge(
|
version.go | package mysql
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// UserAgent returns the UserAgent string to use when sending http.Requests.
func UserAgent() string {
return "Azure-SDK-For-Go/v12.2.0-beta services"
}
// Version returns the semantic version (see http://semver.org) of the client.
func Version() string {
return "v12.2.0-beta"
} | ||
main.rs | #![deny(unsafe_code)]
#![no_main]
#![no_std]
#![allow(dead_code)]
#![allow(unused_imports)]
extern crate cortex_m;
#[macro_use]
extern crate librobot;
#[macro_use(entry, exception)]
extern crate cortex_m_rt as rt;
#[macro_use(block)]
extern crate nb;
extern crate drs_0x01;
extern crate embedded_hal;
extern crate panic_semihosting;
#[macro_use]
extern crate stm32f446_hal;
use drs_0x01::prelude::*;
mod communicator;
mod robot;
mod servo;
use core::cell::RefCell;
use robot::{init_peripherals, Robot};
use cortex_m::asm;
use embedded_hal::serial::{Read, Write};
use rt::ExceptionFrame;
use stm32f446_hal::prelude::*;
use stm32f446_hal::stm32f446;
use librobot::arrayvec::ArrayVec;
use librobot::trame_reader::TrameReader;
use librobot::Trame;
use drs_0x01::addr::WritableRamAddr;
use servo::ServoManager;
entry!(main);
fn handle_trame(trame: Trame) -> Option<Trame> {
match (
trame.id,
trame.cmd,
&trame.data[0..trame.data_length as usize],
) {
(0...5, 0x0, [0x55]) => Some(trame!(trame.id, 0x00, [0xAA])),
(_, _, _) => None,
}
}
/// Sends 3 initialization messages to the servomotors:
/// * Reboot
/// * Always reply with an ack (for debugging)
/// * Enable torque
fn init_servo(robot: &mut Robot) {
let servos = ServoManager::new();
let m2 = servos[0xFE].reboot();
for b in m2 {
block!(robot.servo_tx.write(b)).unwrap();
}
for _ in 0..5 {
robot.delay.delay_ms(70 as u32);
}
let m2 = servos[0xFE].ram_write(WritableRamAddr::AckPolicy(2));
for b in m2 {
block!(robot.servo_tx.write(b)).unwrap();
}
let m1 = servos[0xFE].enable_torque();
for b in m1 {
block!(robot.servo_tx.write(b)).unwrap();
}
}
fn | () -> ! {
let mut robot = init_peripherals(
stm32f446::Peripherals::take().unwrap(),
cortex_m::Peripherals::take().unwrap(),
);
init_servo(&mut robot);
let mut reader = TrameReader::new();
loop {
let b = block!(robot.pc_rx.read()).unwrap();
reader.step(b);
if let Some(trame) = reader.pop_trame() {
asm::bkpt();
}
/*
let mess = servos[0x05].stat();
for b in mess {
block!(robot.servo_tx.write(b)).unwrap();
}
robot.delay.delay_ms(70 as u16);
*/
/*
if let Ok(byte) = pc_rx.read() {
reader.step(byte);
}
if let Some(trame) = reader.pop_trame() {
if let Some(sent) = handle_trame(trame) {
let (arr, size): ([u8; 15], usize) = sent.into();
for b in arr[0..size].iter() {
block!(pc_tx.write(*b)).unwrap();
}
}
}*/
}
}
interrupt!(USART6, usart_pc);
fn usart_pc() {}
exception!(HardFault, hard_fault);
fn hard_fault(ef: &ExceptionFrame) -> ! {
panic!("Hardfault... : {:#?}", ef);
}
exception!(*, default_handler);
fn default_handler(irqn: i16) {
panic!("Unhandled exception (IRQn = {})", irqn);
}
| main |
resources.go | /*
Copyright 2021 The KubeOne Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package resources
import (
"k8c.io/kubeone/pkg/certificate/cabundle"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// Names of the internal addons
const (
AddonCCMAws = "ccm-aws"
AddonCCMAzure = "ccm-azure"
AddonCCMDigitalOcean = "ccm-digitalocean"
AddonCCMHetzner = "ccm-hetzner"
AddonCCMOpenStack = "ccm-openstack"
AddonCCMEquinixMetal = "ccm-equinixmetal"
AddonCCMPacket = "ccm-packet" // TODO: Remove after deprecation period.
AddonCCMVsphere = "ccm-vsphere"
AddonCSIAwsEBS = "csi-aws-ebs"
AddonCSIAzureDisk = "csi-azuredisk"
AddonCSIAzureFile = "csi-azurefile"
AddonCSIHetzner = "csi-hetzner"
AddonCSIOpenStackCinder = "csi-openstack-cinder"
AddonCSIVsphere = "csi-vsphere"
AddonCNICanal = "cni-canal"
AddonCNICilium = "cni-cilium"
AddonCNIWeavenet = "cni-weavenet"
AddonMachineController = "machinecontroller"
AddonMetricsServer = "metrics-server"
AddonNodeLocalDNS = "nodelocaldns"
)
const (
NodeLocalDNSVirtualIP = "169.254.20.10"
)
const (
// names used for deployments/labels/etc
MachineControllerName = "machine-controller"
MachineControllerNameSpace = metav1.NamespaceSystem
MachineControllerWebhookName = "machine-controller-webhook"
MetricsServerName = "metrics-server"
MetricsServerNamespace = metav1.NamespaceSystem
VsphereCSIWebhookName = "vsphere-webhook-svc"
VsphereCSIWebhookNamespace = metav1.NamespaceSystem
)
const (
TLSCertName = "cert.pem"
TLSKeyName = "key.pem"
KubernetesCACertName = "ca.pem"
)
const (
KubeletImageRepository = "quay.io/kubermatic/kubelet"
)
func | () map[string]string {
return map[string]string{
"MachineControllerName": MachineControllerName,
"MachineControllerNameSpace": MachineControllerNameSpace,
"MachineControllerWebhookName": MachineControllerWebhookName,
"KubeletImageRepository": KubeletImageRepository,
"NodeLocalDNSVirtualIP": NodeLocalDNSVirtualIP,
"CABundleSSLCertFilePath": cabundle.SSLCertFilePath,
}
}
| All |
jsDocThisTagTests.ts | import { nameof, SyntaxKind } from "@ts-morph/common";
import { expect } from "chai";
import { JSDocThisTag } from "../../../../compiler";
import { getInfoFromTextWithDescendant } from "../../testHelpers";
describe("JSDocThisTag", () => {
function getInfo(text: string) {
return getInfoFromTextWithDescendant<JSDocThisTag>(text, SyntaxKind.JSDocThisTag);
}
describe(nameof<JSDocThisTag>("getTypeExpression"), () => {
function | (text: string, expectedValue: string | undefined) {
const { descendant } = getInfo(text);
expect(descendant.getTypeExpression()?.getTypeNode().getText()).to.equal(expectedValue);
if (expectedValue == null)
expect(() => descendant.getTypeExpressionOrThrow()).to.throw();
else
expect(descendant.getTypeExpressionOrThrow().getTypeNode().getText()).to.equal(expectedValue);
}
it("should get undefined when there is no type given", () => {
doTest("/** @this */\nfunction test() {}", undefined);
});
it("should get when type is given", () => {
doTest("/** @this {boolean} - String */\nfunction test() {}", "boolean");
});
});
});
| doTest |
github.py | try:
import vim
except ImportError:
raise ImportError(
'"vim" is not available. This module require to be loaded from Vim.'
)
#
# NOTE
# Vim use a global namespace for python/python3 so define a unique name
# function and write a code inside of the function to prevent conflicts.
#
def _vim_vital_web_api_github_main():
"""A namespace function for Vital.Web.API.GitHub"""
import re
import sys
import ssl
import collections
from itertools import chain
from threading import Lock, Thread
try:
import json
except ImportError:
import simplejson as json
try:
from urllib.request import urlopen, Request
from urllib.parse import (urlparse, parse_qs, urlencode, urlunparse)
except ImportError:
from urllib2 import urlopen, Request
from urllib import urlencode
from urlparse import (urlparse, parse_qs, urlunparse)
DEFAULT_INDICATOR = (
'Requesting entries and converting into '
'JSON %%(page)d/%(page_count)d ...'
)
def format_exception():
exc_type, exc_obj, tb = sys.exc_info()
f = tb.tb_frame
lineno = tb.tb_lineno
filename = f.f_code.co_filename
return "%s: %s at %s:%d" % (
exc_obj.__class__.__name__,
exc_obj, filename, lineno,
)
def to_vim(obj):
if obj is None:
return ''
elif isinstance(obj, bool):
return int(obj)
elif isinstance(obj, dict):
return dict([to_vim(k), to_vim(v)] for k, v in obj.items())
elif isinstance(obj, (list, tuple)):
return list(to_vim(v) for v in obj)
return obj
def build_headers(token):
return {'Authorization': 'token %s' % token} if token else {}
def build_url(url, **kwargs):
scheme, netloc, path, params, query, fragment = urlparse(url)
p = parse_qs(query)
p.update(kwargs)
return urlunparse([
scheme, netloc, path, params,
urlencode(p, doseq=True), fragment
])
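# e.g. build_url("https://host/path?a=1", page=2)
# -> "https://host/path?a=1&page=2" (host and params are placeholders)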
def request(url, headers={}, method=None):
if method:
if sys.version_info.major >= 3:
req = Request(url, headers=headers, method=method)
else:
req = Request(url, headers=headers)
req.get_method = lambda: method
else:
req = Request(url, headers=headers)
context = ssl._create_unverified_context()
res = urlopen(req, context=context)
if not hasattr(res, 'getheader'):
# urllib2 does not have getheader
res.getheader = lambda name, self=res: self.info().getheader(name)
return res
def request_head(url, name, headers={}):
|
def request_json(url, headers={}, **kwargs):
url = build_url(url, **kwargs)
res = request(url, headers=headers)
obj = json.loads(res.read().decode('utf-8'))
return to_vim(obj)
def _request_entries(lock, queue, entries_per_pages, url,
headers, callback=None):
try:
while True:
page, indicator = queue.popleft()
entries = request_json(url, headers=headers, page=page)
entries_per_pages.append([page, entries])
if callback:
message = indicator % {'page': len(entries_per_pages)}
if hasattr(vim, 'async_call'):
with lock:
vim.async_call(callback, message)
else:
with lock:
callback(message)
except IndexError:
pass
except Exception as e:
# clear queue to stop other threads
queue.clear()
entries_per_pages.append(e)
def request_entries(url, token,
indicator=DEFAULT_INDICATOR,
page_start=1, page_end=0,
nprocess=20, callback=None, **kwargs):
# the followings might be str when specified from Vim.
page_start = int(page_start)
page_end = int(page_end)
nprocess = int(nprocess)
url = build_url(url, **kwargs)
headers = build_headers(token)
lock = Lock()
queue = collections.deque()
entries_per_pages = collections.deque()
# figure out the number of pages from HEAD request
if page_end == 0:
if callback:
callback('Requesting the total number of pages ...')
response_link = request_head(url, 'link', headers=headers)
if response_link:
m = re.search(
'<.*?[?&]page=(\d+)[^>]*>; rel="last"', response_link
)
page_end = int(m.group(1)) if m else 1
else:
page_end = 1
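# Illustrative GitHub-style "Link" header the regex above matches
# (the URL and page count are placeholders):
#   <https://api.github.com/resource?page=2>; rel="next",
#   <https://api.github.com/resource?page=34>; rel="last"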
# prepare task queue
for page in range(page_start, page_end + 1):
queue.append([page, indicator % {
'url': url,
'page_count': page_end - page_start + 1
}])
# start workers
kwargs = dict(
target=_request_entries,
args=(lock, queue, entries_per_pages, url, headers, callback),
)
workers = [Thread(**kwargs) for n in range(nprocess)]
for worker in workers:
worker.start()
for worker in workers:
worker.join()
# check if sub-thread throw exceptions or not
exceptions = list(
filter(lambda x: not isinstance(x, list), entries_per_pages)
)
if len(exceptions):
raise exceptions[0]
# merge and flatten entries
return list(chain.from_iterable(map(
lambda x: x[1], sorted(entries_per_pages, key=lambda x: x[0])
)))
def echo_status_vim(indicator):
vim.command('redraw | echo "%s"' % indicator)
if sys.version_info < (3, 0, 0):
def ensure_unicode(s, encoding):
if isinstance(s, unicode):
return s
else:
return s.decode(encoding)
else:
def ensure_unicode(s, encoding):
if not isinstance(s, bytes):
return s
else:
return s.decode(encoding)
# Execute a main code
namespace = {}
try:
# Override 'request' with 'pseudo_requst' if exists
try:
request = _vim_vital_web_api_github_test_pseudo_request
except NameError:
pass
encoding = vim.eval('&encoding')
kwargs = vim.eval('kwargs')
kwargs = { ensure_unicode(k, encoding): ensure_unicode(v, encoding)
for k, v in kwargs.items()}
if kwargs.pop('verbose', 1):
kwargs['callback'] = echo_status_vim
entries = request_entries(**kwargs)
namespace['entries'] = entries
except:
namespace['exception'] = format_exception()
return namespace
# Call a namespace function
_vim_vital_web_api_github_response = _vim_vital_web_api_github_main()
| res = request(url, headers=headers, method='HEAD')
return res.getheader(name) |
test.py | import math
import scipy.integrate as integrate
ncalls = 0
def f(x):
|
ncalls +=1
return math.log(x)/math.sqrt(x)
result = integrate.quad(f,0,1)
print("result=", result, "ncalls =",ncalls) | global ncalls |
cymysql.py | # mysql/cymysql.py
# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
r"""
.. dialect:: mysql+cymysql
:name: CyMySQL
:dbapi: cymysql
:connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>[?<options>]
:url: https://github.com/nakagami/CyMySQL
.. note::
The CyMySQL dialect is **not tested as part of SQLAlchemy's continuous
integration** and may have unresolved issues. The recommended MySQL
dialects are mysqlclient and PyMySQL.
""" # noqa
from .base import BIT
from .base import MySQLDialect
from .mysqldb import MySQLDialect_mysqldb
from ... import util
class _cymysqlBIT(BIT):
def result_processor(self, dialect, coltype):
"""Convert MySQL's 64 bit, variable length binary string to a long."""
def process(value):
if value is not None:
v = 0
for i in util.iterbytes(value):
v = v << 8 | i
return v
return value
return process
| class MySQLDialect_cymysql(MySQLDialect_mysqldb):
driver = "cymysql"
supports_statement_cache = True
description_encoding = None
supports_sane_rowcount = True
supports_sane_multi_rowcount = False
supports_unicode_statements = True
colspecs = util.update_copy(MySQLDialect.colspecs, {BIT: _cymysqlBIT})
@classmethod
def dbapi(cls):
return __import__("cymysql")
def _detect_charset(self, connection):
return connection.connection.charset
def _extract_error_code(self, exception):
return exception.errno
def is_disconnect(self, e, connection, cursor):
if isinstance(e, self.dbapi.OperationalError):
return self._extract_error_code(e) in (
2006,
2013,
2014,
2045,
2055,
)
elif isinstance(e, self.dbapi.InterfaceError):
# if underlying connection is closed,
# this is the error you get
return True
else:
return False
dialect = MySQLDialect_cymysql | |
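# Usage sketch (not part of the dialect module; host and credentials are
# placeholders): the dialect is selected through the connect string.
#   from sqlalchemy import create_engine
#   engine = create_engine("mysql+cymysql://scott:tiger@localhost/test")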
ApplyBindPlatformRequest.ts | namespace Shine
{
/** Role bind-platform message (generated by shine) */
export class ApplyBindPlatfor | uest
{
/** Data type ID */
public static dataID:number=GameRequestType.ApplyBindPlatform;
public uid:string;
public platform:string;
constructor()
{
super();
this._dataID=GameRequestType.ApplyBindPlatform;
}
protected copyData():void
{
super.copyData();
}
/** Get the data class name */
public getDataClassName():string
{
return "ApplyBindPlatformRequest";
}
/** Write to byte stream (simplified) */
protected toWriteBytesSimple(stream:BytesWriteStream):void
{
super.toWriteBytesSimple(stream);
stream.writeUTF(this.uid);
stream.writeUTF(this.platform);
}
/** Create an instance */
public static createApplyBindPlatform(uid:string,platform:string):ApplyBindPlatformRequest
{
var re:ApplyBindPlatformRequest=new ApplyBindPlatformRequest();
re.uid=uid;
re.platform=platform;
return re;
}
}
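// Usage sketch (uid/platform values below are placeholders):
//   var req:ApplyBindPlatformRequest=ApplyBindPlatformRequest.createApplyBindPlatform("uid-123","ios");
//   // req can then be written to a BytesWriteStream via toWriteBytesSimple.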
}
| mRequest extends GameReq |
AbstractWebsocketClient.py | import json
import time
from threading import Thread
from websocket import create_connection, WebSocketConnectionClosedException
class AbstractWebsocketClient(object):
thread = None
stop = False
sub_params = {}
keep_alive = None
def start(self):
def _go():
self._connect()
self._listen()
self._disconnect()
self.stop = False
self.on_open()
self.thread = Thread(target=_go)
self.keep_alive = Thread(target=self._keepalive)
self.thread.start()
def _connect(self):
if self.url[-1] == "/":
self.url = self.url[:-1]
self.ws = create_connection(self.url)
self.ws.send(json.dumps(self.sub_params))
def | (self, interval=30):
while self.ws.connected:
self.ws.ping("keepalive")
time.sleep(interval)
def _listen(self):
self.keep_alive.start()
while not self.stop:
try:
data = self.ws.recv()
msg = json.loads(data)
except ValueError as e:
self.on_error(e)
except Exception as e:
self.on_error(e)
else:
self.on_message(msg)
def _disconnect(self):
try:
if self.ws:
self.ws.close()
except WebSocketConnectionClosedException:
pass
finally:
self.keep_alive.join()
self.on_close()
def close(self):
self.stop = True
self._disconnect()
self.thread.join()
def on_open(self):
pass
def on_close(self):
pass
def on_message(self, msg):
pass
def on_error(self, e, data=None):
pass
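# Minimal subclass sketch (illustrative only; the URL and subscription payload
# are placeholders, not a real endpoint):
#   class EchoClient(AbstractWebsocketClient):
#       url = "wss://example.com/ws"
#       sub_params = {"type": "subscribe"}
#       def on_message(self, msg):
#           print(msg)
#   client = EchoClient()
#   client.start()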
| _keepalive |
endianness.2asm.js | function asmFunc(global, env, buffer) {
"use asm";
var HEAP8 = new global.Int8Array(buffer);
var HEAP16 = new global.Int16Array(buffer);
var HEAP32 = new global.Int32Array(buffer);
var HEAPU8 = new global.Uint8Array(buffer);
var HEAPU16 = new global.Uint16Array(buffer);
var HEAPU32 = new global.Uint32Array(buffer);
var HEAPF32 = new global.Float32Array(buffer);
var HEAPF64 = new global.Float64Array(buffer);
var Math_imul = global.Math.imul;
var Math_fround = global.Math.fround;
var Math_abs = global.Math.abs;
var Math_clz32 = global.Math.clz32;
var Math_min = global.Math.min;
var Math_max = global.Math.max;
var Math_floor = global.Math.floor;
var Math_ceil = global.Math.ceil;
var Math_sqrt = global.Math.sqrt;
var abort = env.abort;
var nan = global.NaN;
var infinity = global.Infinity;
var i64toi32_i32$HIGH_BITS = 0;
function i16_store_little(address, value) {
address = address | 0;
value = value | 0;
var wasm2asm_i32$0 = 0, wasm2asm_i32$1 = 0;
wasm2asm_i32$0 = address;
wasm2asm_i32$1 = value;
HEAP8[wasm2asm_i32$0 >> 0] = wasm2asm_i32$1;
wasm2asm_i32$0 = address + 1 | 0;
wasm2asm_i32$1 = value >>> 8 | 0;
HEAP8[wasm2asm_i32$0 >> 0] = wasm2asm_i32$1;
}
function i32_store_little(address, value) {
address = address | 0;
value = value | 0;
i16_store_little(address | 0, value | 0);
i16_store_little(address + 2 | 0 | 0, value >>> 16 | 0 | 0);
}
function i64_store_little(address, value, value$hi) {
address = address | 0;
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$0 = 0, i64toi32_i32$4 = 0, i64toi32_i32$1 = 0, i64toi32_i32$3 = 0, $9_1 = 0, $6_1 = 0, i64toi32_i32$2 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$0 = i64toi32_i32$0;
i32_store_little(address | 0, value | 0);
$6_1 = address + 4 | 0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$2 = value;
i64toi32_i32$1 = 0;
i64toi32_i32$3 = 32;
i64toi32_i32$4 = i64toi32_i32$3 & 31 | 0;
if (32 >>> 0 <= (i64toi32_i32$3 & 63 | 0) >>> 0) {
i64toi32_i32$1 = 0;
$9_1 = i64toi32_i32$0 >>> i64toi32_i32$4 | 0;
} else {
i64toi32_i32$1 = i64toi32_i32$0 >>> i64toi32_i32$4 | 0;
$9_1 = (((1 << i64toi32_i32$4 | 0) - 1 | 0) & i64toi32_i32$0 | 0) << (32 - i64toi32_i32$4 | 0) | 0 | (i64toi32_i32$2 >>> i64toi32_i32$4 | 0) | 0;
}
i64toi32_i32$1 = i64toi32_i32$1;
i32_store_little($6_1 | 0, $9_1 | 0);
}
function i16_load_little(address) {
address = address | 0;
return HEAPU8[address >> 0] | 0 | ((HEAPU8[(address + 1 | 0) >> 0] | 0) << 8 | 0) | 0 | 0;
}
function i32_load_little(address) {
address = address | 0;
return i16_load_little(address | 0) | 0 | ((i16_load_little(address + 2 | 0 | 0) | 0) << 16 | 0) | 0 | 0;
}
function i64_load_little(address) {
address = address | 0;
var i64toi32_i32$0 = 0, i64toi32_i32$2 = 0, i64toi32_i32$1 = 0, i64toi32_i32$4 = 0, i64toi32_i32$3 = 0, $9_1 = 0, $3 = 0, $3$hi = 0, $8$hi = 0;
i64toi32_i32$0 = 0;
$3 = i32_load_little(address | 0) | 0;
$3$hi = i64toi32_i32$0;
i64toi32_i32$0 = 0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$2 = i32_load_little(address + 4 | 0 | 0) | 0;
i64toi32_i32$1 = 0;
i64toi32_i32$3 = 32;
i64toi32_i32$4 = i64toi32_i32$3 & 31 | 0;
if (32 >>> 0 <= (i64toi32_i32$3 & 63 | 0) >>> 0) {
i64toi32_i32$1 = i64toi32_i32$2 << i64toi32_i32$4 | 0;
$9_1 = 0;
} else {
i64toi32_i32$1 = ((1 << i64toi32_i32$4 | 0) - 1 | 0) & (i64toi32_i32$2 >>> (32 - i64toi32_i32$4 | 0) | 0) | 0 | (i64toi32_i32$0 << i64toi32_i32$4 | 0) | 0;
$9_1 = i64toi32_i32$2 << i64toi32_i32$4 | 0;
}
$8$hi = i64toi32_i32$1;
i64toi32_i32$1 = $3$hi;
i64toi32_i32$0 = $3;
i64toi32_i32$2 = $8$hi;
i64toi32_i32$3 = $9_1;
i64toi32_i32$2 = i64toi32_i32$1 | i64toi32_i32$2 | 0;
i64toi32_i32$2 = i64toi32_i32$2;
i64toi32_i32$0 = i64toi32_i32$0 | i64toi32_i32$3 | 0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$2;
return i64toi32_i32$0 | 0;
}
function $6(value) {
value = value | 0;
i16_store_little(0 | 0, value | 0);
return HEAP16[0 >> 1] | 0 | 0;
}
function $7(value) {
value = value | 0;
i16_store_little(0 | 0, value | 0);
return HEAPU16[0 >> 1] | 0 | 0;
}
function $8(value) {
value = value | 0;
i32_store_little(0 | 0, value | 0);
return HEAPU32[0 >> 2] | 0 | 0;
}
function $9(value, value$hi) {
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$0 = 0, i64toi32_i32$1 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$0 = i64toi32_i32$0;
i16_store_little(0 | 0, value | 0);
i64toi32_i32$0 = HEAP16[0 >> 1] | 0;
i64toi32_i32$1 = i64toi32_i32$0 >> 31 | 0;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$1;
return i64toi32_i32$0 | 0;
}
function $10(value, value$hi) {
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$1 = 0, i64toi32_i32$0 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$0 = i64toi32_i32$0;
i16_store_little(0 | 0, value | 0);
i64toi32_i32$0 = HEAPU16[0 >> 1] | 0;
i64toi32_i32$1 = 0;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$1;
return i64toi32_i32$0 | 0;
}
function $11(value, value$hi) {
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$0 = 0, i64toi32_i32$1 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$0 = i64toi32_i32$0;
i32_store_little(0 | 0, value | 0);
i64toi32_i32$0 = HEAP32[0 >> 2] | 0;
i64toi32_i32$1 = i64toi32_i32$0 >> 31 | 0;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$1;
return i64toi32_i32$0 | 0;
}
function $12(value, value$hi) {
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$1 = 0, i64toi32_i32$0 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$0 = i64toi32_i32$0;
i32_store_little(0 | 0, value | 0);
i64toi32_i32$0 = HEAPU32[0 >> 2] | 0;
i64toi32_i32$1 = 0;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$1;
return i64toi32_i32$0 | 0;
}
function $13(value, value$hi) {
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$0 = 0, i64toi32_i32$1 = 0, i64toi32_i32$2 = 0, wasm2asm_i32$0 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$0 = i64toi32_i32$0;
i64_store_little(0 | 0, value | 0, i64toi32_i32$0 | 0);
i64toi32_i32$2 = 0;
i64toi32_i32$0 = HEAPU32[i64toi32_i32$2 >> 2] | 0;
i64toi32_i32$1 = (wasm2asm_i32$0 = i64toi32_i32$2, HEAPU8[(wasm2asm_i32$0 + 4 | 0) >> 0] | 0 | 0 | (HEAPU8[(wasm2asm_i32$0 + 5 | 0) >> 0] | 0 | 0) << 8 | (HEAPU8[(wasm2asm_i32$0 + 6 | 0) >> 0] | 0 | 0) << 16 | (HEAPU8[(wasm2asm_i32$0 + 7 | 0) >> 0] | 0 | 0) << 24);
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$1;
return i64toi32_i32$0 | 0;
}
function $14(value) {
value = Math_fround(value);
i32_store_little(0 | 0, (HEAPF32[0] = value, HEAP32[0] | 0) | 0);
return Math_fround(Math_fround(HEAPF32[0 >> 2]));
}
function $15(value) {
value = +value;
var i64toi32_i32$0 = 0, wasm2asm_i32$0 = 0, wasm2asm_f64$0 = 0.0;
wasm2asm_i32$0 = 0;
wasm2asm_f64$0 = value;
HEAPF64[wasm2asm_i32$0 >> 3] = wasm2asm_f64$0;
i64toi32_i32$0 = HEAP32[(0 + 4 | 0) >> 2] | 0;
i64toi32_i32$0 = i64toi32_i32$0;
i64_store_little(0 | 0, HEAP32[0 >> 2] | 0 | 0, i64toi32_i32$0 | 0);
return +(+HEAPF64[0 >> 3]);
}
function | (value) {
value = value | 0;
var wasm2asm_i32$0 = 0, wasm2asm_i32$1 = 0;
wasm2asm_i32$0 = 0;
wasm2asm_i32$1 = value;
HEAP16[wasm2asm_i32$0 >> 1] = wasm2asm_i32$1;
return i16_load_little(0 | 0) | 0 | 0;
}
function $17(value) {
value = value | 0;
var wasm2asm_i32$0 = 0, wasm2asm_i32$1 = 0;
wasm2asm_i32$0 = 0;
wasm2asm_i32$1 = value;
HEAP32[wasm2asm_i32$0 >> 2] = wasm2asm_i32$1;
return i32_load_little(0 | 0) | 0 | 0;
}
function $18(value, value$hi) {
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$0 = 0, i64toi32_i32$1 = 0, wasm2asm_i32$0 = 0, wasm2asm_i32$1 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$0 = i64toi32_i32$0;
wasm2asm_i32$0 = 0;
wasm2asm_i32$1 = value;
HEAP16[wasm2asm_i32$0 >> 1] = wasm2asm_i32$1;
i64toi32_i32$0 = 0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$1 = i16_load_little(0 | 0) | 0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$0;
return i64toi32_i32$1 | 0;
}
function $19(value, value$hi) {
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$0 = 0, i64toi32_i32$1 = 0, wasm2asm_i32$0 = 0, wasm2asm_i32$1 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$0 = i64toi32_i32$0;
wasm2asm_i32$0 = 0;
wasm2asm_i32$1 = value;
HEAP32[wasm2asm_i32$0 >> 2] = wasm2asm_i32$1;
i64toi32_i32$0 = 0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$1 = i32_load_little(0 | 0) | 0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$0;
return i64toi32_i32$1 | 0;
}
function $20(value, value$hi) {
value = value | 0;
value$hi = value$hi | 0;
var i64toi32_i32$1 = 0, i64toi32_i32$0 = 0, wasm2asm_i32$0 = 0, wasm2asm_i32$1 = 0, wasm2asm_i32$2 = 0, wasm2asm_i32$3 = 0;
i64toi32_i32$0 = value$hi;
i64toi32_i32$1 = 0;
i64toi32_i32$0 = i64toi32_i32$0;
wasm2asm_i32$0 = i64toi32_i32$1;
wasm2asm_i32$1 = value;
HEAP32[wasm2asm_i32$0 >> 2] = wasm2asm_i32$1;
wasm2asm_i32$0 = i64toi32_i32$1;
wasm2asm_i32$1 = i64toi32_i32$0;
(wasm2asm_i32$2 = wasm2asm_i32$0, wasm2asm_i32$3 = wasm2asm_i32$1), ((HEAP8[(wasm2asm_i32$2 + 4 | 0) >> 0] = wasm2asm_i32$3 & 255 | 0, HEAP8[(wasm2asm_i32$2 + 5 | 0) >> 0] = (wasm2asm_i32$3 >>> 8 | 0) & 255 | 0), HEAP8[(wasm2asm_i32$2 + 6 | 0) >> 0] = (wasm2asm_i32$3 >>> 16 | 0) & 255 | 0), HEAP8[(wasm2asm_i32$2 + 7 | 0) >> 0] = (wasm2asm_i32$3 >>> 24 | 0) & 255 | 0;
i64toi32_i32$0 = i64_load_little(0 | 0) | 0;
i64toi32_i32$1 = i64toi32_i32$HIGH_BITS;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$1 = i64toi32_i32$1;
i64toi32_i32$0 = i64toi32_i32$0;
i64toi32_i32$HIGH_BITS = i64toi32_i32$1;
return i64toi32_i32$0 | 0;
}
function $21(value) {
value = Math_fround(value);
var wasm2asm_i32$0 = 0, wasm2asm_f32$0 = Math_fround(0);
wasm2asm_i32$0 = 0;
wasm2asm_f32$0 = value;
HEAPF32[wasm2asm_i32$0 >> 2] = wasm2asm_f32$0;
return Math_fround((HEAP32[0] = i32_load_little(0 | 0) | 0, HEAPF32[0]));
}
function $22(value) {
value = +value;
var i64toi32_i32$1 = 0, i64toi32_i32$0 = 0, wasm2asm_i32$0 = 0, wasm2asm_f64$0 = 0.0, wasm2asm_i32$1 = 0;
wasm2asm_i32$0 = 0;
wasm2asm_f64$0 = value;
HEAPF64[wasm2asm_i32$0 >> 3] = wasm2asm_f64$0;
i64toi32_i32$0 = i64_load_little(0 | 0) | 0;
i64toi32_i32$1 = i64toi32_i32$HIGH_BITS;
i64toi32_i32$1 = i64toi32_i32$1;
wasm2asm_i32$0 = 0;
wasm2asm_i32$1 = i64toi32_i32$0;
HEAP32[wasm2asm_i32$0 >> 2] = wasm2asm_i32$1;
wasm2asm_i32$0 = 0;
wasm2asm_i32$1 = i64toi32_i32$1;
HEAP32[(wasm2asm_i32$0 + 4 | 0) >> 2] = wasm2asm_i32$1;
return +(+HEAPF64[0 >> 3]);
}
return {
i32_load16_s: $6,
i32_load16_u: $7,
i32_load: $8,
i64_load16_s: $9,
i64_load16_u: $10,
i64_load32_s: $11,
i64_load32_u: $12,
i64_load: $13,
f32_load: $14,
f64_load: $15,
i32_store16: $16,
i32_store: $17,
i64_store16: $18,
i64_store32: $19,
i64_store: $20,
f32_store: $21,
f64_store: $22
};
}
| $16 |
passenger-search.component.ts | import { Component, OnInit, OnDestroy } from '@angular/core';
import { Http } from '@angular/http';
import { Observable } from 'rxjs/Observable';
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import { Subscription } from 'rxjs/Subscription';
import 'rxjs/add/observable/interval';
import 'rxjs/add/observable/from';
import 'rxjs/add/observable/fromEvent';
import 'rxjs/add/operator/startWith';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/switchMap';
import 'rxjs/add/operator/do';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/debounceTime';
import 'rxjs/add/operator/distinctUntilChanged';
import 'rxjs/add/observable/combineLatest';
import { AuthService } from '../../shared/auth/auth.service';
class Producer {
interval: any;
private listener = [];
addListener = fn => this.listener.push(fn);
constructor(id) {
let count = 0;
this.interval = setInterval(() => {
count++;
console.log('Producer ' + id + ': ' + count);
this.listener.forEach(fn => fn(count));
}, 1000);
}
}
@Component({
templateUrl: './passenger-search.component.html'
})
export class | implements OnInit, OnDestroy {
blackList: Array<any>;
timeSubscription: Subscription;
producer = new Producer('Hot-Observable');
hotSubscription: Subscription;
coldSubscription: Subscription;
timeObservable: Observable<any>;
inputSubject: BehaviorSubject<any>;
listObservable: Observable<any>;
loading: boolean = true;
name: string;
hotColdTimeout: any;
constructor(authService: AuthService, private http: Http) {
this.name = authService['userName'];
// An array is supplied to the Observable via the .from operator
Observable.from(['Claudia', 'Fritz', 'Peter'])
// The .map operator transforms each value
.map(person => person.toUpperCase())
.subscribe({
// After the sequence runs, each array value is passed to the next callback
next: (person) => {
console.log('Person in Großbuchstaben: ' + person);
},
// Once all persons have been processed, the stream ends and complete is called
complete: () => {
console.log('Alle Personen durchlaufen');
}
});
}
ngOnInit() {
// Teil 1
const observer = {
next: resp => this.blackList = resp,
error: err => console.error('Observer erhält einen Fehler: ' + err),
complete: () => console.log('Abarbeitung der Flugverbotsliste ist abgeschlossen'),
};
Observable.create(obsrv => {
obsrv.next('Claudia');
obsrv.next('Fritz');
setTimeout(() => {
obsrv.next('Peter');
obsrv.complete();
}, 1000);
})
.do(res => console.log(new Date().getSeconds(), res))
.map((resp, index) => {
return {
id: index,
name: resp
};
})
.toArray()
.subscribe(observer);
// ----
// Teil 2
this.timeObservable = Observable.interval(1000)
.startWith(0)
.map(resp => new Date())
.do(resp => console.log('Observable mit interval in Millisekunde: ' + resp.getMilliseconds()))
.share();
this.timeSubscription = this.timeObservable.subscribe(resp => {
console.log('Observer erhält ein Datum in Millisekunde: ' + resp.getMilliseconds() + '\n--');
});
this.hotColdTimeout = setTimeout(() => {
// Cold Observable
const coldObservable = Observable.create(obsrv => {
const producer = new Producer('Cold-Observable');
producer.addListener(value => obsrv.next(value));
return () => clearInterval(producer.interval);
});
this.coldSubscription = coldObservable.subscribe(resp => console.log('Zähler Cold-Observable: ' + resp));
// Hot Observable
const hotObservable = Observable.create(obsrv => {
this.producer.addListener(value => obsrv.next(value));
});
this.hotSubscription = hotObservable.subscribe(resp => console.log('Zähler Hot-Observable: ' + resp));
}, 4000);
// ----
// Teil 3
const passengersHttp = (searchTerm) => {
return this.http.get('/wrong/passengers.json?name=' + searchTerm)
.catch(err => this.http.get('/assets/passengers.json?name=' + searchTerm))
// Backend simulation
.delay(2000)
.map(resp => {
return resp.json().filter(passenger => {
const search = passenger.name.toLowerCase() + passenger.lastName.toLowerCase();
return search.indexOf(searchTerm.toLowerCase()) !== -1;
});
});
// ---
};
passengersHttp('dan').subscribe(resp => console.log(resp));
this.inputSubject = new BehaviorSubject({target: {value: this.name}});
this.listObservable = this.inputSubject
.asObservable()
.debounceTime(300)
.map(event => event.target.value)
.distinctUntilChanged()
.do(() => this.loading = true)
.switchMap(searchTerm => passengersHttp(searchTerm))
.do(() => this.loading = false);
// ----
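// Design note on the search pipeline above: debounceTime(300) waits for a
// pause in typing, distinctUntilChanged() drops repeated terms, and
// switchMap cancels the previous in-flight request when a new term arrives.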
}
ngOnDestroy() {
// Teil 2
this.timeSubscription.unsubscribe();
clearTimeout(this.hotColdTimeout);
if (this.hotSubscription) {
this.hotSubscription.unsubscribe();
}
if (this.coldSubscription) {
this.coldSubscription.unsubscribe();
}
clearInterval(this.producer.interval);
}
}
| PassengerSearchComponent |
pyAero_geometry.py | #!/usr/local/bin/python
'''
pyAero_geometry
Holds the Python Aerodynamic Analysis Classes (base and inherited).
Copyright (c) 2008 by Dr. Ruben E. Perez
All rights reserved. Not to be used for commercial purposes.
Revision: 1.1 $Date: 21/05/2008 21:00$
Developers:
-----------
- Dr. Ruben E. Perez (RP)
History
-------
v. 1.0 - Initial Class Creation (RP, 2008)
'''
__version__ = '$Revision: $'
'''
To Do:
-
'''
# =============================================================================
# Standard Python modules
# =============================================================================
import os, sys
import pdb
# =============================================================================
# External Python modules
# =============================================================================
import numpy
# =============================================================================
# Extension modules
# =============================================================================
# =============================================================================
# Misc Definitions
# =============================================================================
# =============================================================================
# Geometry Class
# =============================================================================
class | (object):
'''
Abstract Class for Geometry Object
'''
def __init__(self, name='', CGPercent = 0.25, ForeSparPercent = 0.25,
RearSparPercent = 0.75,StaticMarginPercent=0.05,
ForeThickCon = 0.01, RearThickCon = 0.99,
rootOffset = 0.01, tipOffset=0.01,
xRootec=0.0, yRootec=0.0, zRootec=0.0,
*args, **kwargs):
'''
Flow Class Initialization
Keyword Arguments:
------------------
name -> STRING: Geometry Instance Name
Attributes:
-----------
Documentation last updated: May. 21, 2008 - Ruben E. Perez
'''
#
self.name = name
self.CGPercent = CGPercent
self.ForeSparPercent = ForeSparPercent
self.RearSparPercent = RearSparPercent
self.StaticMarginPercent = StaticMarginPercent
self.ForeThickCon = ForeThickCon
self.RearThickCon = RearThickCon
self.tipOffset = tipOffset
self.rootOffset = rootOffset
self.xRootec = xRootec
self.yRootec = yRootec
self.zRootec = zRootec
def ListAttributes(self):
'''
Print Structured Attributes List
Documentation last updated: May. 21, 2008 - Ruben E. Perez
'''
ListAttributes(self)
def __str__(self):
'''
Print Structured List of Variable
Documentation last updated: May. 21, 2008 - Ruben E. Perez
'''
return ('name \n'+' '+str(self.name).center(9) )
#==============================================================================
#
#==============================================================================
def ListAttributes(self):
'''
Print Structured Attributes List
Documentation last updated: March. 24, 2008 - Ruben E. Perez
'''
print('\n')
print('Attributes List of: ' + repr(self.__dict__['name']) + ' - ' + self.__class__.__name__ + ' Instance\n')
self_keys = sorted(self.__dict__.keys())
for key in self_keys:
if key != 'name':
print(str(key) + ' : ' + repr(self.__dict__[key]))
#end
#end
print('\n')
#==============================================================================
# Flow Test
#==============================================================================
if __name__ == '__main__':
print('Testing ...')
# Test Variable
geo = Geometry(name = 'test')
geo.ListAttributes()
print(geo)
| Geometry |
__init__.py | """
"""
import sys
import os
import pkgutil
from ast import NodeTransformer
from abc import ABCMeta, abstractproperty
from inspect import isabstract
from importlib import import_module
from ..utils import classproperty
__all__ =\
[ "BasicOptimization"
, "ASTOptimization"
, "ByteCodeOptimization"
, "all_optimizations"
, "ast_optimizations"
, "bytecode_optimizations"
, "install"
, "uninstall"
]
class BasicOptimization(object, metaclass=ABCMeta):
"""
"""
@classproperty
def id(cls) -> str:
"""
:return:
"""
return cls.__name__
@abstractproperty
def name(self) -> str:
"""
:return: name of optimization
"""
return NotImplemented
@abstractproperty
def description(self) -> str:
|
@abstractproperty
def level(self) -> int:
"""
:return: severity level for optimization
"""
return NotImplemented
@classproperty
def optimizations(cls):
"""
:return:
"""
if not hasattr(cls, '_optimizations'):
cls._optimizations = {}
package = sys.modules[BasicOptimization.__module__]
path = os.path.dirname(package.__file__)
for loader, module_name, is_pkg in pkgutil.iter_modules([path]):
if module_name.startswith('__'):
continue
module = import_module('.' + module_name, package.__name__)
for _type in vars(module).values():
if not isinstance(_type, type):
continue
if isabstract(_type):
continue
if not issubclass(_type, cls):
continue
try:
obj = _type()
cls._optimizations[obj.id] = obj
except Exception:
pass
return cls._optimizations
@classmethod
def _check_optimization(cls, optimization):
if not optimization:
raise ValueError('Not specified value')
if isinstance(optimization, type):
try:
optimization = optimization()
except TypeError as exc:
raise TypeError('Unexpected type for optimization: {}'.format(str(optimization))) from exc
if not isinstance(optimization, cls):
raise TypeError('Unexpected type for optimization: {}'.format(str(type(optimization))))
return optimization
@classmethod
def install(cls, optimization):
"""
"""
optimization = cls._check_optimization(optimization)
cls.optimizations[optimization.id] = optimization
@classmethod
def uninstall(cls, optimization):
"""
"""
if not isinstance(optimization, str):
optimization = cls._check_optimization(optimization)
optimization = optimization.id
del cls.optimizations[optimization]
class ASTOptimization(BasicOptimization, NodeTransformer):
"""
"""
class ByteCodeOptimization(BasicOptimization):
"""
"""
all_optimizations = BasicOptimization.optimizations
ast_optimizations = ASTOptimization.optimizations
bytecode_optimizations = ByteCodeOptimization.optimizations
install = BasicOptimization.install
uninstall = BasicOptimization.uninstall
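# Sketch of a concrete optimization (illustrative only; the class name and
# attribute values are hypothetical):
#   class ExampleNoop(ASTOptimization):
#       name = "example-noop"
#       description = "No-op optimization used for illustration."
#       level = 0
#   install(ExampleNoop)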
| """
:return: description of optimization
"""
return NotImplemented |
channel.rs | #![cfg(feature = "unstable")]
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::Duration;
use async_std::sync::channel;
use async_std::task;
use rand::{thread_rng, Rng};
fn ms(ms: u64) -> Duration {
Duration::from_millis(ms)
}
#[test]
fn smoke() {
task::block_on(async {
let (s, r) = channel(1);
s.send(7).await;
assert_eq!(r.recv().await, Some(7));
s.send(8).await;
assert_eq!(r.recv().await, Some(8));
drop(s);
assert_eq!(r.recv().await, None);
})
}
#[test]
fn capacity() {
for i in 1..10 {
let (s, r) = channel::<()>(i);
assert_eq!(s.capacity(), i);
assert_eq!(r.capacity(), i);
}
}
#[test]
fn len_empty_full() {
#![allow(clippy::cognitive_complexity)]
task::block_on(async {
let (s, r) = channel(2);
assert_eq!(s.len(), 0);
assert_eq!(s.is_empty(), true);
assert_eq!(s.is_full(), false);
assert_eq!(r.len(), 0);
assert_eq!(r.is_empty(), true);
assert_eq!(r.is_full(), false);
s.send(()).await;
assert_eq!(s.len(), 1);
assert_eq!(s.is_empty(), false);
assert_eq!(s.is_full(), false);
assert_eq!(r.len(), 1);
assert_eq!(r.is_empty(), false);
assert_eq!(r.is_full(), false);
s.send(()).await;
assert_eq!(s.len(), 2);
assert_eq!(s.is_empty(), false);
assert_eq!(s.is_full(), true);
assert_eq!(r.len(), 2);
assert_eq!(r.is_empty(), false);
assert_eq!(r.is_full(), true);
r.recv().await;
assert_eq!(s.len(), 1);
assert_eq!(s.is_empty(), false);
assert_eq!(s.is_full(), false);
assert_eq!(r.len(), 1);
assert_eq!(r.is_empty(), false);
assert_eq!(r.is_full(), false);
})
}
#[test]
fn recv() {
task::block_on(async {
let (s, r) = channel(100);
task::spawn(async move {
assert_eq!(r.recv().await, Some(7));
task::sleep(ms(1000)).await;
assert_eq!(r.recv().await, Some(8));
task::sleep(ms(1000)).await;
assert_eq!(r.recv().await, Some(9));
assert_eq!(r.recv().await, None);
});
task::sleep(ms(1500)).await;
s.send(7).await;
s.send(8).await;
s.send(9).await;
})
}
#[test]
fn send() {
task::block_on(async {
let (s, r) = channel(1);
task::spawn(async move {
s.send(7).await;
task::sleep(ms(1000)).await;
s.send(8).await;
task::sleep(ms(1000)).await;
s.send(9).await;
task::sleep(ms(1000)).await;
s.send(10).await;
});
task::sleep(ms(1500)).await;
assert_eq!(r.recv().await, Some(7));
assert_eq!(r.recv().await, Some(8));
assert_eq!(r.recv().await, Some(9));
})
}
#[test]
fn recv_after_disconnect() {
task::block_on(async {
let (s, r) = channel(100);
s.send(1).await;
s.send(2).await;
s.send(3).await;
drop(s);
assert_eq!(r.recv().await, Some(1));
assert_eq!(r.recv().await, Some(2));
assert_eq!(r.recv().await, Some(3));
assert_eq!(r.recv().await, None);
})
}
#[test]
fn len() {
const COUNT: usize = 25_000;
const CAP: usize = 1000;
task::block_on(async {
let (s, r) = channel(CAP);
assert_eq!(s.len(), 0);
assert_eq!(r.len(), 0);
for _ in 0..CAP / 10 {
for i in 0..50 {
s.send(i).await;
assert_eq!(s.len(), i + 1);
}
for i in 0..50 {
r.recv().await;
assert_eq!(r.len(), 50 - i - 1);
}
}
assert_eq!(s.len(), 0);
assert_eq!(r.len(), 0);
for i in 0..CAP {
s.send(i).await;
assert_eq!(s.len(), i + 1);
}
for _ in 0..CAP {
r.recv().await.unwrap();
}
assert_eq!(s.len(), 0);
assert_eq!(r.len(), 0);
let child = task::spawn({
let r = r.clone();
async move {
for i in 0..COUNT {
assert_eq!(r.recv().await, Some(i));
let len = r.len();
assert!(len <= CAP);
}
}
});
for i in 0..COUNT {
s.send(i).await;
let len = s.len();
assert!(len <= CAP);
}
child.await;
assert_eq!(s.len(), 0);
assert_eq!(r.len(), 0);
})
}
#[test]
fn disconnect_wakes_receiver() {
task::block_on(async {
let (s, r) = channel::<()>(1);
let child = task::spawn(async move {
assert_eq!(r.recv().await, None);
});
task::sleep(ms(1000)).await;
drop(s);
child.await;
})
}
#[test]
fn spsc() {
const COUNT: usize = 100_000;
task::block_on(async {
let (s, r) = channel(3);
let child = task::spawn(async move {
for i in 0..COUNT {
assert_eq!(r.recv().await, Some(i));
}
assert_eq!(r.recv().await, None);
});
for i in 0..COUNT {
s.send(i).await;
}
drop(s);
child.await;
})
}
#[test]
fn mpmc() {
const COUNT: usize = 25_000; |
task::block_on(async {
let (s, r) = channel::<usize>(3);
let v = (0..COUNT).map(|_| AtomicUsize::new(0)).collect::<Vec<_>>();
let v = Arc::new(v);
let mut tasks = Vec::new();
for _ in 0..TASKS {
let r = r.clone();
let v = v.clone();
tasks.push(task::spawn(async move {
for _ in 0..COUNT {
let n = r.recv().await.unwrap();
v[n].fetch_add(1, Ordering::SeqCst);
}
}));
}
for _ in 0..TASKS {
let s = s.clone();
tasks.push(task::spawn(async move {
for i in 0..COUNT {
s.send(i).await;
}
}));
}
for t in tasks {
t.await;
}
for c in v.iter() {
assert_eq!(c.load(Ordering::SeqCst), TASKS);
}
});
}
#[test]
fn oneshot() {
const COUNT: usize = 10_000;
task::block_on(async {
for _ in 0..COUNT {
let (s, r) = channel(1);
let c1 = task::spawn(async move { r.recv().await.unwrap() });
let c2 = task::spawn(async move { s.send(0).await });
c1.await;
c2.await;
}
})
}
#[test]
fn drops() {
const RUNS: usize = 100;
static DROPS: AtomicUsize = AtomicUsize::new(0);
#[derive(Debug, PartialEq)]
struct DropCounter;
impl Drop for DropCounter {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
let mut rng = thread_rng();
for _ in 0..RUNS {
task::block_on(async {
let steps = rng.gen_range(0, 10_000);
let additional = rng.gen_range(0, 50);
DROPS.store(0, Ordering::SeqCst);
let (s, r) = channel::<DropCounter>(50);
let child = task::spawn({
let r = r.clone();
async move {
for _ in 0..steps {
r.recv().await.unwrap();
}
}
});
for _ in 0..steps {
s.send(DropCounter).await;
}
child.await;
for _ in 0..additional {
s.send(DropCounter).await;
}
assert_eq!(DROPS.load(Ordering::SeqCst), steps);
drop(s);
drop(r);
assert_eq!(DROPS.load(Ordering::SeqCst), steps + additional);
})
}
} | const TASKS: usize = 4; |
_violin.py | import _plotly_utils.basevalidators
class ViolinValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="violin", parent_name="", **kwargs):
super(ViolinValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Violin"),
data_docs=kwargs.pop(
"data_docs",
"""
alignmentgroup
Set several traces linked to the same position
axis or matching axes to the same
alignmentgroup. This controls whether bars
compute their positional range dependently or
independently.
bandwidth
Sets the bandwidth used to compute the kernel
density estimate. By default, the bandwidth is
determined by Silverman's rule of thumb.
box
:class:`plotly.graph_objects.violin.Box`
instance or dict with compatible properties
customdata
Assigns extra data to each datum. This may be
useful when listening to hover, click and
selection events. Note that "scatter" traces
also append customdata items to the markers'
DOM elements
customdatasrc
Sets the source reference on Chart Studio Cloud
for customdata .
fillcolor
Sets the fill color. Defaults to a half-
transparent variant of the line color, marker
color, or marker line color, whichever is
available.
hoverinfo
Determines which trace information appear on
hover. If `none` or `skip` are set, no
information is displayed upon hovering. But, if
`none` is set, click and hover events are still
fired.
hoverinfosrc
Sets the source reference on Chart Studio Cloud
for hoverinfo .
hoverlabel
:class:`plotly.graph_objects.violin.Hoverlabel`
instance or dict with compatible properties
hoveron
Do the hover effects highlight individual
violins or sample points or the kernel density
estimate or any combination of them?
hovertemplate
Template string used for rendering the
information that appear on hover box. Note that
this will override `hoverinfo`. Variables are
inserted using %{variable}, for example "y:
%{y}". Numbers are formatted using d3-format's
syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
for details on the formatting syntax. Dates are
formatted using d3-time-format's syntax
%{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}".
https://github.com/d3/d3-time-
format#locale_format for details on the date
formatting syntax. The variables available in
`hovertemplate` are the ones emitted as event
data described at this link
https://plotly.com/javascript/plotlyjs-
events/#event-data. Additionally, every
attributes that can be specified per-point (the
ones that are `arrayOk: true`) are available.
Anything contained in tag `<extra>` is
displayed in the secondary box, for example
"<extra>{fullData.name}</extra>". To hide the
secondary box completely, use an empty tag
`<extra></extra>`.
hovertemplatesrc
Sets the source reference on Chart Studio Cloud
for hovertemplate .
hovertext
Same as `text`.
hovertextsrc
Sets the source reference on Chart Studio Cloud
for hovertext .
ids
Assigns id labels to each datum. These ids provide
object constancy of data points during
animation. Should be an array of strings, not
numbers or any other type.
idssrc
Sets the source reference on Chart Studio Cloud
for ids .
jitter
Sets the amount of jitter in the sample points
drawn. If 0, the sample points align along the
distribution axis. If 1, the sample points are
drawn in a random jitter of width equal to the
width of the violins.
legendgroup
Sets the legend group for this trace. Traces
part of the same legend group hide/show at the
same time when toggling legend items.
line
:class:`plotly.graph_objects.violin.Line`
instance or dict with compatible properties
marker
:class:`plotly.graph_objects.violin.Marker`
instance or dict with compatible properties
meanline
:class:`plotly.graph_objects.violin.Meanline`
instance or dict with compatible properties
meta
Assigns extra meta information associated with
this trace that can be used in various text
attributes. Attributes such as trace `name`,
graph, axis and colorbar `title.text`,
annotation `text` `rangeselector`,
`updatemenues` and `sliders` `label` text all
support `meta`. To access the trace `meta`
values in an attribute in the same trace,
simply use `%{meta[i]}` where `i` is the index
or key of the `meta` item in question. To
access trace `meta` in layout attributes, use
`%{data[n[.meta[i]}` where `i` is the index or
key of the `meta` and `n` is the trace index.
metasrc
Sets the source reference on Chart Studio Cloud
for meta .
name
Sets the trace name. The trace name appear as
the legend item and on hover. For violin
traces, the name will also be used for the
position coordinate, if `x` and `x0` (`y` and
`y0` if horizontal) are missing and the
position axis is categorical. Note that the
trace name is also used as a default value for
attribute `scalegroup` (please see its
description for details).
offsetgroup
Set several traces linked to the same position
axis or matching axes to the same offsetgroup
where bars of the same position coordinate will
line up.
opacity
Sets the opacity of the trace.
orientation
Sets the orientation of the violin(s). If "v"
("h"), the distribution is visualized along the
vertical (horizontal).
pointpos
Sets the position of the sample points in
relation to the violins. If 0, the sample
points are places over the center of the
violins. Positive (negative) values correspond
to positions to the right (left) for vertical
violins and above (below) for horizontal
violins.
points
If "outliers", only the sample points lying
outside the whiskers are shown If
"suspectedoutliers", the outlier points are
shown and points either less than 4*Q1-3*Q3 or
greater than 4*Q3-3*Q1 are highlighted (see
`outliercolor`) If "all", all sample points are
shown If False, only the violins are shown with
no sample points. Defaults to
"suspectedoutliers" when `marker.outliercolor`
or `marker.line.outliercolor` is set, otherwise
defaults to "outliers".
scalegroup
If there are multiple violins that should be
sized according to to some metric (see
`scalemode`), link them by providing a non-
empty group id here shared by every trace in
the same group. If a violin's `width` is
undefined, `scalegroup` will default to the
trace's name. In this case, violins with the
same names will be linked together
scalemode
Sets the metric by which the width of each
violin is determined. "width" means each violin
has the same (max) width. *count* means the | instance or dict with compatible properties
selectedpoints
Array containing integer indices of selected
points. Has an effect only for traces that
support selections. Note that an empty array
means an empty selection where the `unselected`
are turned on for all points, whereas, any
other non-array values means no selection all
where the `selected` and `unselected` styles
have no effect.
showlegend
Determines whether or not an item corresponding
to this trace is shown in the legend.
side
Determines on which side of the position value
the density function making up one half of a
violin is plotted. Useful when comparing two
violin traces under "overlay" mode, where one
trace has `side` set to "positive" and the
other to "negative".
span
Sets the span in data space for which the
density function will be computed. Has an
effect only when `spanmode` is set to "manual".
spanmode
Sets the method that determines the span in data
space over which the density function is computed.
"soft" means the span goes from the sample's
minimum value minus two bandwidths to the
sample's maximum value plus two bandwidths.
"hard" means the span goes from the sample's
minimum to its maximum value. For custom span
settings, use mode "manual" and fill in the
`span` attribute.
stream
:class:`plotly.graph_objects.violin.Stream`
instance or dict with compatible properties
text
Sets the text elements associated with each
sample value. If a single string, the same
string appears over all the data points. If an
array of string, the items are mapped in order
to the this trace's (x,y) coordinates. To be
seen, trace `hoverinfo` must contain a "text"
flag.
textsrc
Sets the source reference on Chart Studio Cloud
for text .
uid
Assign an id to this trace, Use this to provide
object constancy between traces during
animations and transitions.
uirevision
Controls persistence of some user-driven
changes to the trace: `constraintrange` in
`parcoords` traces, as well as some `editable:
true` modifications such as `name` and
`colorbar.title`. Defaults to
`layout.uirevision`. Note that other user-
driven trace attribute changes are controlled
by `layout` attributes: `trace.visible` is
controlled by `layout.legend.uirevision`,
`selectedpoints` is controlled by
`layout.selectionrevision`, and
`colorbar.(x|y)` (accessible with `config:
{editable: true}`) is controlled by
`layout.editrevision`. Trace changes are
tracked by `uid`, which only falls back on
trace index if no `uid` is provided. So if your
app can add/remove traces before the end of the
`data` array, such that the same trace has a
different index, you can still preserve user-
driven changes if you give each trace a `uid`
that stays with it as it moves.
unselected
:class:`plotly.graph_objects.violin.Unselected`
instance or dict with compatible properties
visible
Determines whether or not this trace is
visible. If "legendonly", the trace is not
drawn, but can appear as a legend item
(provided that the legend itself is visible).
width
Sets the width of the violin in data
coordinates. If 0 (default value) the width is
automatically selected based on the positions
of other violin traces in the same subplot.
x
Sets the x sample data or coordinates. See
overview for more info.
x0
Sets the x coordinate for single-box traces or
the starting coordinate for multi-box traces
set using q1/median/q3. See overview for more
info.
xaxis
Sets a reference between this trace's x
coordinates and a 2D cartesian x axis. If "x"
(the default value), the x coordinates refer to
`layout.xaxis`. If "x2", the x coordinates
refer to `layout.xaxis2`, and so on.
xsrc
Sets the source reference on Chart Studio Cloud
for x .
y
Sets the y sample data or coordinates. See
overview for more info.
y0
Sets the y coordinate for single-box traces or
the starting coordinate for multi-box traces
set using q1/median/q3. See overview for more
info.
yaxis
Sets a reference between this trace's y
coordinates and a 2D cartesian y axis. If "y"
(the default value), the y coordinates refer to
`layout.yaxis`. If "y2", the y coordinates
refer to `layout.yaxis2`, and so on.
ysrc
Sets the source reference on Chart Studio Cloud
for y .
""",
),
**kwargs
) | violins are scaled by the number of sample
points making up each violin.
selected
:class:`plotly.graph_objects.violin.Selected` |
linker_script.rs | use std::{fs::OpenOptions, io::Write};
use crate::{port::LinkerScriptConstants, Configuration};
use anyhow::{anyhow, Result};
/// Generates the linker script `memory.x`, which describes the amount and location
/// of flash and RAM memory available to a particular Loadstone instance.
pub fn generate_linker_script(configuration: &Configuration) -> Result<()> {
let mut file = OpenOptions::new().write(true).create(true).truncate(true).open("memory.x")?;
#[allow(unused_mut)]
let mut constants = configuration
.port
.linker_script_constants()
.ok_or(anyhow!("Current board doesn't have linker script constants defined."))?;
if std::env::var("CARGO_FEATURE_RELOCATE_TO_BOOTABLE_BANK").is_ok() {
relocate_to_bootable_bank(&mut constants, configuration)?;
}
write!(
file,
"MEMORY\n\
{{\n\
FLASH : ORIGIN = 0x{:08X}, LENGTH = {}K\n\
RAM : ORIGIN = 0x{:08X}, LENGTH = {}K\n\
}}\n",
constants.flash.origin,
constants.flash.size / 1024,
constants.ram.origin,
constants.ram.size / 1024,
)?;
Ok(())
}
#[allow(unused)]
fn relocate_to_bootable_bank(
constants: &mut LinkerScriptConstants,
configuration: &Configuration,
) -> Result<()> {
let bootable_address = configuration.memory_configuration.bootable_address().ok_or(anyhow!(
"Impossible to relocate: bootable bank is undefined in configuration file." | constants.flash.origin = bootable_address;
Ok(())
} | ))?;
let offset = bootable_address - constants.flash.origin;
constants.flash.size = constants.flash.size.saturating_sub(offset as usize); |
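// Example of the generated `memory.x` under assumed constants (the origins
// and sizes below are placeholders, not values from any real port):
//
// MEMORY
// {
//     FLASH : ORIGIN = 0x08000000, LENGTH = 512K
//     RAM : ORIGIN = 0x20000000, LENGTH = 128K
// }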
main.go | package main
import (
"log"
| "github.com/spf13/cobra"
)
// @title go-app API
// @version 0.1
// @description go-app API doc
// @contact.name Xing
// @contact.email [email protected]
// @host localhost:8080
// @BasePath
func main() {
root := &cobra.Command{
Use: "go-app.exe",
Short: "Example application",
Long: "An example application to show how to use the service-framework",
Version: "0.2.0",
}
root.AddCommand(serve.NewCommand())
if err := root.Execute(); err != nil {
log.Fatal(err)
}
} | "go-app-template/cmd/serve"
_ "go-app-template/docs"
|
await_block.ts | import { is_promise } from './utils';
import { check_outros, group_outros, transition_in, transition_out } from './transitions';
import { flush } from './scheduler';
import { get_current_component, set_current_component } from './lifecycle';
export function handle_promise(promise, info) {
const token = info.token = {};
function | (type, index, key?, value?) {
if (info.token !== token) return;
info.resolved = value;
let child_ctx = info.ctx;
if (key !== undefined) {
child_ctx = child_ctx.slice();
child_ctx[key] = value;
}
const block = type && (info.current = type)(child_ctx);
let needs_flush = false;
if (info.block) {
if (info.blocks) {
info.blocks.forEach((block, i) => {
if (i !== index && block) {
group_outros();
transition_out(block, 1, 1, () => {
info.blocks[i] = null;
});
check_outros();
}
});
} else {
info.block.d(1);
}
block.c();
transition_in(block, 1);
block.m(info.mount(), info.anchor);
needs_flush = true;
}
info.block = block;
if (info.blocks) info.blocks[index] = block;
if (needs_flush) {
flush();
}
}
if (is_promise(promise)) {
const current_component = get_current_component();
promise.then(value => {
set_current_component(current_component);
update(info.then, 1, info.value, value);
set_current_component(null);
}, error => {
set_current_component(current_component);
update(info.catch, 2, info.error, error);
set_current_component(null);
});
// if we previously had a then/catch block, destroy it
if (info.current !== info.pending) {
update(info.pending, 0);
return true;
}
} else {
if (info.current !== info.then) {
update(info.then, 1, info.value, promise);
return true;
}
info.resolved = promise;
}
}
| update |
tiler.py | # Purpose: takes a list of filenames AND/OR publically accessible urls.
# Returns a tiled image file of tiles SIZExSIZE, separated by spaces of width
# DIFF, in rows of length ROWSIZE.
# files that can't be retrieved are returned blank.
import os
import numpy as np
from PIL import Image
import urllib.request
import validators
# global vars
EMPTY = "empty"
'''
Crops given 'Image' object to the largest square possible.
Centers the image.
'''
def crop_square(im):
# crops to square, based on smallest length.
width, height = im.size
side_length = min(width, height)
width_pad = (width - side_length) // 2
height_pad = (height - side_length) // 2
left = width_pad
top = height_pad
right = width - width_pad
bottom = height - height_pad
return im.crop((left, top, right, bottom))
'''
Purpose: pads images with transparency to precisely the correct size. Provides robustness for images that are too small, or one pixel off the correct size.
Input: a = a numpy array representing the thumbnail.
Assumption: the thumbnail's x, y dims are no greater than side (oversized axes are cropped).
side = desired side length.
Returns a thumbnail of precisely (side x side x 4).
Padding will CENTRE the thumbnail.
'''
def | (a, side):
ax, ay, az = a.shape
if (ax < side): # not tall enough. add row of (padx x y).
padx = side - ax
x1 = padx // 2
x2 = x1 + padx % 2
row1 = np.full((x1, ay, 4), [255, 255, 255, 0], np.uint8)
row2 = np.full((x2, ay, 4), [255, 255, 255, 0], np.uint8)
a = np.concatenate((row1, a, row2))
elif (ax > side): # too tall, crop.
cutoff = side - ax
a = a[:cutoff]
if (ay < side): # not wide enough. add col of (pady x side)
pady = side - ay
y1 = pady // 2
y2 = y1 + pady % 2
col1 = np.full((side, y1, 4), [255, 255, 255, 0], np.uint8)
col2 = np.full((side, y2, 4), [255, 255, 255, 0], np.uint8)
a = np.hstack((col1, a, col2))
elif (ay > side): # too wide, crop.
cutoff = side - ay
a = a[:, :cutoff]
return a
'''
Opens image file from local directory.
Returns as an np array of thumbnail SIZE, in 4dim RGBA format.
'''
def gen_thumbnail(filename, tileSize, default):
if (filename == EMPTY):
return default
# save from web into folder if url.
if (validators.url(filename)):
try:
urllib.request.urlretrieve(filename, ".temp_web_images/temp_image")
filename = ".temp_web_images/temp_image"
except Exception:
print("error: url could not be retrieved.")
return default # if image can't be retrieved.
with Image.open(filename) as im:
im = im.convert("RGBA") # add transparency
x, y = im.size # scale down to thumbnail.
tsize = int(tileSize * (max(x, y) / min(x, y)))
im.thumbnail((tsize, tsize), Image.ANTIALIAS)
im = crop_square(im) # THIS LINE: toggle to change whether square or original aspect ratio.
a = np.asarray(im) # create np array from values.
a = pad_thumbnail(a, tileSize) # for robustness.
# delete temp saved image
if (filename == ".temp_web_images/temp_image"):
os.remove(".temp_web_images/temp_image")
return a
'''
Main functionality. Converts list of filenames into a tiled grid of thumbnails.
Returns as Image object.
'''
def tile_images(files, tileSize, rowLength, space):
# initialise transparent padding
row_space = np.full((space, tileSize, 4), [255, 255, 255, 0], np.uint8)
col_space = np.full((tileSize, space, 4), [255, 255, 255, 0], np.uint8)
square = np.full((tileSize, tileSize, 4), [255, 255, 255, 0], np.uint8)
row_div = np.full((space, tileSize*rowLength + space*(rowLength-1), 4), [255, 255, 255, 0], np.uint8)
# initialise folder to save web images into
if not os.path.exists('.temp_web_images'):
os.makedirs('.temp_web_images')
# reshape 1D file list into 2D structured grid of row length rowLength
to_add = rowLength - (len(files) % rowLength)
if to_add != rowLength:
files.extend([EMPTY]*to_add)
arr = np.array(files)
newFiles = arr.reshape(len(files) // rowLength, rowLength)
# create each row array and add to list.
rowList = []
for row in newFiles:
thisRow = []
for file in row:
thisRow.extend([gen_thumbnail(file, tileSize, square), col_space])
rowArr = np.hstack([np.array(i) for i in thisRow[:-1]])
rowList.extend([rowArr, row_div])
# concat row arrays into a single grid array
arr = np.concatenate([np.array(i) for i in rowList[:-1]]) # elegant numpy approach: from https://stackoverflow.com/questions/10346336/list-of-lists-into-numpy-array
im = Image.fromarray(arr)
return im
if __name__ == "__main__":
print("hello world!! im tilebot")
files = [
"./pic/bamboo.jpg",
"./pic/coconut.png",
"./pic/fish.png",
"./pic/shiro.jpg",
"./pic/calico-cat.png",
"./pic/ghost.png",
"./pic/field.jpg",
"./pic/blue.gif",
"./pic/boy.jpg"
]
urls = [
"./pic/bamboo.jpg",
"./pic/coconut.png",
"./pic/fish.png",
"./pic/shiro.jpg",
"https://cdn.i-scmp.com/sites/default/files/styles/1200x800/public/d8/images/methode/2020/10/30/8caac9de-1a82-11eb-8f67-a484f6db61a1_image_hires_175647.jpg?itok=T-dFsg-A&v=1604051814",
"https://www.nme.com/wp-content/uploads/2021/03/Genshin-Impact-miHoYo.jpg",
"https://www.indiewire.com/wp-content/uploads/2020/12/genshin1.jpg",
"./pic/calico-cat.png",
"./pic/ghost.png",
"./pic/field.jpg",
"./pic/blue.gif",
"./pic/boy.jpg",
"https://blog.playstation.com/tachyon/2020/11/Featured-Image-Genshin-Impact-update-out-tomorrow.jpg?fit=1024,720",
"https://cdn.vox-cdn.com/thumbor/pot2y4VQxXpzedEZ8eDMrFR2wLg=/0x308:7680x4320/1200x800/filters:focal(3413x728:4641x1956)/cdn.vox-cdn.com/uploads/chorus_image/image/67716030/ba84dbaad79d15323968a64863c1e069.0.jpg",
"https://gamerbraves.sgp1.cdn.digitaloceanspaces.com/2020/01/arknights-feature-c.jpg",
"https://webusstatic.yo-star.com/uy0news/ae/19c9d44c8cf7d7bc770ee588b52dc2e0.png"
]
# doesn't work - these urls aren't publically accessible.
disc_urls = [
"https://cdn.discordapp.com/attachments/841255574330408981/841266535376486460/EzFyC5ZVcAA1-_m.jpg",
"https://cdn.discordapp.com/attachments/841255574330408981/841266037214806046/Elu0GiWVkAEzrHm.png",
"https://cdn.discordapp.com/attachments/841255574330408981/841265455237824512/tumblr_nayd2yGcBC1rscimho1_500.png"
]
tilesize = 136
rowlength = 6
spacing = 4
im = tile_images(files, tilesize, rowlength, spacing)
im.save("./pic/merge-GRID.png", "PNG")
im = tile_images(urls, tilesize, rowlength, spacing)
im.save("./pic/url_merged_2.png", "PNG")
| pad_thumbnail |
pgp_stuff.py | from commons import *
import os
def | ():
init_directory('./temp')
# gpg must exist on your system
status = os.system('gpg --version')
if status==0:
print_up('gpg is found')
else:
print_err('can\'t find gpg')
def verify_publickey_message(pk, msg):
# obtain a temp filename
fn = get_random_hex_string(10)
# save the public key file and the message file
pkfn = f'./temp/{fn}.pk'
pkbinfn = pkfn+'.gpg'
msgfn = f'./temp/{fn}.msg'
writefile(pkfn, pk, mode='w', encoding='utf-8')
writefile(msgfn, msg, mode='w', encoding='utf-8')
def cleanup():
removefile(pkfn)
removefile(msgfn)
removefile(pkbinfn)
# remove armor
status = os.system(f'gpg --dearmor {pkfn}')
if status != 0:
qprint('status:', status)
cleanup()
raise Exception('failed to dearmor the public key (there might be something wrong with your public key)')
# verify
status = os.system(f'gpg --no-default-keyring --keyring {pkbinfn} --verify {msgfn}')
if status != 0:
qprint('status:', status)
cleanup()
raise Exception('failed to verify the message (your public key is okay but the signature you supplied does not match the public key, or is of a wrong format)')
cleanup()
return True
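# Usage sketch (file names are placeholders; keys and messages are read as
# UTF-8 text):
#   pgp_check()
#   with open('alice.pub') as f: pk = f.read()
#   with open('signed.msg') as f: msg = f.read()
#   verify_publickey_message(pk, msg)  # returns True or raises on failure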
| pgp_check |
lib.rs | use std::io::Cursor;
use wasm_bindgen::prelude::*;
use yew::prelude::*;
use yew::services::reader::ReaderTask;
mod choose;
mod load;
#[wasm_bindgen(start)]
pub fn run_app() {
#[cfg(debug_assertions)]
console_error_panic_hook::set_once();
App::<Main>::new().mount_to_body();
}
struct Main {
link: ComponentLink<Self>,
state: State,
}
impl Component for Main {
type Message = Message;
type Properties = Properties;
fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {
Self {
link,
state: State::Choose,
}
}
fn update(&mut self, msg: Message) -> ShouldRender {
match msg {
Message::Choose(file) => {
let file_size = file.size() as u128;
let handle = load::start_read_file(file, &self.link).expect("Failed to read file");
self.state = State::Loading {
file_size,
_handle: handle,
};
true
}
Message::PharLoad(phar) => {
self.state = State::Browse(phar);
true
}
Message::Err(err) => {
self.state = State::Error(err);
true
}
}
}
fn change(&mut self, _: Self::Properties) -> ShouldRender |
fn view(&self) -> Html {
match &self.state {
State::Choose => html! {
<choose::Comp
on_choose=self.link.callback(|file| Message::Choose(file))/>
},
State::Loading { file_size, .. } => html! {
<p>{ format!("Reading {} of data...", byte_unit::Byte::from_bytes(*file_size).get_appropriate_unit(true)) }</p>
},
State::Browse(_) => html! {},
State::Error(err) => html! {
<div>
<h1>{ "Error" }</h1>
<p>{ err }</p>
</div>
},
}
}
}
enum State {
Choose,
Loading {
file_size: u128,
_handle: ReaderTask,
},
Browse(Box<Phar>),
Error(anyhow::Error),
}
enum Message {
Choose(web_sys::File),
PharLoad(Box<Phar>),
Err(anyhow::Error),
}
type Properties = ();
type Phar = phar::Reader<Cursor<Vec<u8>>>;
| {
false
} |
inline.go | //
// Blackfriday Markdown Processor
// Available at http://github.com/russross/blackfriday
//
// Copyright © 2011 Russ Ross <[email protected]>.
// Distributed under the Simplified BSD License.
// See README.md for details.
//
//
// Functions to parse inline elements.
//
package blackfriday
import (
"bytes"
"regexp"
"strconv"
)
var (
urlRe = `((https?|ftp):\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)]+`
anchorRe = regexp.MustCompile(`^(<a\shref="` + urlRe + `"(\stitle="[^"<>]+")?\s?>` + urlRe + `<\/a>)`)
)
// Functions to parse text within a block
// Each function returns the number of chars taken care of
// data is the complete block being rendered
// offset is the number of valid chars before the current cursor
func (p *parser) inline(out *bytes.Buffer, data []byte) {
// this is called recursively: enforce a maximum depth
if p.nesting >= p.maxNesting {
return
}
p.nesting++
i, end := 0, 0
for i < len(data) {
// copy inactive chars into the output
for end < len(data) && p.inlineCallback[data[end]] == nil {
end++
}
p.r.NormalText(out, data[i:end])
if end >= len(data) {
break
}
i = end
// call the trigger
handler := p.inlineCallback[data[end]]
if consumed := handler(p, out, data, i); consumed == 0 {
// no action from the callback; buffer the byte for later
end = i + 1
} else {
// skip past whatever the callback used
i += consumed
end = i
}
}
p.nesting--
}
// single and double emphasis parsing
func emphasis(p *parser, out *bytes.Buffer, data []byte, offset int) int {
data = data[offset:]
c := data[0]
ret := 0
if len(data) > 2 && data[1] != c {
// whitespace cannot follow an opening emphasis;
// strikethrough only takes two characters '~~'
if c == '~' || isspace(data[1]) {
return 0
}
if ret = helperEmphasis(p, out, data[1:], c); ret == 0 {
return 0
}
return ret + 1
}
if len(data) > 3 && data[1] == c && data[2] != c {
if isspace(data[2]) {
return 0
}
if ret = helperDoubleEmphasis(p, out, data[2:], c); ret == 0 {
return 0
}
return ret + 2
}
if len(data) > 4 && data[1] == c && data[2] == c && data[3] != c {
if c == '~' || isspace(data[3]) {
return 0
}
if ret = helperTripleEmphasis(p, out, data, 3, c); ret == 0 {
return 0
}
return ret + 3
}
return 0
}
func codeSpan(p *parser, out *bytes.Buffer, data []byte, offset int) int {
data = data[offset:]
nb := 0
// count the number of backticks in the delimiter
for nb < len(data) && data[nb] == '`' {
nb++
}
// find the next delimiter
i, end := 0, 0
for end = nb; end < len(data) && i < nb; end++ {
if data[end] == '`' {
i++
} else {
i = 0
}
}
// no matching delimiter?
if i < nb && end >= len(data) {
return 0
}
// trim outside whitespace
fBegin := nb
for fBegin < end && data[fBegin] == ' ' {
fBegin++
}
fEnd := end - nb
for fEnd > fBegin && data[fEnd-1] == ' ' {
fEnd--
}
// render the code span
if fBegin != fEnd {
p.r.CodeSpan(out, data[fBegin:fEnd])
}
return end
}
// newline preceded by two spaces becomes <br>
// newline without two spaces works when EXTENSION_HARD_LINE_BREAK is enabled
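// e.g. a line ending in "foo  " (two trailing spaces) produces a hard break,
// while "foo" alone only does so when EXTENSION_HARD_LINE_BREAK is enabled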
func lineBreak(p *parser, out *bytes.Buffer, data []byte, offset int) int {
// remove trailing spaces from out
outBytes := out.Bytes()
end := len(outBytes)
eol := end
for eol > 0 && outBytes[eol-1] == ' ' {
eol--
}
out.Truncate(eol)
precededByTwoSpaces := offset >= 2 && data[offset-2] == ' ' && data[offset-1] == ' '
// should there be a hard line break here?
if p.flags&EXTENSION_HARD_LINE_BREAK == 0 && !precededByTwoSpaces {
return 0
}
p.r.LineBreak(out)
return 1
}
type linkType int
const (
linkNormal linkType = iota
linkImg
linkDeferredFootnote
linkInlineFootnote
)
// '[': parse a link or an image or a footnote
func link(p *parser, out *bytes.Buffer, data []byte, offset int) int {
// no links allowed inside regular links, footnote, and deferred footnotes
if p.insideLink && (offset > 0 && data[offset-1] == '[' || len(data)-1 > offset && data[offset+1] == '^') {
return 0
}
// [text] == regular link
// ![alt] == image
// ^[text] == inline footnote
// [^refId] == deferred footnote
var t linkType
if offset > 0 && data[offset-1] == '!' {
t = linkImg
} else if p.flags&EXTENSION_FOOTNOTES != 0 {
if offset > 0 && data[offset-1] == '^' {
t = linkInlineFootnote
} else if len(data)-1 > offset && data[offset+1] == '^' {
t = linkDeferredFootnote
}
}
data = data[offset:]
var (
i = 1
noteId int
title, link []byte
textHasNl = false
)
if t == linkDeferredFootnote {
i++
}
// look for the matching closing bracket
for level := 1; level > 0 && i < len(data); i++ {
switch {
case data[i] == '\n':
textHasNl = true
case data[i-1] == '\\':
continue
case data[i] == '[':
level++
case data[i] == ']':
level--
if level <= 0 {
i-- // compensate for extra i++ in for loop
}
}
}
if i >= len(data) {
return 0
}
txtE := i
i++
// skip any amount of whitespace or newline
// (this is much more lax than original markdown syntax)
for i < len(data) && isspace(data[i]) {
i++
}
// inline style link
switch {
case i < len(data) && data[i] == '(':
// skip initial whitespace
i++
for i < len(data) && isspace(data[i]) {
i++
}
linkB := i
// look for link end: ' " )
findlinkend:
for i < len(data) {
switch {
case data[i] == '\\':
i += 2
case data[i] == ')' || data[i] == '\'' || data[i] == '"':
break findlinkend
default:
i++
}
}
if i >= len(data) {
return 0
}
linkE := i
// look for title end if present
titleB, titleE := 0, 0
if data[i] == '\'' || data[i] == '"' {
i++
titleB = i
findtitleend:
for i < len(data) {
switch {
case data[i] == '\\':
i += 2
case data[i] == ')':
break findtitleend
default:
i++
}
}
if i >= len(data) {
return 0
}
// skip whitespace after title
titleE = i - 1
for titleE > titleB && isspace(data[titleE]) {
titleE--
}
// check for closing quote presence
if data[titleE] != '\'' && data[titleE] != '"' {
titleB, titleE = 0, 0
linkE = i
}
}
// remove whitespace at the end of the link
for linkE > linkB && isspace(data[linkE-1]) {
linkE--
}
// remove optional angle brackets around the link
if data[linkB] == '<' {
linkB++
}
if data[linkE-1] == '>' {
linkE--
}
// build escaped link and title
if linkE > linkB {
link = data[linkB:linkE]
}
if titleE > titleB {
title = data[titleB:titleE]
}
i++
// reference style link
case i < len(data) && data[i] == '[':
var id []byte
// look for the id
i++
linkB := i
for i < len(data) && data[i] != ']' {
i++
}
if i >= len(data) {
return 0
}
linkE := i
// find the reference
if linkB == linkE {
if textHasNl {
var b bytes.Buffer
for j := 1; j < txtE; j++ {
switch {
case data[j] != '\n':
b.WriteByte(data[j])
case data[j-1] != ' ':
b.WriteByte(' ')
}
}
id = b.Bytes()
} else {
id = data[1:txtE]
}
} else {
id = data[linkB:linkE]
}
// find the reference with matching id (ids are case-insensitive)
key := string(bytes.ToLower(id))
lr, ok := p.refs[key]
if !ok {
return 0
}
// keep link and title from reference
link = lr.link
title = lr.title
i++
// shortcut reference style link or reference or inline footnote
default:
var id []byte
// craft the id
if textHasNl {
var b bytes.Buffer
for j := 1; j < txtE; j++ {
switch {
case data[j] != '\n':
b.WriteByte(data[j])
case data[j-1] != ' ':
b.WriteByte(' ')
}
}
id = b.Bytes()
} else {
if t == linkDeferredFootnote {
id = data[2:txtE] // get rid of the ^
} else {
id = data[1:txtE]
}
}
key := string(bytes.ToLower(id))
if t == linkInlineFootnote {
// create a new reference
noteId = len(p.notes) + 1
var fragment []byte
if len(id) > 0 {
if len(id) < 16 {
fragment = make([]byte, len(id))
} else {
fragment = make([]byte, 16)
}
copy(fragment, slugify(id))
} else {
fragment = append([]byte("footnote-"), []byte(strconv.Itoa(noteId))...)
}
ref := &reference{
noteId: noteId,
hasBlock: false,
link: fragment,
title: id,
}
p.notes = append(p.notes, ref)
link = ref.link
title = ref.title
} else {
// find the reference with matching id
lr, ok := p.refs[key]
if !ok {
return 0
}
if t == linkDeferredFootnote {
lr.noteId = len(p.notes) + 1
p.notes = append(p.notes, lr)
}
// keep link and title from reference
link = lr.link
// if inline footnote, title == footnote contents
title = lr.title
noteId = lr.noteId
}
// rewind the whitespace
i = txtE + 1
}
// build content: img alt is escaped, link content is parsed
var content bytes.Buffer
if txtE > 1 {
if t == linkImg {
content.Write(data[1:txtE])
} else {
// links cannot contain other links, so turn off link parsing temporarily
insideLink := p.insideLink
p.insideLink = true
p.inline(&content, data[1:txtE])
p.insideLink = insideLink
}
}
var uLink []byte
if t == linkNormal || t == linkImg {
if len(link) > 0 {
var uLinkBuf bytes.Buffer
unescapeText(&uLinkBuf, link)
uLink = uLinkBuf.Bytes()
}
// links need something to click on and somewhere to go
if len(uLink) == 0 || (t == linkNormal && content.Len() == 0) {
return 0
}
}
// call the relevant rendering function
switch t {
case linkNormal:
p.r.Link(out, uLink, title, content.Bytes())
case linkImg:
outSize := out.Len()
outBytes := out.Bytes()
if outSize > 0 && outBytes[outSize-1] == '!' {
out.Truncate(outSize - 1)
}
p.r.Image(out, uLink, title, content.Bytes())
case linkInlineFootnote:
outSize := out.Len()
outBytes := out.Bytes()
if outSize > 0 && outBytes[outSize-1] == '^' {
out.Truncate(outSize - 1)
}
p.r.FootnoteRef(out, link, noteId)
case linkDeferredFootnote:
p.r.FootnoteRef(out, link, noteId)
default:
return 0
}
return i
}
// '<' when tags or autolinks are allowed
func leftAngle(p *parser, out *bytes.Buffer, data []byte, offset int) int {
data = data[offset:]
altype := LINK_TYPE_NOT_AUTOLINK
end := tagLength(data, &altype)
if end > 2 {
if altype != LINK_TYPE_NOT_AUTOLINK {
var uLink bytes.Buffer
unescapeText(&uLink, data[1:end+1-2])
if uLink.Len() > 0 {
p.r.AutoLink(out, uLink.Bytes(), altype)
}
} else {
p.r.RawHtmlTag(out, data[:end])
}
}
return end
}
// '\\' backslash escape
var escapeChars = []byte("\\`*_{}[]()#+-.!:|&<>~")
func escape(p *parser, out *bytes.Buffer, data []byte, offset int) int {
data = data[offset:]
if len(data) > 1 {
if bytes.IndexByte(escapeChars, data[1]) < 0 {
return 0
}
p.r.NormalText(out, data[1:2])
}
return 2
}
func unescapeText(ob *bytes.Buffer, src []byte) {
i := 0
for i < len(src) {
org := i
for i < len(src) && src[i] != '\\' {
i++
}
if i > org {
ob.Write(src[org:i])
}
if i+1 >= len(src) {
break
}
ob.WriteByte(src[i+1])
i += 2
}
}
// '&' escaped when it doesn't belong to an entity
// valid entities are assumed to be anything matching &#?[A-Za-z0-9]+;
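// e.g. "&amp;" and "&#169;" are consumed here as entities, while a bare '&'
// with no terminating ';' falls through and is emitted as normal text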
func entity(p *parser, out *bytes.Buffer, data []byte, offset int) int {
data = data[offset:]
end := 1
if end < len(data) && data[end] == '#' {
end++
}
for end < len(data) && isalnum(data[end]) {
end++
}
if end < len(data) && data[end] == ';' {
end++ // real entity
} else {
return 0 // lone '&'
}
p.r.Entity(out, data[:end])
return end
}
func linkEndsWithEntity(data []byte, linkEnd int) bool {
entityRanges := htmlEntity.FindAllIndex(data[:linkEnd], -1)
if entityRanges != nil && entityRanges[len(entityRanges)-1][1] == linkEnd {
return true
}
return false
}
func autoLink(p *parser, out *bytes.Buffer, data []byte, offset int) int {
// quick check to rule out most false hits on ':'
if p.insideLink || len(data) < offset+3 || data[offset+1] != '/' || data[offset+2] != '/' {
return 0
}
// Now a more expensive check to see if we're not inside an anchor element
anchorStart := offset
offsetFromAnchor := 0
for anchorStart > 0 && data[anchorStart] != '<' {
anchorStart--
offsetFromAnchor++
}
anchorStr := anchorRe.Find(data[anchorStart:])
if anchorStr != nil {
out.Write(anchorStr[offsetFromAnchor:])
return len(anchorStr) - offsetFromAnchor
}
// scan backward for a word boundary
rewind := 0
for offset-rewind > 0 && rewind <= 7 && isletter(data[offset-rewind-1]) {
rewind++
}
if rewind > 6 { // longest supported protocol is "mailto" which has 6 letters
return 0
}
origData := data
data = data[offset-rewind:]
if !isSafeLink(data) {
return 0
}
linkEnd := 0
for linkEnd < len(data) && !isEndOfLink(data[linkEnd]) {
linkEnd++
}
// Skip punctuation at the end of the link
if (data[linkEnd-1] == '.' || data[linkEnd-1] == ',') && data[linkEnd-2] != '\\' {
linkEnd--
}
// But don't skip semicolon if it's a part of escaped entity:
if data[linkEnd-1] == ';' && data[linkEnd-2] != '\\' && !linkEndsWithEntity(data, linkEnd) {
linkEnd--
}
// See if the link finishes with a punctuation sign that can be closed.
var copen byte
switch data[linkEnd-1] {
case '"':
copen = '"'
case '\'':
copen = '\''
case ')':
copen = '('
case ']':
copen = '['
case '}':
copen = '{'
default:
copen = 0
}
if copen != 0 {
bufEnd := offset - rewind + linkEnd - 2
openDelim := 1
/* Try to close the final punctuation sign in this same line;
* if we managed to close it outside of the URL, that means that it's
* not part of the URL. If it closes inside the URL, that means it
* is part of the URL.
*
* Examples:
*
* foo http://www.pokemon.com/Pikachu_(Electric) bar
* => http://www.pokemon.com/Pikachu_(Electric)
*
* foo (http://www.pokemon.com/Pikachu_(Electric)) bar
* => http://www.pokemon.com/Pikachu_(Electric)
*
* foo http://www.pokemon.com/Pikachu_(Electric)) bar
* => http://www.pokemon.com/Pikachu_(Electric))
*
* (foo http://www.pokemon.com/Pikachu_(Electric)) bar
* => foo http://www.pokemon.com/Pikachu_(Electric)
*/
for bufEnd >= 0 && origData[bufEnd] != '\n' && openDelim != 0 {
if origData[bufEnd] == data[linkEnd-1] {
openDelim++
}
if origData[bufEnd] == copen {
openDelim--
}
bufEnd--
}
if openDelim == 0 {
linkEnd--
}
}
// we were triggered on the ':', so we need to rewind the output a bit
if out.Len() >= rewind {
out.Truncate(len(out.Bytes()) - rewind)
}
var uLink bytes.Buffer
unescapeText(&uLink, data[:linkEnd])
if uLink.Len() > 0 {
p.r.AutoLink(out, uLink.Bytes(), LINK_TYPE_NORMAL)
}
return linkEnd - rewind
}
func isEndOfLink(char byte) bool {
return isspace(char) || char == '<'
}
var validUris = [][]byte{[]byte("http://"), []byte("https://"), []byte("ftp://"), []byte("mailto://"), []byte("/")}
func isSafeLink(link []byte) bool {
for _, prefix := range validUris {
// TODO: handle unicode here
// case-insensitive prefix test
if len(link) > len(prefix) && bytes.Equal(bytes.ToLower(link[:len(prefix)]), prefix) && isalnum(link[len(prefix)]) {
return true
}
}
return false
}
// return the length of the given tag, or 0 if it's not valid
func tagLength(data []byte, autolink *int) int {
var i, j int
// a valid tag can't be shorter than 3 chars
if len(data) < 3 {
return 0
}
// begins with a '<' optionally followed by '/', followed by letter or number
if data[0] != '<' {
return 0
}
if data[1] == '/' {
i = 2
} else {
i = 1
}
if !isalnum(data[i]) {
return 0
}
// scheme test
*autolink = LINK_TYPE_NOT_AUTOLINK
// try to find the beginning of an URI
for i < len(data) && (isalnum(data[i]) || data[i] == '.' || data[i] == '+' || data[i] == '-') {
i++
}
if i > 1 && i < len(data) && data[i] == '@' {
if j = isMailtoAutoLink(data[i:]); j != 0 {
*autolink = LINK_TYPE_EMAIL
return i + j
}
}
if i > 2 && i < len(data) && data[i] == ':' {
*autolink = LINK_TYPE_NORMAL
i++
}
// complete autolink test: no whitespace or ' or "
switch {
case i >= len(data):
*autolink = LINK_TYPE_NOT_AUTOLINK
case *autolink != 0:
j = i
for i < len(data) {
if data[i] == '\\' {
i += 2
} else if data[i] == '>' || data[i] == '\'' || data[i] == '"' || isspace(data[i]) {
break
} else {
i++
}
}
if i >= len(data) {
return 0
}
if i > j && data[i] == '>' {
return i + 1
}
// one of the forbidden chars has been found
*autolink = LINK_TYPE_NOT_AUTOLINK
}
// look for something looking like a tag end
for i < len(data) && data[i] != '>' {
i++
}
if i >= len(data) {
return 0
}
return i + 1
}
// look for the address part of a mail autolink and '>'
// this is less strict than the original markdown e-mail address matching
func isMailtoAutoLink(data []byte) int {
nb := 0
// address is assumed to be: [-@._a-zA-Z0-9]+ with exactly one '@'
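	// e.g. "[email protected]>" matches (one '@', terminated by '>'), while
	// "user@@example.com>" (two '@') and "user.example.com>" (no '@') do not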
for i := 0; i < len(data); i++ {
if isalnum(data[i]) {
continue
}
switch data[i] {
case '@':
nb++
case '-', '.', '_':
break
case '>':
if nb == 1 {
return i + 1
} else {
return 0
}
default:
return 0
}
}
return 0
}
// look for the next emph char, skipping other constructs
func helperFindEmphChar(data []byte, c byte) int {
i := 1
for i < len(data) {
for i < len(data) && data[i] != c && data[i] != '`' && data[i] != '[' {
i++
}
if i >= len(data) {
return 0
}
if data[i] == c {
return i
}
// do not count escaped chars
if i != 0 && data[i-1] == '\\' {
i++
continue
}
if data[i] == '`' {
// skip a code span
tmpI := 0
i++
for i < len(data) && data[i] != '`' {
if tmpI == 0 && data[i] == c {
tmpI = i
}
i++
}
if i >= len(data) {
return tmpI
}
i++
} else if data[i] == '[' {
// skip a link
tmpI := 0
i++
for i < len(data) && data[i] != ']' {
if tmpI == 0 && data[i] == c {
tmpI = i
}
i++
}
i++
for i < len(data) && (data[i] == ' ' || data[i] == '\n') {
i++
}
if i >= len(data) {
return tmpI
}
if data[i] != '[' && data[i] != '(' { // not a link
if tmpI > 0 {
return tmpI
} else {
continue
}
}
cc := data[i]
i++
for i < len(data) && data[i] != cc {
if tmpI == 0 && data[i] == c {
return i
}
i++
}
if i >= len(data) {
return tmpI
}
i++
}
}
return 0
}
func helperEmphasis(p *parser, out *bytes.Buffer, data []byte, c byte) int {
i := 0
// skip one symbol if coming from emph3
if len(data) > 1 && data[0] == c && data[1] == c {
i = 1
}
for i < len(data) {
length := helperFindEmphChar(data[i:], c)
if length == 0 {
return 0
}
i += length
if i >= len(data) {
return 0
}
if i+1 < len(data) && data[i+1] == c {
i++
continue
}
if data[i] == c && !isspace(data[i-1]) {
if p.flags&EXTENSION_NO_INTRA_EMPHASIS != 0 {
if !(i+1 == len(data) || isspace(data[i+1]) || ispunct(data[i+1])) {
continue
}
}
var work bytes.Buffer
p.inline(&work, data[:i])
p.r.Emphasis(out, work.Bytes())
return i + 1
}
}
return 0
}
func helperDoubleEmphasis(p *parser, out *bytes.Buffer, data []byte, c byte) int {
i := 0
for i < len(data) {
length := helperFindEmphChar(data[i:], c)
if length == 0 {
return 0
}
i += length
if i+1 < len(data) && data[i] == c && data[i+1] == c && i > 0 && !isspace(data[i-1]) {
var work bytes.Buffer
p.inline(&work, data[:i])
if work.Len() > 0 {
// pick the right renderer
if c == '~' {
p.r.StrikeThrough(out, work.Bytes())
} else {
p.r.DoubleEmphasis(out, work.Bytes())
}
}
return i + 2
}
i++
}
return 0
}
func helperTripleEmphasis(p *parser, out *bytes.Buffer, data []byte, offset int, c byte) int { |
i := 0
origData := data
data = data[offset:]
for i < len(data) {
length := helperFindEmphChar(data[i:], c)
if length == 0 {
return 0
}
i += length
		// skip symbols preceded by whitespace
if data[i] != c || isspace(data[i-1]) {
continue
}
switch {
case i+2 < len(data) && data[i+1] == c && data[i+2] == c:
// triple symbol found
var work bytes.Buffer
p.inline(&work, data[:i])
if work.Len() > 0 {
p.r.TripleEmphasis(out, work.Bytes())
}
return i + 3
case (i+1 < len(data) && data[i+1] == c):
// double symbol found, hand over to emph1
length = helperEmphasis(p, out, origData[offset-2:], c)
if length == 0 {
return 0
} else {
return length - 2
}
default:
// single symbol found, hand over to emph2
length = helperDoubleEmphasis(p, out, origData[offset-1:], c)
if length == 0 {
return 0
} else {
return length - 1
}
}
}
return 0
}
|
|
create-web-component-html.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = require("path");
const vamtiger_create_directory_1 = require("vamtiger-create-directory");
const vamtiger_create_file_1 = require("vamtiger-create-file");
const export_empty_string_1 = require("./snippet/export-empty-string");
const { cwd } = process;
const folder = path_1.resolve(cwd(), 'source', 'html');
const htmlPath = path_1.resolve(folder, 'index.html');
const modulePath = path_1.resolve(folder, 'index.ts');
async function default_1() {
await vamtiger_create_directory_1.default(folder);
await Promise.all([
vamtiger_create_file_1.default(htmlPath, ''),
vamtiger_create_file_1.default(modulePath, export_empty_string_1.default)
]); | }
exports.default = default_1;
//# sourceMappingURL=create-web-component-html.js.map | |
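// Illustrative usage (hypothetical; not part of the compiled output):
//
//     require('./create-web-component-html').default()
//         .then(() => console.log('scaffolded source/html/index.html and index.ts'))
//         .catch(console.error);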
instrumentation.py | # orm/instrumentation.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Defines SQLAlchemy's system of class instrumentation.
This module is usually not directly visible to user applications, but
defines a large part of the ORM's interactivity.
instrumentation.py deals with registration of end-user classes
for state tracking. It interacts closely with state.py
and attributes.py which establish per-instance and per-class-attribute
instrumentation, respectively.
The class instrumentation system can be customized on a per-class
or global basis using the :mod:`sqlalchemy.ext.instrumentation`
module, which provides the means to build and specify
alternate instrumentation forms.
.. versionchanged:: 0.8
The instrumentation extension system was moved out of the
ORM and into the external :mod:`sqlalchemy.ext.instrumentation`
package. When that package is imported, it installs
itself within sqlalchemy.orm so that its more comprehensive
resolution mechanics take effect.
"""
from . import base
from . import collections
from . import exc
from . import interfaces
from . import state
from .. import util
_memoized_key_collection = util.group_expirable_memoized_property()
class ClassManager(dict):
"""tracks state information at the class level."""
MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
STATE_ATTR = base.DEFAULT_STATE_ATTR
_state_setter = staticmethod(util.attrsetter(STATE_ATTR))
deferred_scalar_loader = None
original_init = object.__init__
factory = None
def __init__(self, class_):
self.class_ = class_
self.info = {}
self.new_init = None
self.local_attrs = {}
self.originals = {}
self._bases = [
mgr
for mgr in [
manager_of_class(base)
for base in self.class_.__bases__
if isinstance(base, type)
]
if mgr is not None
]
for base_ in self._bases:
self.update(base_)
self.dispatch._events._new_classmanager_instance(class_, self)
# events._InstanceEventsHold.populate(class_, self)
for basecls in class_.__mro__:
mgr = manager_of_class(basecls)
if mgr is not None:
self.dispatch._update(mgr.dispatch)
self.manage()
self._instrument_init()
if "__del__" in class_.__dict__:
util.warn(
"__del__() method on class %s will "
"cause unreachable cycles and memory leaks, "
"as SQLAlchemy instrumentation often creates "
"reference cycles. Please remove this method." % class_
)
def __hash__(self):
return id(self)
def __eq__(self, other):
return other is self
@property
def is_mapped(self):
return "mapper" in self.__dict__
@_memoized_key_collection
def _all_key_set(self):
return frozenset(self)
@_memoized_key_collection
def _collection_impl_keys(self):
return frozenset(
[attr.key for attr in self.values() if attr.impl.collection]
)
@_memoized_key_collection
def _scalar_loader_impls(self):
return frozenset(
[
attr.impl
for attr in self.values()
if attr.impl.accepts_scalar_loader
]
)
@util.memoized_property
def mapper(self):
# raises unless self.mapper has been assigned
raise exc.UnmappedClassError(self.class_)
def _all_sqla_attributes(self, exclude=None):
"""return an iterator of all classbound attributes that are
implement :class:`.InspectionAttr`.
This includes :class:`.QueryableAttribute` as well as extension
types such as :class:`.hybrid_property` and
:class:`.AssociationProxy`.
"""
if exclude is None:
exclude = set()
for supercls in self.class_.__mro__:
for key in set(supercls.__dict__).difference(exclude):
exclude.add(key)
val = supercls.__dict__[key]
if (
isinstance(val, interfaces.InspectionAttr)
and val.is_attribute
):
yield key, val
def _get_class_attr_mro(self, key, default=None):
"""return an attribute on the class without tripping it."""
for supercls in self.class_.__mro__:
if key in supercls.__dict__:
return supercls.__dict__[key]
else:
return default
def _attr_has_impl(self, key):
"""Return True if the given attribute is fully initialized.
i.e. has an impl.
"""
return key in self and self[key].impl is not None
def _subclass_manager(self, cls):
"""Create a new ClassManager for a subclass of this ClassManager's
class.
This is called automatically when attributes are instrumented so that
the attributes can be propagated to subclasses against their own
class-local manager, without the need for mappers etc. to have already
pre-configured managers for the full class hierarchy. Mappers
can post-configure the auto-generated ClassManager when needed.
"""
manager = manager_of_class(cls)
if manager is None:
manager = _instrumentation_factory.create_manager_for_cls(cls)
return manager
def _instrument_init(self):
# TODO: self.class_.__init__ is often the already-instrumented
# __init__ from an instrumented superclass. We still need to make
# our own wrapper, but it would
# be nice to wrap the original __init__ and not our existing wrapper
# of such, since this adds method overhead.
self.original_init = self.class_.__init__
self.new_init = _generate_init(self.class_, self)
self.install_member("__init__", self.new_init)
def _uninstrument_init(self):
if self.new_init:
self.uninstall_member("__init__")
self.new_init = None
@util.memoized_property
def _state_constructor(self):
self.dispatch.first_init(self, self.class_)
return state.InstanceState
def manage(self):
"""Mark this instance as the manager for its class."""
setattr(self.class_, self.MANAGER_ATTR, self)
def dispose(self):
"""Dissasociate this manager from its class."""
delattr(self.class_, self.MANAGER_ATTR)
@util.hybridmethod
def manager_getter(self):
return _default_manager_getter
@util.hybridmethod
def state_getter(self):
"""Return a (instance) -> InstanceState callable.
"state getter" callables should raise either KeyError or
AttributeError if no InstanceState could be found for the
instance.
"""
return _default_state_getter
@util.hybridmethod
def dict_getter(self):
return _default_dict_getter
def instrument_attribute(self, key, inst, propagated=False):
if propagated:
if key in self.local_attrs:
return # don't override local attr with inherited attr
else:
self.local_attrs[key] = inst
self.install_descriptor(key, inst)
_memoized_key_collection.expire_instance(self)
self[key] = inst
for cls in self.class_.__subclasses__():
manager = self._subclass_manager(cls)
manager.instrument_attribute(key, inst, True)
def subclass_managers(self, recursive):
for cls in self.class_.__subclasses__():
mgr = manager_of_class(cls)
if mgr is not None and mgr is not self:
yield mgr
if recursive:
for m in mgr.subclass_managers(True):
yield m
def post_configure_attribute(self, key):
_instrumentation_factory.dispatch.attribute_instrument(
self.class_, key, self[key]
)
def uninstrument_attribute(self, key, propagated=False):
if key not in self:
return
if propagated:
if key in self.local_attrs:
return # don't get rid of local attr
else:
del self.local_attrs[key]
self.uninstall_descriptor(key)
_memoized_key_collection.expire_instance(self)
del self[key]
for cls in self.class_.__subclasses__():
manager = manager_of_class(cls)
if manager:
manager.uninstrument_attribute(key, True)
def unregister(self):
"""remove all instrumentation established by this ClassManager."""
self._uninstrument_init()
self.mapper = self.dispatch = None
self.info.clear()
for key in list(self):
if key in self.local_attrs:
self.uninstrument_attribute(key)
def install_descriptor(self, key, inst):
if key in (self.STATE_ATTR, self.MANAGER_ATTR):
raise KeyError(
"%r: requested attribute name conflicts with "
"instrumentation attribute of the same name." % key
)
setattr(self.class_, key, inst)
def uninstall_descriptor(self, key):
delattr(self.class_, key)
def install_member(self, key, implementation):
if key in (self.STATE_ATTR, self.MANAGER_ATTR):
raise KeyError(
"%r: requested attribute name conflicts with "
"instrumentation attribute of the same name." % key
)
self.originals.setdefault(key, getattr(self.class_, key, None))
setattr(self.class_, key, implementation)
def uninstall_member(self, key):
original = self.originals.pop(key, None)
if original is not None:
setattr(self.class_, key, original)
def instrument_collection_class(self, key, collection_class):
return collections.prepare_instrumentation(collection_class)
def initialize_collection(self, key, state, factory):
user_data = factory()
adapter = collections.CollectionAdapter(
self.get_impl(key), state, user_data
)
return adapter, user_data
def is_instrumented(self, key, search=False):
if search:
return key in self
else:
return key in self.local_attrs
def get_impl(self, key):
return self[key].impl
@property
def attributes(self):
return iter(self.values())
# InstanceState management
def new_instance(self, state=None):
instance = self.class_.__new__(self.class_)
if state is None:
state = self._state_constructor(instance, self)
self._state_setter(instance, state)
return instance
def setup_instance(self, instance, state=None):
if state is None:
state = self._state_constructor(instance, self)
self._state_setter(instance, state)
def teardown_instance(self, instance):
delattr(instance, self.STATE_ATTR)
def _serialize(self, state, state_dict):
return _SerializeManager(state, state_dict)
def _new_state_if_none(self, instance):
"""Install a default InstanceState if none is present.
A private convenience method used by the __init__ decorator.
"""
if hasattr(instance, self.STATE_ATTR):
return False
elif self.class_ is not instance.__class__ and self.is_mapped:
# this will create a new ClassManager for the
# subclass, without a mapper. This is likely a
# user error situation but allow the object
# to be constructed, so that it is usable
# in a non-ORM context at least.
return self._subclass_manager(
instance.__class__
)._new_state_if_none(instance)
else:
state = self._state_constructor(instance, self)
self._state_setter(instance, state)
return state
def has_state(self, instance):
return hasattr(instance, self.STATE_ATTR)
def has_parent(self, state, key, optimistic=False):
"""TODO"""
return self.get_impl(key).hasparent(state, optimistic=optimistic)
def __bool__(self):
"""All ClassManagers are non-zero regardless of attribute state."""
return True
__nonzero__ = __bool__
def __repr__(self):
return "<%s of %r at %x>" % (
self.__class__.__name__,
self.class_,
id(self),
)
class _SerializeManager(object):
"""Provide serialization of a :class:`.ClassManager`.
The :class:`.InstanceState` uses ``__init__()`` on serialize
and ``__call__()`` on deserialize.
"""
def __init__(self, state, d):
self.class_ = state.class_
manager = state.manager
manager.dispatch.pickle(state, d)
def __call__(self, state, inst, state_dict):
state.manager = manager = manager_of_class(self.class_)
if manager is None:
raise exc.UnmappedInstanceError(
inst,
"Cannot deserialize object of type %r - "
"no mapper() has "
"been configured for this class within the current "
"Python process!" % self.class_,
)
elif manager.is_mapped and not manager.mapper.configured:
manager.mapper._configure_all()
# setup _sa_instance_state ahead of time so that
# unpickle events can access the object normally.
# see [ticket:2362]
if inst is not None:
|
manager.dispatch.unpickle(state, state_dict)
class InstrumentationFactory(object):
"""Factory for new ClassManager instances."""
def create_manager_for_cls(self, class_):
assert class_ is not None
assert manager_of_class(class_) is None
# give a more complicated subclass
# a chance to do what it wants here
manager, factory = self._locate_extended_factory(class_)
if factory is None:
factory = ClassManager
manager = factory(class_)
self._check_conflicts(class_, factory)
manager.factory = factory
self.dispatch.class_instrument(class_)
return manager
def _locate_extended_factory(self, class_):
"""Overridden by a subclass to do an extended lookup."""
return None, None
def _check_conflicts(self, class_, factory):
"""Overridden by a subclass to test for conflicting factories."""
return
def unregister(self, class_):
manager = manager_of_class(class_)
manager.unregister()
manager.dispose()
self.dispatch.class_uninstrument(class_)
if ClassManager.MANAGER_ATTR in class_.__dict__:
delattr(class_, ClassManager.MANAGER_ATTR)
# this attribute is replaced by sqlalchemy.ext.instrumentation
# when imported.
_instrumentation_factory = InstrumentationFactory()
# these attributes are replaced by sqlalchemy.ext.instrumentation
# when a non-standard InstrumentationManager class is first
# used to instrument a class.
instance_state = _default_state_getter = base.instance_state
instance_dict = _default_dict_getter = base.instance_dict
manager_of_class = _default_manager_getter = base.manager_of_class
def register_class(class_):
"""Register class instrumentation.
Returns the existing or newly created class manager.
"""
manager = manager_of_class(class_)
if manager is None:
manager = _instrumentation_factory.create_manager_for_cls(class_)
return manager
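# Illustrative usage (not part of the original module; ``MyClass`` is a
# hypothetical class):
#
#     manager = register_class(MyClass)
#     assert manager is manager_of_class(MyClass)
#     assert register_class(MyClass) is manager   # idempotent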
def unregister_class(class_):
"""Unregister class instrumentation."""
_instrumentation_factory.unregister(class_)
def is_instrumented(instance, key):
"""Return True if the given attribute on the given instance is
instrumented by the attributes package.
This function may be used regardless of instrumentation
applied directly to the class, i.e. no descriptors are required.
"""
return manager_of_class(instance.__class__).is_instrumented(
key, search=True
)
def _generate_init(class_, class_manager):
"""Build an __init__ decorator that triggers ClassManager events."""
# TODO: we should use the ClassManager's notion of the
# original '__init__' method, once ClassManager is fixed
# to always reference that.
original__init__ = class_.__init__
assert original__init__
# Go through some effort here and don't change the user's __init__
# calling signature, including the unlikely case that it has
# a return value.
# FIXME: need to juggle local names to avoid constructor argument
# clashes.
func_body = """\
def __init__(%(apply_pos)s):
new_state = class_manager._new_state_if_none(%(self_arg)s)
if new_state:
return new_state._initialize_instance(%(apply_kw)s)
else:
return original__init__(%(apply_kw)s)
"""
func_vars = util.format_argspec_init(original__init__, grouped=False)
func_text = func_body % func_vars
if util.py2k:
func = getattr(original__init__, "im_func", original__init__)
func_defaults = getattr(func, "func_defaults", None)
else:
func_defaults = getattr(original__init__, "__defaults__", None)
func_kw_defaults = getattr(original__init__, "__kwdefaults__", None)
env = locals().copy()
exec(func_text, env)
__init__ = env["__init__"]
__init__.__doc__ = original__init__.__doc__
__init__._sa_original_init = original__init__
if func_defaults:
__init__.__defaults__ = func_defaults
if not util.py2k and func_kw_defaults:
__init__.__kwdefaults__ = func_kw_defaults
return __init__
| manager.setup_instance(inst, state) |
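# Illustrative only (not part of the original module): for a hypothetical
# user class
#
#     class Point(object):
#         def __init__(self, x, y=0): ...
#
# the generated wrapper above is roughly equivalent to
#
#     def __init__(self, x, y=0):
#         new_state = class_manager._new_state_if_none(self)
#         if new_state:
#             return new_state._initialize_instance(self, x, y=y)
#         else:
#             return original__init__(self, x, y=y)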
ecdsa.go |
//<developer>
// <name>linapex 曹一峰</name>
// <email>[email protected]</email>
// <wx>superexc</wx>
// <qqgroup>128148617</qqgroup>
// <url>https://jsq.ink</url>
// <role>pku engineer</role>
// <date>2019-03-16 19:39:52</date>
//</624455938677411840>
/*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package utils
import (
"crypto/ecdsa"
"crypto/elliptic"
"encoding/asn1"
"errors"
"fmt"
"math/big"
)
type ECDSASignature struct {
R, S *big.Int
}
var (
	// curveHalfOrders contains the precomputed curve group orders halved.
	// It is used to ensure that a signature's S value is lower than or equal
	// to the half order of the curve group. We only accept low-S signatures.
	// They are precomputed for efficiency reasons.
curveHalfOrders = map[elliptic.Curve]*big.Int{
elliptic.P224(): new(big.Int).Rsh(elliptic.P224().Params().N, 1),
elliptic.P256(): new(big.Int).Rsh(elliptic.P256().Params().N, 1),
elliptic.P384(): new(big.Int).Rsh(elliptic.P384().Params().N, 1),
elliptic.P521(): new(big.Int).Rsh(elliptic.P521().Params().N, 1),
}
)
func GetCurveHalfOrdersAt(c elliptic.Curve) *big.Int {
return big.NewInt(0).Set(curveHalfOrders[c])
}
func MarshalECDSASignature(r, s *big.Int) ([]byte, error) {
return asn1.Marshal(ECDSASignature{r, s})
}
func UnmarshalECDSASignature(raw []byte) (*big.Int, *big.Int, error) {
	// Unmarshal the raw signature
sig := new(ECDSASignature)
_, err := asn1.Unmarshal(raw, sig)
if err != nil {
		return nil, nil, fmt.Errorf("failed unmarshalling signature [%s]", err)
}
	// Validate S | d, err := ToLowS(k, s)
if err != nil {
return nil, err
}
if modified {
return MarshalECDSASignature(r, s)
}
return signature, nil
}
// IsLowS checks whether s is a low-S value
func IsLowS(k *ecdsa.PublicKey, s *big.Int) (bool, error) {
halfOrder, ok := curveHalfOrders[k.Curve]
if !ok {
return false, fmt.Errorf("curve not recognized [%s]", k.Curve)
}
return s.Cmp(halfOrder) != 1, nil
}
func ToLowS(k *ecdsa.PublicKey, s *big.Int) (*big.Int, bool, error) {
lowS, err := IsLowS(k, s)
if err != nil {
return nil, false, err
}
if !lowS {
		// Set s to N - s so that it falls in the lower half of the signature
		// space, less than or equal to the half order
s.Sub(k.Params().N, s)
return s, true, nil
}
return s, false, nil
}
| IG
if sig.R == nil {
return nil, nil, errors.New("invalid signature, R must be different from nil")
}
if sig.S == nil {
return nil, nil, errors.New("invalid signature, S must be different from nil")
}
if sig.R.Sign() != 1 {
return nil, nil, errors.New("invalid signature, R must be larger than zero")
}
if sig.S.Sign() != 1 {
return nil, nil, errors.New("invalid signature, S must be larger than zero")
}
return sig.R, sig.S, nil
}
func SignatureToLowS(k *ecdsa.PublicKey, signature []byte) ([]byte, error) {
r, s, err := UnmarshalECDSASignature(signature)
if err != nil {
return nil, err
}
s, modifie |
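// Illustrative usage (not part of the original file; priv is a hypothetical
// *ecdsa.PrivateKey): callers typically canonicalize a fresh signature with
//
//	sig, err := SignatureToLowS(&priv.PublicKey, rawSig)
//
// which leaves low-S signatures untouched and rewrites high-S values as N-s.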
iVisual.d.ts | /*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../_references.ts"/>
declare module powerbi {
import DataViewObjectDescriptor = powerbi.data.DataViewObjectDescriptor;
import DataViewObjectDescriptors = powerbi.data.DataViewObjectDescriptors;
import Selector = powerbi.data.Selector;
import IPoint = powerbi.visuals.IPoint;
import ISemanticFilter = powerbi.data.ISemanticFilter;
import ISQExpr = powerbi.data.ISQExpr;
import IStringResourceProvider = jsCommon.IStringResourceProvider;
import IRect = powerbi.visuals.IRect;
/**
* Represents a visualization displayed within an application (PowerBI dashboards, ad-hoc reporting, etc.).
* This interface does not make assumptions about the underlying JS/HTML constructs the visual uses to render itself.
*/
export interface IVisual {
/**
* Initializes an instance of the IVisual.
*
* @param options Initialization options for the visual.
*/
init(options: VisualInitOptions): void;
/** Notifies the visual that it is being destroyed, and to do any cleanup necessary (such as unsubscribing event handlers). */
destroy?(): void;
/**
* Notifies the IVisual of an update (data, viewmode, size change).
*/
update?(options: VisualUpdateOptions): void;
/**
* Notifies the IVisual to resize.
*
* @param finalViewport This is the viewport that the visual will eventually be resized to.
* @param resized true on on final call when resizing is complete.
*/
onResizing?(finalViewport: IViewport, resizeMode?: ResizeMode): void;
/**
* Notifies the IVisual of new data being provided.
* This is an optional method that can be omitted if the visual is in charge of providing its own data.
*/
onDataChanged?(options: VisualDataChangedOptions): void;
/** Notifies the IVisual to change view mode if applicable. */
onViewModeChanged?(viewMode: ViewMode): void;
/** Notifies the IVisual to clear any selection. */
onClearSelection?(): void;
/** Gets a value indicating whether the IVisual can be resized to the given viewport. */
canResizeTo?(viewport: IViewport): boolean;
/** Gets the set of objects that the visual is currently displaying. */
enumerateObjectInstances?(options: EnumerateVisualObjectInstancesOptions): VisualObjectInstanceEnumeration;
/** Gets the set of object repetitions that the visual can display. */
enumerateObjectRepetition?(): VisualObjectRepetition[];
}
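    /**
     * Illustrative sketch (not part of the original declarations): a minimal
     * implementation of this interface; `MinimalVisual` and its rendering
     * details are hypothetical.
     *
     *     class MinimalVisual implements IVisual {
     *         private element: JQuery;
     *         public init(options: VisualInitOptions): void {
     *             this.element = options.element;
     *         }
     *         public update(options: VisualUpdateOptions): void {
     *             this.element.text(options.viewport.width + ' x ' + options.viewport.height);
     *         }
     *     }
     */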
/** Parameters available to a CustomizeQueryMethod */
export interface CustomizeQueryOptions {
/**
* The data view mapping for this visual with some additional information. CustomizeQueryMethod implementations
* are expected to edit this in-place.
*/
dataViewMappings: data.CompiledDataViewMapping[];
/**
* Visual should prefer to request a higher volume of data.
*/
preferHigherDataVolume?: boolean;
}
/** Parameters available to a sortable visual candidate */
export interface VisualSortableOptions {
/* The data view mapping for this visual with some additional information.*/
dataViewMappings: data.CompiledDataViewMapping[];
}
/** An imperative way for a visual to influence query generation beyond just its declared capabilities. */
export interface CustomizeQueryMethod {
(options: CustomizeQueryOptions): void;
}
/** Defines the visual filtering capability for a particular filter kind. */
export interface VisualFilterMapping {
/** Specifies what data roles are used to control the filter semantics for this filter kind. */
targetRoles: string[];
}
/**
* Defines the visual filtering capabilities for various filter kinds.
* By default all visuals support attribute filters and measure filters in their innermost scope.
*/
export interface VisualFilterMappings {
measureFilter?: VisualFilterMapping;
}
/** Defines the capabilities of an IVisual. */
export interface VisualCapabilities {
/** Defines what roles the visual expects, and how those roles should be populated. This is useful for visual generation/editing. */
dataRoles?: VisualDataRole[];
/** Defines the set of objects supported by this IVisual. */
objects?: DataViewObjectDescriptors;
/** Defines how roles that the visual understands map to the DataView. This is useful for query generation. */
dataViewMappings?: DataViewMapping[];
/** Defines how filters are understood by the visual. This is used by query generation */
filterMappings?: VisualFilterMappings;
/** Indicates whether cross-highlight is supported by the visual. This is useful for query generation. */
supportsHighlight?: boolean;
/** Indicates whether the visual uses onSelected function for data selections. Default is true. */
supportsSelection?: boolean;
/** Indicates whether sorting is supported by the visual. This is useful for query generation */
sorting?: VisualSortingCapabilities;
/** Indicates whether a default title should be displayed. Visuals with self-describing layout can omit this. */
suppressDefaultTitle?: boolean;
/** Indicates whether a default padding should be applied. */
suppressDefaultPadding?: boolean;
/** Indicates whether drilling is supported by the visual. */
drilldown?: VisualDrillCapabilities;
/** Indicates whether rotating is supported by the visual. */
canRotate?: boolean;
/** Indicates whether showing the data underlying this visual would be helpful. Visuals that already show raw data can specify this. */
disableVisualDetails?: boolean;
/** Indicates whether focus mode is supported for the visual. Visuals that would not benefit from focus mode (such as non-data-bound ones) can set it to true. */
disableFocusMode?: boolean;
}
/** Defines the visual sorting capability. */
export interface VisualSortingCapabilities {
/** When specified, indicates that the IVisual wants default sorting behavior. */
default?: {};
/** When specified, indicates that the IVisual wants to control sort interactivity. */
custom?: {};
/** When specified, indicates sorting that is inherently implied by the IVisual. This is useful to automatically sort. */
implicit?: VisualImplicitSorting;
}
/** Defines the visual's drill capability. */
export interface VisualDrillCapabilities {
/** Returns the drillable role names for this visual **/
roles?: string[];
}
/** Defines implied sorting behaviour for an IVisual. */
export interface VisualImplicitSorting {
clauses: VisualImplicitSortingClause[];
}
export interface VisualImplicitSortingClause {
role: string;
direction: SortDirection;
}
/** Defines the capabilities of an IVisual. */
export interface VisualInitOptions {
/** The DOM element the visual owns. */
element: JQuery;
/** The set of services provided by the visual hosting layer. */
host: IVisualHostServices;
/** Style information. */
style: IVisualStyle;
/** The initial viewport size. */
viewport: IViewport;
/** Animation options. */
animation?: AnimationOptions;
/** Interactivity options. */
interactivity?: InteractivityOptions;
}
export interface VisualUpdateOptions {
viewport: IViewport;
dataViews: DataView[];
suppressAnimations?: boolean;
viewMode?: ViewMode;
resizeMode?: ResizeMode;
type?: VisualUpdateType;
/** Indicates what type of update has been performed on the data.
The default operation kind is Create.*/
operationKind?: VisualDataChangeOperationKind;
}
export interface VisualDataChangedOptions {
dataViews: DataView[];
/** Optionally prevent animation transitions */
suppressAnimations?: boolean;
/** Indicates what type of update has been performed on the data.
The default operation kind is Create.*/
operationKind?: VisualDataChangeOperationKind;
}
export interface CustomSortEventArgs {
sortDescriptors: SortableFieldDescriptor[];
}
export interface SortableFieldDescriptor {
queryName: string;
sortDirection?: SortDirection;
}
export interface IVisualErrorMessage {
message: string;
title: string;
detail: string;
}
export interface IVisualWarning {
code: string;
getMessages(resourceProvider: IStringResourceProvider): IVisualErrorMessage;
}
/** Animation options for visuals. */
export interface AnimationOptions {
/** Indicates whether all transition frames should be flushed immediately, effectively "disabling" any visual transitions. */
transitionImmediate: boolean;
}
/** Interactivity options for visuals. */
export interface InteractivityOptions {
/** Indicates that dragging of data points should be permitted. */
dragDataPoint?: boolean;
/** Indicates that data points should be selectable. */
selection?: boolean;
/** Indicates that the chart and the legend are interactive */
isInteractiveLegend?: boolean;
/** Indicates overflow behavior. Values are CSS oveflow strings */
overflow?: string;
}
export interface VisualDragPayload extends DragPayload {
data?: Selector;
field?: {};
}
export interface DragEventArgs {
event: DragEvent;
data: VisualDragPayload;
}
/** Defines geocoding services. */
export interface GeocodeOptions {
/** promise that should abort the request when resolved */
timeout?: IPromise<any>;
}
export interface IGeocoder {
geocode(query: string, category?: string, options?: GeocodeOptions): IPromise<IGeocodeCoordinate>;
geocodeBoundary(latitude: number, longitude: number, category: string, levelOfDetail?: number, maxGeoData?: number, options?: GeocodeOptions): IPromise<IGeocodeBoundaryCoordinate>;
geocodePoint(latitude: number, longitude: number, options?: GeocodeOptions): IPromise<IGeocodeResource>;
/** returns data immediately if it is locally available (e.g. in cache), null if not in cache */
tryGeocodeImmediate(query: string, category?: string): IGeocodeCoordinate;
tryGeocodeBoundaryImmediate(latitude: number, longitude: number, category: string, levelOfDetail?: number, maxGeoData?: number): IGeocodeBoundaryCoordinate;
}
export interface IGeocodeCoordinate {
latitude: number;
longitude: number;
}
export interface IGeocodeBoundaryCoordinate {
latitude?: number;
longitude?: number;
locations?: IGeocodeBoundaryPolygon[]; // one location can have multiple boundary polygons
}
export interface IGeocodeResource extends IGeocodeCoordinate {
addressLine: string;
locality: string;
neighborhood: string;
adminDistrict: string;
adminDistrict2: string;
formattedAddress: string;
postalCode: string;
countryRegionIso2: string;
countryRegion: string;
landmark: string;
name: string;
}
export interface IGeocodeBoundaryPolygon {
nativeBing: string;
/** array of lat/long pairs as [lat1, long1, lat2, long2,...] */
geographic?: Float64Array;
/** array of absolute pixel position pairs [x1,y1,x2,y2,...]. It can be used by the client for cache the data. */
absolute?: Float64Array;
absoluteBounds?: IRect;
/** string of absolute pixel position pairs "x1 y1 x2 y2...". It can be used by the client for cache the data. */
absoluteString?: string;
}
export interface SelectorForColumn { |
export interface SelectorsByColumn {
/** Data-bound repetition selection. */
dataMap?: SelectorForColumn;
/** Metadata-bound repetition selection. Refers to a DataViewMetadataColumn queryName. */
metadata?: string;
/** User-defined repetition selection. */
id?: string;
}
export interface SelectingEventArgs {
visualObjects: VisualObject[];
action?: VisualInteractivityAction;
}
export interface SelectEventArgs {
visualObjects: VisualObject[];
selectors?: Selector[]; // An array of selectors used in place of visualObjects for certain backwards compatibility cases
}
export interface VisualObject {
/** The name of the object (as defined in object descriptors). */
objectName: string;
        /** Data-bound repetition selection */
selectorsByColumn: SelectorsByColumn;
}
export interface ContextMenuArgs {
data: SelectorsByColumn[];
/** Absolute coordinates for the top-left anchor of the context menu. */
position: IPoint;
}
export interface SelectObjectEventArgs {
object: DataViewObjectDescriptor;
}
export interface FilterAnalyzerOptions {
dataView: DataView;
/** The DataViewObjectPropertyIdentifier for default value */
defaultValuePropertyId: DataViewObjectPropertyIdentifier;
/** The filter that will be analyzed */
filter: ISemanticFilter;
/** The field SQExprs used in the filter */
fieldSQExprs: ISQExpr[];
}
export interface AnalyzedFilter {
/** The default value of the slicer selected item and it can be undefined if there is no default value */
defaultValue?: DefaultValueDefinition;
/** Indicates the filter has Not condition. */
isNotFilter: boolean;
/** The selected filter values. */
selectedIdentities: DataViewScopeIdentity[];
/** The filter after analyzed. It will be the default filter if it has defaultValue and the pre-analyzed filter is undefined. */
filter: ISemanticFilter;
}
export interface VisualTooltipShowEventArgs extends VisualTooltipMoveEventArgs {
dataItems: VisualTooltipDataItem[];
}
export interface VisualTooltipMoveEventArgs {
coordinates: number[];
isTouchEvent: boolean;
dataItems?: VisualTooltipDataItem[];
identities: SelectorsByColumn[];
}
export interface VisualTooltipHideEventArgs {
isTouchEvent: boolean;
immediately: boolean;
}
export interface VisualTooltipDataItem {
displayName: string;
value: string;
color?: string;
header?: string;
opacity?: string;
}
export interface IVisualHostTooltipService {
/** Show a tooltip. */
show(args: VisualTooltipShowEventArgs): void;
/** Move a visible tooltip. */
move(args: VisualTooltipMoveEventArgs): void;
/** Hide a tooltip. */
hide(args: VisualTooltipHideEventArgs): void;
/** Gets the container that tooltip elements will be appended to. */
container(): Element;
/** Indicates if tooltips are enabled or not. */
enabled(): boolean;
}
/** Defines behavior for IVisual interaction with the host environment. */
export interface IVisualHostServices {
/** Returns the localized form of a string. */
getLocalizedString(stringId: string): string;
/** Notifies of a DragStart event. */
onDragStart(args: DragEventArgs): void;
///** Indicates whether the drag payload is compatible with the IVisual's data role. This is useful when dropping to a particular drop area within the visual (e.g., dropping on a legend). */
//canDropAs(payload: DragPayload, dataRole?: string): boolean;
///** Notifies of a Drop event. */
//onDrop(args: DragEventArgs, dataRole?: string);
/** Gets a value indicating whether the given selection is valid. */
canSelect(args: SelectEventArgs): boolean;
/** Notifies of the execution of a select event. */
onSelecting(args: SelectingEventArgs): void;
/** Notifies of the selection state changing. */
onSelect(args: SelectEventArgs): void;
/** Notifies of a request for a context menu. */
onContextMenu(args: ContextMenuArgs): void;
/** Check if selection is sticky or otherwise. */
shouldRetainSelection(): boolean;
/** Notifies that properties of the IVisual have changed. */
persistProperties(changes: VisualObjectInstance[]): void;
persistProperties(changes: VisualObjectInstancesToPersist): void;
///** This information will be part of the query. */
//onDataRangeChanged(range: {
// categorical: { // TODO: this structure is affected by the reduction algorithm as well as the data view type
// categories?: {
// /** Index of the category. */
// index: number;
// lower?: DataViewScopeIdentity;
// upper?: DataViewScopeIdentity;
// }[]
// }
// });
///** Notifies of a drill down on the specified data point. */
//onDrillDown(data: DataViewScopeIdentity): void;
/** Requests more data to be loaded. */
loadMoreData(): void;
/** Notification to sort on the specified column */
onCustomSort(args: CustomSortEventArgs): void;
/** Indicates which view mode the host is in. */
getViewMode(): ViewMode;
/** Notify any warning that happened during update of the visual. */
setWarnings(clientWarnings: IVisualWarning[]): void;
/** Sets a toolbar on the host. */
setToolbar($selector: JQuery): void;
/** Gets Geocoding Service. */
geocoder(): IGeocoder;
/** Gets IGeolocation Service */
geolocation(): IGeolocation;
/** Gets the locale string */
locale?(): string;
/** Gets the promise factory. */
promiseFactory(): IPromiseFactory;
/** Gets filter analyzer */
analyzeFilter(options: FilterAnalyzerOptions): AnalyzedFilter;
/** Gets display name for the identities */
getIdentityDisplayNames(identities: DataViewScopeIdentity[]): DisplayNameIdentityPair[];
/** Set the display names for their corresponding DataViewScopeIdentity */
setIdentityDisplayNames(displayNamesIdentityPairs: DisplayNameIdentityPair[]): void;
visualCapabilitiesChanged?(): void;
/**
* Gets the tooltip service.
* NOTE: This is a preview API.
*/
tooltips(): IVisualHostTooltipService;
}
export interface DisplayNameIdentityPair {
displayName: string;
identity: DataViewScopeIdentity;
}
} | [queryName: string]: data.DataRepetitionSelector;
} |
qualify_consts.rs | //! A pass that qualifies constness of temporaries in constants,
//! static initializers and functions and also drives promotion.
//!
//! The Qualif flags below can be used to also provide better
//! diagnostics as to why a constant rvalue wasn't promoted.
use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::fx::FxHashSet;
use rustc_target::spec::abi::Abi;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::traits::{self, TraitEngine};
use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
use rustc::ty::cast::CastTy;
use rustc::ty::query::Providers;
use rustc::mir::*;
use rustc::mir::interpret::ConstValue;
use rustc::mir::traversal::ReversePostorder;
use rustc::mir::visit::{PlaceContext, Visitor, MutatingUseContext, NonMutatingUseContext};
use rustc::middle::lang_items;
use rustc::session::config::nightly_options;
use syntax::ast::LitKind;
use syntax::feature_gate::{emit_feature_err, GateIssue};
use syntax::symbol::sym;
use syntax_pos::{Span, DUMMY_SP};
use std::borrow::Cow;
use std::cell::Cell;
use std::fmt;
use std::ops::{Deref, Index, IndexMut};
use std::usize;
use rustc::hir::HirId;
use crate::transform::{MirPass, MirSource};
use super::promote_consts::{self, Candidate, TempState};
/// What kind of item we are in.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Mode {
/// A `static` item.
Static,
/// A `static mut` item.
StaticMut,
/// A `const fn` item.
ConstFn,
/// A `const` item or an anonymous constant (e.g. in array lengths).
Const,
/// Other type of `fn`.
NonConstFn,
}
impl Mode {
/// Determine whether we have to do full const-checking because syntactically, we
/// are required to be "const".
#[inline]
fn requires_const_checking(self) -> bool {
self != Mode::NonConstFn
}
}
impl fmt::Display for Mode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Mode::Const => write!(f, "constant"),
Mode::Static | Mode::StaticMut => write!(f, "static"),
Mode::ConstFn => write!(f, "constant function"),
Mode::NonConstFn => write!(f, "function")
}
}
}
const QUALIF_COUNT: usize = 4;
// FIXME(eddyb) once we can use const generics, replace this array with
// something like `IndexVec` but for fixed-size arrays (`IndexArray`?).
#[derive(Copy, Clone, Default)]
struct PerQualif<T>([T; QUALIF_COUNT]);
impl<T: Clone> PerQualif<T> {
fn new(x: T) -> Self {
PerQualif([x.clone(), x.clone(), x.clone(), x])
}
}
impl<T> PerQualif<T> {
fn as_mut(&mut self) -> PerQualif<&mut T> {
let [x0, x1, x2, x3] = &mut self.0;
PerQualif([x0, x1, x2, x3])
}
fn zip<U>(self, other: PerQualif<U>) -> PerQualif<(T, U)> {
let [x0, x1, x2, x3] = self.0;
let [y0, y1, y2, y3] = other.0;
PerQualif([(x0, y0), (x1, y1), (x2, y2), (x3, y3)])
}
}
impl PerQualif<bool> {
fn encode_to_bits(self) -> u8 {
self.0.iter().enumerate().fold(0, |bits, (i, &qualif)| {
bits | ((qualif as u8) << i)
})
}
fn decode_from_bits(bits: u8) -> Self {
let mut qualifs = Self::default();
for (i, qualif) in qualifs.0.iter_mut().enumerate() {
*qualif = (bits & (1 << i)) != 0;
}
qualifs
}
}
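// Worked example (illustrative): `PerQualif([true, false, true, false])`
// encodes to `0b0101`, and `decode_from_bits(0b0101)` restores the same flags.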
impl<Q: Qualif, T> Index<Q> for PerQualif<T> {
type Output = T;
fn index(&self, _: Q) -> &T {
&self.0[Q::IDX]
}
}
impl<Q: Qualif, T> IndexMut<Q> for PerQualif<T> {
fn index_mut(&mut self, _: Q) -> &mut T {
&mut self.0[Q::IDX]
}
}
struct ConstCx<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
mode: Mode,
body: &'a Body<'tcx>,
per_local: PerQualif<BitSet<Local>>,
}
impl<'a, 'tcx> ConstCx<'a, 'tcx> {
fn is_const_panic_fn(&self, def_id: DefId) -> bool {
Some(def_id) == self.tcx.lang_items().panic_fn() ||
Some(def_id) == self.tcx.lang_items().begin_panic_fn()
}
}
#[derive(Copy, Clone, Debug)]
enum ValueSource<'a, 'tcx> {
Rvalue(&'a Rvalue<'tcx>),
DropAndReplace(&'a Operand<'tcx>),
Call {
callee: &'a Operand<'tcx>,
args: &'a [Operand<'tcx>],
return_ty: Ty<'tcx>,
},
}
/// A "qualif"(-ication) is a way to look for something "bad" in the MIR that would disqualify some
/// code for promotion or prevent it from evaluating at compile time. So `return true` means
/// "I found something bad, no reason to go on searching". `false` is only returned if we
/// definitely cannot find anything bad anywhere.
///
/// The default implementations proceed structurally.
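/// For example, a qualif tracking drop obligations (such as the `NeedsDrop`
/// qualif used by this pass) returns `true` as soon as it sees a value whose
/// type may need dropping, which disqualifies that value from promotion.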
trait Qualif {
const IDX: usize;
/// Return the qualification that is (conservatively) correct for any value
/// of the type, or `None` if the qualification is not value/type-based.
fn in_any_value_of_ty(_cx: &ConstCx<'_, 'tcx>, _ty: Ty<'tcx>) -> Option<bool> {
None
}
/// Return a mask for the qualification, given a type. This is `false` iff
/// no value of that type can have the qualification.
fn mask_for_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> bool {
Self::in_any_value_of_ty(cx, ty).unwrap_or(true)
}
fn in_local(cx: &ConstCx<'_, '_>, local: Local) -> bool {
cx.per_local.0[Self::IDX].contains(local)
}
fn in_static(_cx: &ConstCx<'_, 'tcx>, _static: &Static<'tcx>) -> bool {
// FIXME(eddyb) should we do anything here for value properties?
false
}
fn in_projection_structurally(
cx: &ConstCx<'_, 'tcx>,
place: PlaceRef<'_, 'tcx>,
) -> bool {
if let [proj_base @ .., elem] = place.projection {
let base_qualif = Self::in_place(cx, PlaceRef {
base: place.base,
projection: proj_base,
});
let qualif = base_qualif && Self::mask_for_ty(
cx,
Place::ty_from(place.base, proj_base, cx.body, cx.tcx)
.projection_ty(cx.tcx, elem)
.ty,
);
match elem {
ProjectionElem::Deref |
ProjectionElem::Subslice { .. } |
ProjectionElem::Field(..) |
ProjectionElem::ConstantIndex { .. } |
ProjectionElem::Downcast(..) => qualif,
ProjectionElem::Index(local) => qualif || Self::in_local(cx, *local),
}
} else {
bug!("This should be called if projection is not empty");
}
}
fn in_projection(
cx: &ConstCx<'_, 'tcx>,
place: PlaceRef<'_, 'tcx>,
) -> bool {
Self::in_projection_structurally(cx, place)
}
fn in_place(cx: &ConstCx<'_, 'tcx>, place: PlaceRef<'_, 'tcx>) -> bool {
match place {
PlaceRef {
base: PlaceBase::Local(local),
projection: [],
} => Self::in_local(cx, *local),
PlaceRef {
base: PlaceBase::Static(box Static {
kind: StaticKind::Promoted(..),
..
}),
projection: [],
} => bug!("qualifying already promoted MIR"),
PlaceRef {
base: PlaceBase::Static(static_),
projection: [],
} => {
Self::in_static(cx, static_)
},
PlaceRef {
base: _,
projection: [.., _],
} => Self::in_projection(cx, place),
}
}
fn in_operand(cx: &ConstCx<'_, 'tcx>, operand: &Operand<'tcx>) -> bool {
match *operand {
Operand::Copy(ref place) |
Operand::Move(ref place) => Self::in_place(cx, place.as_ref()),
Operand::Constant(ref constant) => {
if let ConstValue::Unevaluated(def_id, _) = constant.literal.val {
// Don't peek inside trait associated constants.
if cx.tcx.trait_of_item(def_id).is_some() {
Self::in_any_value_of_ty(cx, constant.literal.ty).unwrap_or(false)
} else {
let (bits, _) = cx.tcx.at(constant.span).mir_const_qualif(def_id);
let qualif = PerQualif::decode_from_bits(bits).0[Self::IDX];
// Just in case the type is more specific than
// the definition, e.g., impl associated const
// with type parameters, take it into account.
qualif && Self::mask_for_ty(cx, constant.literal.ty)
}
} else {
false
}
}
}
}
fn in_rvalue_structurally(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
match *rvalue {
Rvalue::NullaryOp(..) => false,
Rvalue::Discriminant(ref place) |
Rvalue::Len(ref place) => Self::in_place(cx, place.as_ref()),
Rvalue::Use(ref operand) |
Rvalue::Repeat(ref operand, _) |
Rvalue::UnaryOp(_, ref operand) |
Rvalue::Cast(_, ref operand, _) => Self::in_operand(cx, operand),
Rvalue::BinaryOp(_, ref lhs, ref rhs) |
Rvalue::CheckedBinaryOp(_, ref lhs, ref rhs) => {
Self::in_operand(cx, lhs) || Self::in_operand(cx, rhs)
}
Rvalue::Ref(_, _, ref place) => {
// Special-case reborrows to be more like a copy of the reference.
if let box [proj_base @ .., elem] = &place.projection {
if ProjectionElem::Deref == *elem {
let base_ty = Place::ty_from(&place.base, proj_base, cx.body, cx.tcx).ty;
if let ty::Ref(..) = base_ty.sty {
return Self::in_place(cx, PlaceRef {
base: &place.base,
projection: proj_base,
});
}
}
}
Self::in_place(cx, place.as_ref())
}
Rvalue::Aggregate(_, ref operands) => {
operands.iter().any(|o| Self::in_operand(cx, o))
}
}
}
fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
Self::in_rvalue_structurally(cx, rvalue)
}
fn in_call(
cx: &ConstCx<'_, 'tcx>,
_callee: &Operand<'tcx>,
_args: &[Operand<'tcx>],
return_ty: Ty<'tcx>,
) -> bool {
// Be conservative about the returned value of a const fn.
Self::in_any_value_of_ty(cx, return_ty).unwrap_or(false)
}
fn in_value(cx: &ConstCx<'_, 'tcx>, source: ValueSource<'_, 'tcx>) -> bool {
match source {
ValueSource::Rvalue(rvalue) => Self::in_rvalue(cx, rvalue),
ValueSource::DropAndReplace(source) => Self::in_operand(cx, source),
ValueSource::Call { callee, args, return_ty } => {
Self::in_call(cx, callee, args, return_ty)
}
}
}
}
/// Constant containing interior mutability (`UnsafeCell<T>`).
/// This must be ruled out to make sure that evaluating the constant at compile-time
/// and at *any point* during the run-time would produce the same result. In particular,
/// promotion of temporaries must not change program behavior; if the promoted could be
/// written to, that would be a problem.
struct HasMutInterior;
impl Qualif for HasMutInterior {
const IDX: usize = 0;
fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> Option<bool> {
Some(!ty.is_freeze(cx.tcx, cx.param_env, DUMMY_SP))
}
fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
match *rvalue {
// Returning `true` for `Rvalue::Ref` indicates the borrow isn't
// allowed in constants (and the `Checker` will error), and/or it
// won't be promoted, due to `&mut ...` or interior mutability.
Rvalue::Ref(_, kind, ref place) => {
let ty = place.ty(cx.body, cx.tcx).ty;
if let BorrowKind::Mut { .. } = kind {
// In theory, any zero-sized value could be borrowed
// mutably without consequences. However, only &mut []
// is allowed right now, and only in functions.
if cx.mode == Mode::StaticMut {
// Inside a `static mut`, &mut [...] is also allowed.
match ty.sty {
ty::Array(..) | ty::Slice(_) => {}
_ => return true,
}
} else if let ty::Array(_, len) = ty.sty {
// FIXME(eddyb) the `cx.mode == Mode::NonConstFn` condition
// seems unnecessary, given that this is merely a ZST.
match len.try_eval_usize(cx.tcx, cx.param_env) {
Some(0) if cx.mode == Mode::NonConstFn => {},
_ => return true,
}
} else {
return true;
}
}
}
Rvalue::Aggregate(ref kind, _) => {
if let AggregateKind::Adt(def, ..) = **kind {
if Some(def.did) == cx.tcx.lang_items().unsafe_cell_type() {
let ty = rvalue.ty(cx.body, cx.tcx);
assert_eq!(Self::in_any_value_of_ty(cx, ty), Some(true));
return true;
}
}
}
_ => {}
}
Self::in_rvalue_structurally(cx, rvalue)
}
}
/// Constant containing an ADT that implements `Drop`.
/// This must be ruled out (a) because we cannot run `Drop` during compile-time
/// as that might not be a `const fn`, and (b) because implicit promotion would
/// remove side-effects that occur as part of dropping that value.
struct NeedsDrop;
impl Qualif for NeedsDrop {
const IDX: usize = 1;
fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> Option<bool> {
Some(ty.needs_drop(cx.tcx, cx.param_env))
}
fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
if let Rvalue::Aggregate(ref kind, _) = *rvalue {
if let AggregateKind::Adt(def, ..) = **kind {
if def.has_dtor(cx.tcx) {
return true;
}
}
}
Self::in_rvalue_structurally(cx, rvalue)
}
}
/// Not promotable at all - non-`const fn` calls, `asm!`,
/// pointer comparisons, ptr-to-int casts, etc.
/// Inside a const context all constness rules apply, so promotion simply has to follow the regular
/// constant rules (modulo interior mutability or `Drop` rules, which are handled by `HasMutInterior`
/// and `NeedsDrop` respectively). Basically this duplicates the checks that the const-checking
/// visitor enforces by emitting errors when working in const context.
struct IsNotPromotable;
impl Qualif for IsNotPromotable {
const IDX: usize = 2;
fn in_static(cx: &ConstCx<'_, 'tcx>, static_: &Static<'tcx>) -> bool {
match static_.kind {
StaticKind::Promoted(_, _) => unreachable!(),
StaticKind::Static => {
// Only allow statics (not consts) to refer to other statics.
let allowed = cx.mode == Mode::Static || cx.mode == Mode::StaticMut;
!allowed ||
cx.tcx.get_attrs(static_.def_id).iter().any(
|attr| attr.check_name(sym::thread_local)
)
}
}
}
fn in_projection(
cx: &ConstCx<'_, 'tcx>,
place: PlaceRef<'_, 'tcx>,
) -> bool {
if let [proj_base @ .., elem] = place.projection {
match elem {
ProjectionElem::Deref |
ProjectionElem::Downcast(..) => return true,
ProjectionElem::ConstantIndex {..} |
ProjectionElem::Subslice {..} |
ProjectionElem::Index(_) => {}
ProjectionElem::Field(..) => {
if cx.mode == Mode::NonConstFn {
let base_ty = Place::ty_from(place.base, proj_base, cx.body, cx.tcx).ty;
if let Some(def) = base_ty.ty_adt_def() {
// No promotion of union field accesses.
if def.is_union() {
return true;
}
}
}
}
}
Self::in_projection_structurally(cx, place)
} else {
bug!("This should be called if projection is not empty");
}
}
fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
match *rvalue {
Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) if cx.mode == Mode::NonConstFn => {
let operand_ty = operand.ty(cx.body, cx.tcx);
let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
match (cast_in, cast_out) {
(CastTy::Ptr(_), CastTy::Int(_)) |
(CastTy::FnPtr, CastTy::Int(_)) => {
// in normal functions, mark such casts as not promotable
return true;
}
_ => {}
}
}
Rvalue::BinaryOp(op, ref lhs, _) if cx.mode == Mode::NonConstFn => {
if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(cx.body, cx.tcx).sty {
assert!(op == BinOp::Eq || op == BinOp::Ne ||
op == BinOp::Le || op == BinOp::Lt ||
op == BinOp::Ge || op == BinOp::Gt ||
op == BinOp::Offset);
// raw pointer operations are not allowed inside promoteds
return true;
}
}
Rvalue::NullaryOp(NullOp::Box, _) => return true,
_ => {}
}
Self::in_rvalue_structurally(cx, rvalue)
}
fn in_call(
cx: &ConstCx<'_, 'tcx>,
callee: &Operand<'tcx>,
args: &[Operand<'tcx>],
_return_ty: Ty<'tcx>,
) -> bool {
let fn_ty = callee.ty(cx.body, cx.tcx);
match fn_ty.sty {
ty::FnDef(def_id, _) => {
match cx.tcx.fn_sig(def_id).abi() {
Abi::RustIntrinsic |
Abi::PlatformIntrinsic => {
assert!(!cx.tcx.is_const_fn(def_id));
match &cx.tcx.item_name(def_id).as_str()[..] {
| "size_of"
| "min_align_of"
| "needs_drop"
| "type_id"
| "bswap"
| "bitreverse"
| "ctpop"
| "cttz"
| "cttz_nonzero"
| "ctlz"
| "ctlz_nonzero"
| "wrapping_add"
| "wrapping_sub"
| "wrapping_mul"
| "unchecked_shl"
| "unchecked_shr"
| "rotate_left"
| "rotate_right"
| "add_with_overflow"
| "sub_with_overflow"
| "mul_with_overflow"
| "saturating_add"
| "saturating_sub"
| "transmute"
=> return true,
_ => {}
}
}
_ => {
let is_const_fn =
cx.tcx.is_const_fn(def_id) ||
cx.tcx.is_unstable_const_fn(def_id).is_some() ||
cx.is_const_panic_fn(def_id);
if !is_const_fn {
return true;
}
}
}
}
_ => return true,
}
Self::in_operand(cx, callee) || args.iter().any(|arg| Self::in_operand(cx, arg))
}
}
/// Refers to temporaries which cannot be promoted *implicitly*.
/// Explicit promotion happens e.g. for constant arguments declared via `rustc_args_required_const`.
/// Implicit promotion has almost the same rules, except that it additionally disallows `const fn`
/// calls, other than those marked `#[rustc_promotable]`. This is to avoid changing a legitimate run-time operation
/// into a failing compile-time operation e.g. due to addresses being compared inside the function.
struct IsNotImplicitlyPromotable;
impl Qualif for IsNotImplicitlyPromotable {
const IDX: usize = 3;
fn in_call(
cx: &ConstCx<'_, 'tcx>,
callee: &Operand<'tcx>,
args: &[Operand<'tcx>],
_return_ty: Ty<'tcx>,
) -> bool {
if cx.mode == Mode::NonConstFn {
if let ty::FnDef(def_id, _) = callee.ty(cx.body, cx.tcx).sty {
// Never promote runtime `const fn` calls of
// functions without `#[rustc_promotable]`.
if !cx.tcx.is_promotable_const_fn(def_id) {
return true;
}
}
}
Self::in_operand(cx, callee) || args.iter().any(|arg| Self::in_operand(cx, arg))
}
}
// Ensure the `IDX` values are sequential (`0..QUALIF_COUNT`).
macro_rules! static_assert_seq_qualifs {
($i:expr => $first:ident $(, $rest:ident)*) => {
static_assert!({
static_assert_seq_qualifs!($i + 1 => $($rest),*);
$first::IDX == $i
});
};
($i:expr =>) => {
static_assert!(QUALIF_COUNT == $i);
};
}
static_assert_seq_qualifs!(
0 => HasMutInterior, NeedsDrop, IsNotPromotable, IsNotImplicitlyPromotable
);
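// Added note: the invocation above expands to compile-time checks equivalent to
// `HasMutInterior::IDX == 0`, `NeedsDrop::IDX == 1`, `IsNotPromotable::IDX == 2`,
// `IsNotImplicitlyPromotable::IDX == 3`, and finally `QUALIF_COUNT == 4`.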
impl ConstCx<'_, 'tcx> {
fn qualifs_in_any_value_of_ty(&self, ty: Ty<'tcx>) -> PerQualif<bool> {
let mut qualifs = PerQualif::default();
qualifs[HasMutInterior] = HasMutInterior::in_any_value_of_ty(self, ty).unwrap_or(false);
qualifs[NeedsDrop] = NeedsDrop::in_any_value_of_ty(self, ty).unwrap_or(false);
qualifs[IsNotPromotable] = IsNotPromotable::in_any_value_of_ty(self, ty).unwrap_or(false);
qualifs[IsNotImplicitlyPromotable] =
IsNotImplicitlyPromotable::in_any_value_of_ty(self, ty).unwrap_or(false);
qualifs
}
fn qualifs_in_local(&self, local: Local) -> PerQualif<bool> {
let mut qualifs = PerQualif::default();
qualifs[HasMutInterior] = HasMutInterior::in_local(self, local);
qualifs[NeedsDrop] = NeedsDrop::in_local(self, local);
qualifs[IsNotPromotable] = IsNotPromotable::in_local(self, local);
qualifs[IsNotImplicitlyPromotable] = IsNotImplicitlyPromotable::in_local(self, local);
qualifs
}
fn qualifs_in_value(&self, source: ValueSource<'_, 'tcx>) -> PerQualif<bool> {
let mut qualifs = PerQualif::default();
qualifs[HasMutInterior] = HasMutInterior::in_value(self, source);
qualifs[NeedsDrop] = NeedsDrop::in_value(self, source);
qualifs[IsNotPromotable] = IsNotPromotable::in_value(self, source);
qualifs[IsNotImplicitlyPromotable] = IsNotImplicitlyPromotable::in_value(self, source);
qualifs
}
}
/// Checks MIR for being admissible as a compile-time constant, using `ConstCx`
/// for value qualifications, and accumulates writes of
/// rvalue/call results to locals, in `local_qualif`.
/// It also records candidates for promotion in `promotion_candidates`,
/// both in functions and const/static items.
struct Checker<'a, 'tcx> {
cx: ConstCx<'a, 'tcx>,
span: Span,
def_id: DefId,
rpo: ReversePostorder<'a, 'tcx>,
temp_promotion_state: IndexVec<Local, TempState>,
promotion_candidates: Vec<Candidate>,
}
macro_rules! unleash_miri {
($this:expr) => {{
if $this.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
$this.tcx.sess.span_warn($this.span, "skipping const checks");
return;
}
}}
}
impl Deref for Checker<'a, 'tcx> {
type Target = ConstCx<'a, 'tcx>;
fn deref(&self) -> &Self::Target {
&self.cx
}
}
impl<'a, 'tcx> Checker<'a, 'tcx> {
fn new(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>, mode: Mode) -> Self {
assert!(def_id.is_local());
let mut rpo = traversal::reverse_postorder(body);
let temps = promote_consts::collect_temps(body, &mut rpo);
rpo.reset();
let param_env = tcx.param_env(def_id);
let mut cx = ConstCx {
tcx,
param_env,
mode,
body,
per_local: PerQualif::new(BitSet::new_empty(body.local_decls.len())),
};
for (local, decl) in body.local_decls.iter_enumerated() {
if let LocalKind::Arg = body.local_kind(local) {
let qualifs = cx.qualifs_in_any_value_of_ty(decl.ty);
for (per_local, qualif) in &mut cx.per_local.as_mut().zip(qualifs).0 {
if *qualif {
per_local.insert(local);
}
}
}
if !temps[local].is_promotable() {
cx.per_local[IsNotPromotable].insert(local);
}
if let LocalKind::Var = body.local_kind(local) {
// Sanity check to prevent implicit and explicit promotion of
// named locals
assert!(cx.per_local[IsNotPromotable].contains(local));
}
}
Checker {
cx,
span: body.span,
def_id,
rpo,
temp_promotion_state: temps,
promotion_candidates: vec![]
}
}
// FIXME(eddyb) we could split the errors into meaningful
// categories, but enabling full miri would make that
// slightly pointless (even with feature-gating).
fn not_const(&mut self) {
unleash_miri!(self);
if self.mode.requires_const_checking() {
let mut err = struct_span_err!(
self.tcx.sess,
self.span,
E0019,
"{} contains unimplemented expression type",
self.mode
);
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note("A function call isn't allowed in the const's initialization expression \
because the expression's value must be known at compile-time.");
err.note("Remember: you can't use a function call inside a const's initialization \
expression! However, you can use it anywhere else.");
}
err.emit();
}
}
/// Assigns an rvalue/call qualification to the given destination.
fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location: Location) {
trace!("assign: {:?} <- {:?}", dest, source);
let mut qualifs = self.qualifs_in_value(source);
match source {
ValueSource::Rvalue(&Rvalue::Ref(_, kind, ref place)) => {
// Getting `true` from `HasMutInterior::in_rvalue` means
// the borrowed place is disallowed from being borrowed,
// due to either a mutable borrow (with some exceptions),
// or a shared borrow of a value with interior mutability.
// Then `HasMutInterior` is replaced with `IsNotPromotable`,
// to avoid duplicate errors (e.g. from reborrowing).
if qualifs[HasMutInterior] {
qualifs[HasMutInterior] = false;
qualifs[IsNotPromotable] = true;
if self.mode.requires_const_checking() {
if !self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
if let BorrowKind::Mut { .. } = kind {
let mut err = struct_span_err!(self.tcx.sess, self.span, E0017,
"references in {}s may only refer \
to immutable values", self.mode);
err.span_label(self.span, format!("{}s require immutable values",
self.mode));
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note("References in statics and constants may only refer \
to immutable values.\n\n\
Statics are shared everywhere, and if they refer to \
mutable data one might violate memory safety since \
holding multiple mutable references to shared data \
is not allowed.\n\n\
If you really want global mutable state, try using \
static mut or a global UnsafeCell.");
}
err.emit();
} else {
span_err!(self.tcx.sess, self.span, E0492,
"cannot borrow a constant which may contain \
interior mutability, create a static instead");
}
}
}
} else if let BorrowKind::Mut { .. } | BorrowKind::Shared = kind {
// Don't promote BorrowKind::Shallow borrows, as they don't
// reach codegen.
// We might have a candidate for promotion.
let candidate = Candidate::Ref(location);
// Start by traversing to the "base", with non-deref projections removed.
let deref_proj =
place.projection.iter().rev().find(|&elem| *elem == ProjectionElem::Deref);
debug!(
"qualify_consts: promotion candidate: place={:?} {:?}",
place.base, deref_proj
);
// We can only promote interior borrows of promotable temps (non-temps
// don't get promoted anyway).
// (If the place's projection contains a `Deref`, `deref_proj` is `Some` and
// we will definitely not enter the conditional here.)
if let (PlaceBase::Local(local), None) = (&place.base, deref_proj) {
if self.body.local_kind(*local) == LocalKind::Temp {
debug!("qualify_consts: promotion candidate: local={:?}", local);
// The borrowed place doesn't have `HasMutInterior`
// (from `in_rvalue`), so we can safely ignore
// `HasMutInterior` from the local's qualifications.
// This allows borrowing fields which don't have
// `HasMutInterior`, from a type that does, e.g.:
// `let _: &'static _ = &(Cell::new(1), 2).1;`
let mut local_qualifs = self.qualifs_in_local(*local);
// Any qualifications, except HasMutInterior (see above), disqualify
// from promotion.
// This is, in particular, the "implicit promotion" version of
// the check making sure that we don't run drop glue during const-eval.
local_qualifs[HasMutInterior] = false;
if !local_qualifs.0.iter().any(|&qualif| qualif) {
debug!("qualify_consts: promotion candidate: {:?}", candidate);
self.promotion_candidates.push(candidate);
}
}
}
}
},
ValueSource::Rvalue(&Rvalue::Repeat(ref operand, _)) => {
let candidate = Candidate::Repeat(location);
let not_promotable = IsNotImplicitlyPromotable::in_operand(self, operand) ||
IsNotPromotable::in_operand(self, operand);
debug!("assign: self.def_id={:?} operand={:?}", self.def_id, operand);
if !not_promotable && self.tcx.features().const_in_array_repeat_expressions {
debug!("assign: candidate={:?}", candidate);
self.promotion_candidates.push(candidate);
}
},
_ => {},
}
let mut dest_projection = &dest.projection[..];
let index = loop {
match (&dest.base, dest_projection) {
// We treat all locals equally in constants
(&PlaceBase::Local(index), []) => break index,
// Projections are transparent for assignments: we qualify the entire
// destination at once, even if just a field would have a stricter
// qualification.
(base, [proj_base @ .., _]) => {
// Catch more errors in the destination. `visit_place` also checks various
// projection rules like union field access and raw pointer deref
let context = PlaceContext::MutatingUse(MutatingUseContext::Store);
self.visit_place_base(base, context, location);
self.visit_projection(base, dest_projection, context, location);
dest_projection = proj_base;
},
(&PlaceBase::Static(box Static {
kind: StaticKind::Promoted(..),
..
}), []) => bug!("promoteds don't exist yet during promotion"),
(&PlaceBase::Static(box Static{ kind: _, .. }), []) => {
// Catch more errors in the destination. `visit_place` also checks that we
// do not try to access statics from constants or try to mutate statics
let context = PlaceContext::MutatingUse(MutatingUseContext::Store);
self.visit_place_base(&dest.base, context, location);
return;
}
}
};
let kind = self.body.local_kind(index);
debug!("store to {:?} {:?}", kind, index);
// Only handle promotable temps in non-const functions.
if self.mode == Mode::NonConstFn {
if kind != LocalKind::Temp ||
!self.temp_promotion_state[index].is_promotable() {
return;
}
}
// This is overly restrictive, because even full assignments do not clear the qualif.
// While we could special-case full assignments, that would be inconsistent with
// aggregates, where we overwrite all fields via assignments, which would not get
// that special treatment.
for (per_local, qualif) in &mut self.cx.per_local.as_mut().zip(qualifs).0 {
if *qualif {
per_local.insert(index);
}
}
// Ensure the `IsNotPromotable` qualification is preserved.
// NOTE(eddyb) this is actually unnecessary right now, as
// we never replace the local's qualif, but we might in
// the future, and so it serves to catch changes that unset
// important bits (in which case, asserting `contains` could
// be replaced with calling `insert` to re-set the bit).
if kind == LocalKind::Temp {
if !self.temp_promotion_state[index].is_promotable() {
assert!(self.cx.per_local[IsNotPromotable].contains(index));
}
}
}
/// Check a whole const, static initializer or const fn.
fn check_const(&mut self) -> (u8, &'tcx BitSet<Local>) {
debug!("const-checking {} {:?}", self.mode, self.def_id);
let body = self.body;
let mut seen_blocks = BitSet::new_empty(body.basic_blocks().len());
let mut bb = START_BLOCK;
loop {
seen_blocks.insert(bb.index());
self.visit_basic_block_data(bb, &body[bb]);
let target = match body[bb].terminator().kind {
TerminatorKind::Goto { target } |
TerminatorKind::FalseUnwind { real_target: target, .. } |
TerminatorKind::Drop { target, .. } |
TerminatorKind::DropAndReplace { target, .. } |
TerminatorKind::Assert { target, .. } |
TerminatorKind::Call { destination: Some((_, target)), .. } => {
Some(target)
}
// Non-terminating calls cannot produce any value.
TerminatorKind::Call { destination: None, .. } => {
break;
}
TerminatorKind::SwitchInt {..} |
TerminatorKind::Resume |
TerminatorKind::Abort |
TerminatorKind::GeneratorDrop |
TerminatorKind::Yield { .. } |
TerminatorKind::Unreachable |
TerminatorKind::FalseEdges { .. } => None,
TerminatorKind::Return => {
break;
}
};
match target {
// No loops allowed.
Some(target) if !seen_blocks.contains(target.index()) => {
bb = target;
}
_ => {
self.not_const();
break;
}
}
}
// Collect all the temps we need to promote.
let mut promoted_temps = BitSet::new_empty(self.temp_promotion_state.len());
debug!("qualify_const: promotion_candidates={:?}", self.promotion_candidates);
for candidate in &self.promotion_candidates {
match *candidate {
Candidate::Repeat(Location { block: bb, statement_index: stmt_idx }) => {
if let StatementKind::Assign(box(_, Rvalue::Repeat(
Operand::Move(Place {
base: PlaceBase::Local(index),
projection: box [],
}),
_
))) = self.body[bb].statements[stmt_idx].kind {
promoted_temps.insert(index);
}
}
Candidate::Ref(Location { block: bb, statement_index: stmt_idx }) => {
if let StatementKind::Assign(
box(
_,
Rvalue::Ref(_, _, Place {
base: PlaceBase::Local(index),
projection: box [],
})
)
) = self.body[bb].statements[stmt_idx].kind {
promoted_temps.insert(index);
}
}
Candidate::Argument { .. } => {}
}
}
let mut qualifs = self.qualifs_in_local(RETURN_PLACE);
// Account for errors in consts by using the
// conservative type qualification instead.
if qualifs[IsNotPromotable] {
qualifs = self.qualifs_in_any_value_of_ty(body.return_ty());
}
(qualifs.encode_to_bits(), self.tcx.arena.alloc(promoted_temps))
}
}
impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
fn visit_place_base(
&mut self,
place_base: &PlaceBase<'tcx>,
context: PlaceContext,
location: Location,
) {
self.super_place_base(place_base, context, location);
match place_base {
PlaceBase::Local(_) => {}
PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_, _), .. }) => {
unreachable!()
}
PlaceBase::Static(box Static{ kind: StaticKind::Static, def_id, .. }) => {
if self.tcx
.get_attrs(*def_id)
.iter()
.any(|attr| attr.check_name(sym::thread_local)) {
if self.mode.requires_const_checking() {
span_err!(self.tcx.sess, self.span, E0625,
"thread-local statics cannot be \
accessed at compile-time");
}
return;
}
// Only allow statics (not consts) to refer to other statics.
if self.mode == Mode::Static || self.mode == Mode::StaticMut {
if self.mode == Mode::Static && context.is_mutating_use() {
// this is not strictly necessary, as miri will also bail out.
// For interior mutability we can't really catch this statically, as that
// goes through raw pointers and intermediate temporaries, so miri has
// to catch it anyway
self.tcx.sess.span_err(
self.span,
"cannot mutate statics in the initializer of another static",
);
}
return;
}
unleash_miri!(self);
if self.mode.requires_const_checking() {
let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
"{}s cannot refer to statics, use \
a constant instead", self.mode);
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note(
"Static and const variables can refer to other const variables. \
But a const variable cannot refer to a static variable."
);
err.help(
"To fix this, the value can be extracted as a const and then used."
);
}
err.emit()
}
}
}
}
fn visit_projection(
&mut self,
place_base: &PlaceBase<'tcx>,
proj: &[PlaceElem<'tcx>],
context: PlaceContext,
location: Location,
) {
debug!(
"visit_place_projection: proj={:?} context={:?} location={:?}",
proj, context, location,
);
self.super_projection(place_base, proj, context, location);
if let [proj_base @ .., elem] = proj {
match elem {
ProjectionElem::Deref => {
if context.is_mutating_use() {
// `not_const` errors out in const contexts
self.not_const()
}
let base_ty = Place::ty_from(place_base, proj_base, self.body, self.tcx).ty;
match self.mode {
Mode::NonConstFn => {},
_ => {
if let ty::RawPtr(_) = base_ty.sty {
if !self.tcx.features().const_raw_ptr_deref {
emit_feature_err(
&self.tcx.sess.parse_sess, sym::const_raw_ptr_deref,
self.span, GateIssue::Language,
&format!(
"dereferencing raw pointers in {}s is unstable",
self.mode,
),
);
}
}
}
}
}
ProjectionElem::ConstantIndex {..} |
ProjectionElem::Subslice {..} |
ProjectionElem::Field(..) |
ProjectionElem::Index(_) => {
let base_ty = Place::ty_from(place_base, proj_base, self.body, self.tcx).ty;
if let Some(def) = base_ty.ty_adt_def() {
if def.is_union() {
match self.mode {
Mode::ConstFn => | ,
| Mode::NonConstFn
| Mode::Static
| Mode::StaticMut
| Mode::Const
=> {},
}
}
}
}
ProjectionElem::Downcast(..) => {
self.not_const()
}
}
}
}
fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
debug!("visit_operand: operand={:?} location={:?}", operand, location);
self.super_operand(operand, location);
match *operand {
Operand::Move(ref place) => {
// Mark the consumed locals to indicate later drops are noops.
if let Place {
base: PlaceBase::Local(local),
projection: box [],
} = *place {
self.cx.per_local[NeedsDrop].remove(local);
}
}
Operand::Copy(_) |
Operand::Constant(_) => {}
}
}
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
debug!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
// Check nested operands and places.
if let Rvalue::Ref(_, kind, ref place) = *rvalue {
// Special-case reborrows.
let mut reborrow_place = None;
if let box [proj_base @ .., elem] = &place.projection {
if *elem == ProjectionElem::Deref {
let base_ty = Place::ty_from(&place.base, proj_base, self.body, self.tcx).ty;
if let ty::Ref(..) = base_ty.sty {
reborrow_place = Some(proj_base);
}
}
}
if let Some(proj) = reborrow_place {
let ctx = match kind {
BorrowKind::Shared => PlaceContext::NonMutatingUse(
NonMutatingUseContext::SharedBorrow,
),
BorrowKind::Shallow => PlaceContext::NonMutatingUse(
NonMutatingUseContext::ShallowBorrow,
),
BorrowKind::Unique => PlaceContext::NonMutatingUse(
NonMutatingUseContext::UniqueBorrow,
),
BorrowKind::Mut { .. } => PlaceContext::MutatingUse(
MutatingUseContext::Borrow,
),
};
self.visit_place_base(&place.base, ctx, location);
self.visit_projection(&place.base, proj, ctx, location);
} else {
self.super_rvalue(rvalue, location);
}
} else {
self.super_rvalue(rvalue, location);
}
match *rvalue {
Rvalue::Use(_) |
Rvalue::Repeat(..) |
Rvalue::UnaryOp(UnOp::Neg, _) |
Rvalue::UnaryOp(UnOp::Not, _) |
Rvalue::NullaryOp(NullOp::SizeOf, _) |
Rvalue::CheckedBinaryOp(..) |
Rvalue::Cast(CastKind::Pointer(_), ..) |
Rvalue::Discriminant(..) |
Rvalue::Len(_) |
Rvalue::Ref(..) |
Rvalue::Aggregate(..) => {}
Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
let operand_ty = operand.ty(self.body, self.tcx);
let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
match (cast_in, cast_out) {
(CastTy::Ptr(_), CastTy::Int(_)) |
(CastTy::FnPtr, CastTy::Int(_)) if self.mode != Mode::NonConstFn => {
unleash_miri!(self);
if !self.tcx.features().const_raw_ptr_to_usize_cast {
// in const fn and constants this requires the feature gate
// FIXME: make it unsafe inside const fn and constants
emit_feature_err(
&self.tcx.sess.parse_sess, sym::const_raw_ptr_to_usize_cast,
self.span, GateIssue::Language,
&format!(
"casting pointers to integers in {}s is unstable",
self.mode,
),
);
}
}
_ => {}
}
}
Rvalue::BinaryOp(op, ref lhs, _) => {
if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).sty {
assert!(op == BinOp::Eq || op == BinOp::Ne ||
op == BinOp::Le || op == BinOp::Lt ||
op == BinOp::Ge || op == BinOp::Gt ||
op == BinOp::Offset);
unleash_miri!(self);
if self.mode.requires_const_checking() &&
!self.tcx.features().const_compare_raw_pointers
{
// require the feature gate inside constants and const fn
// FIXME: make it unsafe to use these operations
emit_feature_err(
&self.tcx.sess.parse_sess,
sym::const_compare_raw_pointers,
self.span,
GateIssue::Language,
&format!("comparing raw pointers inside {}", self.mode),
);
}
}
}
Rvalue::NullaryOp(NullOp::Box, _) => {
unleash_miri!(self);
if self.mode.requires_const_checking() {
let mut err = struct_span_err!(self.tcx.sess, self.span, E0010,
"allocations are not allowed in {}s", self.mode);
err.span_label(self.span, format!("allocation not allowed in {}s", self.mode));
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note(
"The value of statics and constants must be known at compile time, \
and they live for the entire lifetime of a program. Creating a boxed \
value allocates memory on the heap at runtime, and therefore cannot \
be done at compile time."
);
}
err.emit();
}
}
}
}
fn visit_terminator_kind(&mut self,
kind: &TerminatorKind<'tcx>,
location: Location) {
debug!("visit_terminator_kind: kind={:?} location={:?}", kind, location);
if let TerminatorKind::Call { ref func, ref args, ref destination, .. } = *kind {
if let Some((ref dest, _)) = *destination {
self.assign(dest, ValueSource::Call {
callee: func,
args,
return_ty: dest.ty(self.body, self.tcx).ty,
}, location);
}
let fn_ty = func.ty(self.body, self.tcx);
let mut callee_def_id = None;
let mut is_shuffle = false;
match fn_ty.sty {
ty::FnDef(def_id, _) => {
callee_def_id = Some(def_id);
match self.tcx.fn_sig(def_id).abi() {
Abi::RustIntrinsic |
Abi::PlatformIntrinsic => {
assert!(!self.tcx.is_const_fn(def_id));
match &self.tcx.item_name(def_id).as_str()[..] {
// special intrinsics that can be called directly without an intrinsic
// feature gate need a language feature gate
"transmute" => {
if self.mode.requires_const_checking() {
// const eval transmute calls only with the feature gate
if !self.tcx.features().const_transmute {
emit_feature_err(
&self.tcx.sess.parse_sess, sym::const_transmute,
self.span, GateIssue::Language,
&format!("The use of std::mem::transmute() \
is gated in {}s", self.mode));
}
}
}
name if name.starts_with("simd_shuffle") => {
is_shuffle = true;
}
// no need to check feature gates, intrinsics are only callable
// from libstd or with forever-unstable feature gates
_ => {}
}
}
_ => {
// In normal functions no calls are feature-gated.
if self.mode.requires_const_checking() {
let unleash_miri = self
.tcx
.sess
.opts
.debugging_opts
.unleash_the_miri_inside_of_you;
if self.tcx.is_const_fn(def_id) || unleash_miri {
// stable const fns or unstable const fns
// with their feature gate active
// FIXME(eddyb) move stability checks from `is_const_fn` here.
} else if self.is_const_panic_fn(def_id) {
// Check the const_panic feature gate.
// FIXME: cannot allow this inside `allow_internal_unstable`
// because that would make `panic!` insta stable in constants,
// since the macro is marked with the attribute.
if !self.tcx.features().const_panic {
// Don't allow panics in constants without the feature gate.
emit_feature_err(
&self.tcx.sess.parse_sess,
sym::const_panic,
self.span,
GateIssue::Language,
&format!("panicking in {}s is unstable", self.mode),
);
}
} else if let Some(feature)
= self.tcx.is_unstable_const_fn(def_id) {
// Check `#[unstable]` const fns or `#[rustc_const_unstable]`
// functions without the feature gate active in this crate in
// order to report a better error message than the one below.
if !self.span.allows_unstable(feature) {
let mut err = self.tcx.sess.struct_span_err(self.span,
&format!("`{}` is not yet stable as a const fn",
self.tcx.def_path_str(def_id)));
if nightly_options::is_nightly_build() {
help!(&mut err,
"add `#![feature({})]` to the \
crate attributes to enable",
feature);
}
err.emit();
}
} else {
let mut err = struct_span_err!(
self.tcx.sess,
self.span,
E0015,
"calls in {}s are limited to constant functions, \
tuple structs and tuple variants",
self.mode,
);
err.emit();
}
}
}
}
}
ty::FnPtr(_) => {
let unleash_miri = self
.tcx
.sess
.opts
.debugging_opts
.unleash_the_miri_inside_of_you;
if self.mode.requires_const_checking() && !unleash_miri {
let mut err = self.tcx.sess.struct_span_err(
self.span,
"function pointers are not allowed in const fn"
);
err.emit();
}
}
_ => {
self.not_const();
}
}
// No need to do anything in constants and statics, as everything is "constant" anyway
// so promotion would be useless.
if self.mode != Mode::Static && self.mode != Mode::Const {
let constant_args = callee_def_id.and_then(|id| {
args_required_const(self.tcx, id)
}).unwrap_or_default();
for (i, arg) in args.iter().enumerate() {
if !(is_shuffle && i == 2 || constant_args.contains(&i)) {
continue;
}
let candidate = Candidate::Argument { bb: location.block, index: i };
// Since the argument is required to be constant,
// we care about constness, not promotability.
// If we checked for promotability, we'd miss out on
// the results of function calls (which are never promoted
// in runtime code).
// This is not a problem, because the argument explicitly
// requests constness, in contrast to regular promotion
// which happens even without the user requesting it.
// We can error out with a hard error if the argument is not
// constant here.
if !IsNotPromotable::in_operand(self, arg) {
debug!("visit_terminator_kind: candidate={:?}", candidate);
self.promotion_candidates.push(candidate);
} else {
if is_shuffle {
span_err!(self.tcx.sess, self.span, E0526,
"shuffle indices are not constant");
} else {
self.tcx.sess.span_err(self.span,
&format!("argument {} is required to be a constant",
i + 1));
}
}
}
}
// Check callee and argument operands.
self.visit_operand(func, location);
for arg in args {
self.visit_operand(arg, location);
}
} else if let TerminatorKind::Drop {
location: ref place, ..
} | TerminatorKind::DropAndReplace {
location: ref place, ..
} = *kind {
match *kind {
TerminatorKind::DropAndReplace { .. } => {}
_ => self.super_terminator_kind(kind, location),
}
// Deny *any* live drops anywhere other than functions.
if self.mode.requires_const_checking() {
unleash_miri!(self);
// HACK(eddyb): emulate a bit of dataflow analysis,
// conservatively, that drop elaboration will do.
let needs_drop = if let Place {
base: PlaceBase::Local(local),
projection: box [],
} = *place {
if NeedsDrop::in_local(self, local) {
Some(self.body.local_decls[local].source_info.span)
} else {
None
}
} else {
Some(self.span)
};
if let Some(span) = needs_drop {
// Double-check the type being dropped, to minimize false positives.
let ty = place.ty(self.body, self.tcx).ty;
if ty.needs_drop(self.tcx, self.param_env) {
struct_span_err!(self.tcx.sess, span, E0493,
"destructors cannot be evaluated at compile-time")
.span_label(span, format!("{}s cannot evaluate destructors",
self.mode))
.emit();
}
}
}
match *kind {
TerminatorKind::DropAndReplace { ref value, .. } => {
self.assign(place, ValueSource::DropAndReplace(value), location);
self.visit_operand(value, location);
}
_ => {}
}
} else {
// Qualify any operands inside other terminators.
self.super_terminator_kind(kind, location);
}
}
fn visit_assign(&mut self,
dest: &Place<'tcx>,
rvalue: &Rvalue<'tcx>,
location: Location) {
debug!("visit_assign: dest={:?} rvalue={:?} location={:?}", dest, rvalue, location);
self.assign(dest, ValueSource::Rvalue(rvalue), location);
self.visit_rvalue(rvalue, location);
}
fn visit_source_info(&mut self, source_info: &SourceInfo) {
debug!("visit_source_info: source_info={:?}", source_info);
self.span = source_info.span;
}
fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
debug!("visit_statement: statement={:?} location={:?}", statement, location);
match statement.kind {
StatementKind::Assign(..) => {
self.super_statement(statement, location);
}
StatementKind::FakeRead(FakeReadCause::ForMatchedPlace, _) => {
self.not_const();
}
// FIXME(eddyb) should these really do nothing?
StatementKind::FakeRead(..) |
StatementKind::SetDiscriminant { .. } |
StatementKind::StorageLive(_) |
StatementKind::StorageDead(_) |
StatementKind::InlineAsm {..} |
StatementKind::Retag { .. } |
StatementKind::AscribeUserType(..) |
StatementKind::Nop => {}
}
}
}
pub fn provide(providers: &mut Providers<'_>) {
*providers = Providers {
mir_const_qualif,
..*providers
};
}
fn mir_const_qualif(tcx: TyCtxt<'_>, def_id: DefId) -> (u8, &BitSet<Local>) {
// N.B., this `borrow()` is guaranteed to be valid (i.e., the value
// cannot yet be stolen), because `mir_validated()`, which steals
// from `mir_const()`, forces this query to execute before
// performing the steal.
let body = &tcx.mir_const(def_id).borrow();
if body.return_ty().references_error() {
tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
return (1 << IsNotPromotable::IDX, tcx.arena.alloc(BitSet::new_empty(0)));
}
Checker::new(tcx, def_id, body, Mode::Const).check_const()
}
pub struct QualifyAndPromoteConstants<'tcx> {
pub promoted: Cell<IndexVec<Promoted, Body<'tcx>>>,
}
impl<'tcx> Default for QualifyAndPromoteConstants<'tcx> {
fn default() -> Self {
QualifyAndPromoteConstants {
promoted: Cell::new(IndexVec::new()),
}
}
}
impl<'tcx> MirPass<'tcx> for QualifyAndPromoteConstants<'tcx> {
fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
// There's not really any point in promoting errorful MIR.
if body.return_ty().references_error() {
tcx.sess.delay_span_bug(body.span, "QualifyAndPromoteConstants: MIR had errors");
return;
}
if src.promoted.is_some() {
return;
}
let def_id = src.def_id();
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
let mode = determine_mode(tcx, hir_id, def_id);
debug!("run_pass: mode={:?}", mode);
if let Mode::NonConstFn | Mode::ConstFn = mode {
// This is ugly because Checker holds onto mir,
// which can't be mutated until its scope ends.
let (temps, candidates) = {
let mut checker = Checker::new(tcx, def_id, body, mode);
if let Mode::ConstFn = mode {
if tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
checker.check_const();
} else if tcx.is_min_const_fn(def_id) {
// Enforce `min_const_fn` for stable `const fn`s.
use super::qualify_min_const_fn::is_min_const_fn;
if let Err((span, err)) = is_min_const_fn(tcx, def_id, body) {
error_min_const_fn_violation(tcx, span, err);
} else {
// this should not produce any errors, but better safe than sorry
// FIXME(#53819)
checker.check_const();
}
} else {
// Enforce a constant-like CFG for `const fn`.
checker.check_const();
}
} else {
while let Some((bb, data)) = checker.rpo.next() {
checker.visit_basic_block_data(bb, data);
}
}
(checker.temp_promotion_state, checker.promotion_candidates)
};
// Do the actual promotion, now that we know what's viable.
self.promoted.set(
promote_consts::promote_candidates(def_id, body, tcx, temps, candidates)
);
} else {
check_short_circuiting_in_const_local(tcx, body, mode);
let promoted_temps = match mode {
Mode::Const => tcx.mir_const_qualif(def_id).1,
_ => Checker::new(tcx, def_id, body, mode).check_const().1,
};
remove_drop_and_storage_dead_on_promoted_locals(body, promoted_temps);
}
if mode == Mode::Static && !tcx.has_attr(def_id, sym::thread_local) {
// `static`s (not `static mut`s) which are not `#[thread_local]` must be `Sync`.
check_static_is_sync(tcx, body, hir_id);
}
}
}
fn determine_mode(tcx: TyCtxt<'_>, hir_id: HirId, def_id: DefId) -> Mode {
match tcx.hir().body_owner_kind(hir_id) {
hir::BodyOwnerKind::Closure => Mode::NonConstFn,
hir::BodyOwnerKind::Fn if tcx.is_const_fn(def_id) => Mode::ConstFn,
hir::BodyOwnerKind::Fn => Mode::NonConstFn,
hir::BodyOwnerKind::Const => Mode::Const,
hir::BodyOwnerKind::Static(hir::MutImmutable) => Mode::Static,
hir::BodyOwnerKind::Static(hir::MutMutable) => Mode::StaticMut,
}
}
fn error_min_const_fn_violation(tcx: TyCtxt<'_>, span: Span, msg: Cow<'_, str>) {
struct_span_err!(tcx.sess, span, E0723, "{}", msg)
.note("for more information, see issue https://github.com/rust-lang/rust/issues/57563")
.help("add `#![feature(const_fn)]` to the crate attributes to enable")
.emit();
}
fn check_short_circuiting_in_const_local(tcx: TyCtxt<'_>, body: &mut Body<'tcx>, mode: Mode) {
if body.control_flow_destroyed.is_empty() {
return;
}
let mut locals = body.vars_iter();
if let Some(local) = locals.next() {
let span = body.local_decls[local].source_info.span;
let mut error = tcx.sess.struct_span_err(
span,
&format!(
"new features like let bindings are not permitted in {}s \
which also use short circuiting operators",
mode,
),
);
for (span, kind) in body.control_flow_destroyed.iter() {
error.span_note(
*span,
&format!("use of {} here does not actually short circuit due to \
the const evaluator presently not being able to do control flow. \
See https://github.com/rust-lang/rust/issues/49146 for more \
information.", kind),
);
}
for local in locals {
let span = body.local_decls[local].source_info.span;
error.span_note(span, "more locals defined here");
}
error.emit();
}
}
/// In `const` and `static` everything without `StorageDead`
/// is `'static`, we don't have to create promoted MIR fragments,
/// just remove `Drop` and `StorageDead` on "promoted" locals.
fn remove_drop_and_storage_dead_on_promoted_locals(
body: &mut Body<'tcx>,
promoted_temps: &BitSet<Local>,
) {
debug!("run_pass: promoted_temps={:?}", promoted_temps);
for block in body.basic_blocks_mut() {
block.statements.retain(|statement| {
match statement.kind {
StatementKind::StorageDead(index) => !promoted_temps.contains(index),
_ => true
}
});
let terminator = block.terminator_mut();
match terminator.kind {
TerminatorKind::Drop {
location: Place {
base: PlaceBase::Local(index),
projection: box [],
},
target,
..
} if promoted_temps.contains(index) => {
terminator.kind = TerminatorKind::Goto { target };
}
_ => {}
}
}
}
fn check_static_is_sync(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, hir_id: HirId) {
let ty = body.return_ty();
tcx.infer_ctxt().enter(|infcx| {
let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic);
let mut fulfillment_cx = traits::FulfillmentContext::new();
let sync_def_id = tcx.require_lang_item(lang_items::SyncTraitLangItem, Some(body.span));
fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause);
if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
infcx.report_fulfillment_errors(&err, None, false);
}
});
}
fn args_required_const(tcx: TyCtxt<'_>, def_id: DefId) -> Option<FxHashSet<usize>> {
let attrs = tcx.get_attrs(def_id);
let attr = attrs.iter().find(|a| a.check_name(sym::rustc_args_required_const))?;
let mut ret = FxHashSet::default();
for meta in attr.meta_item_list()? {
match meta.literal()?.node {
LitKind::Int(a, _) => { ret.insert(a as usize); }
_ => return None,
}
}
Some(ret)
}
| {
if !self.tcx.features().const_fn_union {
emit_feature_err(
&self.tcx.sess.parse_sess, sym::const_fn_union,
self.span, GateIssue::Language,
"unions in const fn are unstable",
);
}
} |
where-allowed-2.rs | //! Ideally, these tests would go in `where-allowed.rs`, but we bail out
//! too early to display them.
use std::fmt::Debug;
// Disallowed
fn in_adt_in_return() -> Vec<impl Debug> |
//~^ ERROR opaque type expands to a recursive type
fn main() {}
| { panic!() } |
main.go | package main |
func main() {
cmd.Execute()
} |
import "github.com/dcamachoj/linos-cli/cmd" |
utils.py | from typing import Optional, Union, Tuple, List, Dict
import functools
from torch import nn
from torch import optim
from torch.nn import functional as F
from transformers.trainer_pt_utils import get_parameter_names
import torchmetrics
from .lr_scheduler import (
get_cosine_schedule_with_warmup,
get_polynomial_decay_schedule_with_warmup,
get_linear_schedule_with_warmup,
)
from ..constants import (
BINARY, MULTICLASS, REGRESSION, MAX, MIN, NORM_FIT, BIT_FIT,
ACC, ACCURACY, RMSE, ROOT_MEAN_SQUARED_ERROR, R2, QUADRATIC_KAPPA,
ROC_AUC, AVERAGE_PRECISION, LOG_LOSS, CROSS_ENTROPY,
PEARSONR, SPEARMANR,
)
import warnings
def get_loss_func(problem_type: str):
"""
Choose a suitable Pytorch loss module based on the provided problem type.
Parameters
----------
problem_type
Type of problem.
Returns
-------
A Pytorch loss module.
"""
if problem_type in [BINARY, MULTICLASS]:
loss_func = nn.CrossEntropyLoss()
elif problem_type == REGRESSION:
loss_func = nn.MSELoss()
else:
raise NotImplementedError
return loss_func
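# Illustrative usage sketch (added; not part of the original module). It only uses
# names already imported above; the problem-type constants come from ..constants.
def _example_get_loss_func_usage():
    """Classification problems map to CrossEntropyLoss; regression maps to MSELoss."""
    assert isinstance(get_loss_func(MULTICLASS), nn.CrossEntropyLoss)
    assert isinstance(get_loss_func(REGRESSION), nn.MSELoss)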
def get_metric(
metric_name: str,
problem_type: str,
num_classes: Optional[int] = None,
pos_label: Optional[int] = None,
):
"""
Obtain a torchmetrics.Metric from its name.
Define a customized metric function in case torchmetrics doesn't support some metric.
Parameters
----------
metric_name
Name of metric
problem_type
The type of the problem.
num_classes
Number of classes, used in the quadratic_kappa metric for binary classification.
pos_label
The label (0 or 1) of binary classification's positive class, which is used in some metrics, e.g., AUROC.
Returns
-------
torchmetrics.Metric
A torchmetrics.Metric object.
mode
The min/max mode used in selecting model checkpoints.
- min
It means that a smaller metric value is better.
- max
It means that a larger metric value is better.
custom_metric_func
A customized metric function.
"""
metric_name = metric_name.lower()
if metric_name in [ACC, ACCURACY]:
return torchmetrics.Accuracy(), MAX, None
elif metric_name in [RMSE, ROOT_MEAN_SQUARED_ERROR]:
return torchmetrics.MeanSquaredError(squared=False), MIN, None
elif metric_name == R2:
return torchmetrics.R2Score(), MAX, None
elif metric_name == QUADRATIC_KAPPA:
return torchmetrics.CohenKappa(num_classes=num_classes,
weights="quadratic"), MAX, None
elif metric_name == ROC_AUC:
return torchmetrics.AUROC(pos_label=pos_label), MAX, None
elif metric_name == AVERAGE_PRECISION:
return torchmetrics.AveragePrecision(pos_label=pos_label), MAX, None
elif metric_name in [LOG_LOSS, CROSS_ENTROPY]:
return torchmetrics.MeanMetric(), MIN, \
functools.partial(F.cross_entropy, reduction="none")
elif metric_name == PEARSONR:
return torchmetrics.PearsonCorrCoef(), MAX, None
elif metric_name == SPEARMANR:
return torchmetrics.SpearmanCorrCoef(), MAX, None
else:
warnings.warn(f"Currently, we cannot convert the metric: {metric_name} to a metric supported in torchmetrics. "
f"Thus, we will fall-back to use accuracy for multi-class classification problems "
f", ROC-AUC for binary classification problem, and MSE for regression problems.", UserWarning)
if problem_type == REGRESSION:
return torchmetrics.MeanSquaredError(squared=False), MIN, None
elif problem_type == MULTICLASS:
return torchmetrics.Accuracy(), MAX, None
elif problem_type == BINARY:
return torchmetrics.AUROC(pos_label=pos_label), MAX, None
else:
raise ValueError(f'The problem_type={problem_type} is currently not supported')
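# Illustrative usage sketch (added): how the returned triple is typically consumed.
# Assumes the ACCURACY constant equals the string "accuracy", and a torchmetrics
# version that accepts Accuracy() without arguments (as used above).
def _example_get_metric_usage():
    """`mode` drives checkpoint selection; `custom_metric_func` is None for native metrics."""
    metric, mode, custom_metric_func = get_metric("accuracy", problem_type=MULTICLASS)
    assert isinstance(metric, torchmetrics.Accuracy)
    assert mode == MAX and custom_metric_func is None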
def get_optimizer(
optim_type: str,
optimizer_grouped_parameters,
lr: float,
weight_decay: float,
eps: Optional[float] = 1e-6,
betas: Optional[Tuple[float, float]] = (0.9, 0.999),
momentum: Optional[float] = 0.9,
):
"""
Choose a Pytorch optimizer based on its name.
Parameters
----------
optim_type
Name of optimizer.
optimizer_grouped_parameters
The model parameters to be optimized.
lr
Learning rate.
weight_decay
Optimizer weight decay.
eps
Optimizer eps.
betas
Optimizer betas.
momentum
Momentum used in the SGD optimizer.
Returns
-------
A Pytorch optimizer.
"""
if optim_type == "adamw":
optimizer = optim.AdamW(
optimizer_grouped_parameters,
lr=lr,
weight_decay=weight_decay,
eps=eps,
betas=betas,
)
elif optim_type == "adam":
optimizer = optim.Adam(
optimizer_grouped_parameters,
lr=lr,
weight_decay=weight_decay,
)
elif optim_type == "sgd":
optimizer = optim.SGD(
optimizer_grouped_parameters,
lr=lr, | momentum=momentum,
)
else:
raise ValueError(f"unknown optimizer: {optim_type}")
return optimizer
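# Illustrative usage sketch (added): get_optimizer also accepts a plain parameter
# iterable, since it forwards `optimizer_grouped_parameters` to the torch optimizer.
# The model and hyperparameter values here are hypothetical.
def _example_get_optimizer_usage():
    model = nn.Linear(8, 2)
    optimizer = get_optimizer(
        optim_type="adamw",
        optimizer_grouped_parameters=model.parameters(),
        lr=1e-4,
        weight_decay=1e-3,
    )
    assert isinstance(optimizer, optim.AdamW)
    return optimizer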
def get_lr_scheduler(
optimizer: optim.Optimizer,
num_max_steps: int,
num_warmup_steps: int,
lr_schedule: str,
end_lr: Union[float, int],
):
"""
Get the learning rate scheduler from its name. Here we use our own learning rate
schedulers instead of those imported from "transformers" because we want to support
Pytorch Lightning's "ddp_spawn" training strategy.
Parameters
----------
optimizer
A Pytorch optimizer.
num_max_steps
Number of maximum training steps.
num_warmup_steps
Number of steps to do learning rate warmup.
lr_schedule
Name of the learning rate scheduler.
end_lr
The final learning rate after decay.
Returns
-------
A learning rate scheduler.
"""
if lr_schedule == "cosine_decay":
scheduler = get_cosine_schedule_with_warmup(
optimizer=optimizer,
num_warmup_steps=num_warmup_steps,
num_training_steps=num_max_steps,
)
elif lr_schedule == "polynomial_decay":
scheduler = get_polynomial_decay_schedule_with_warmup(
optimizer=optimizer,
num_warmup_steps=num_warmup_steps,
num_training_steps=num_max_steps,
lr_end=end_lr,
power=1,
)
elif lr_schedule == "linear_decay":
scheduler = get_linear_schedule_with_warmup(
optimizer=optimizer,
num_warmup_steps=num_warmup_steps,
num_training_steps=num_max_steps
)
else:
raise ValueError(f"unknown lr schedule: {lr_schedule}")
return scheduler
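# Illustrative usage sketch (added): cosine decay with 10% warmup; all numbers are
# hypothetical. `end_lr` is only consulted by the "polynomial_decay" schedule.
def _example_get_lr_scheduler_usage():
    model = nn.Linear(8, 2)
    optimizer = optim.AdamW(model.parameters(), lr=1e-4)
    num_max_steps = 1000
    scheduler = get_lr_scheduler(
        optimizer=optimizer,
        num_max_steps=num_max_steps,
        num_warmup_steps=num_max_steps // 10,
        lr_schedule="cosine_decay",
        end_lr=0.0,
    )
    return scheduler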
def get_weight_decay_param_names(model: nn.Module):
"""
Collect the names of the parameters that should use weight decay. Layer normalization
parameters and all bias parameters are excluded.
Parameters
----------
model
A Pytorch model.
Returns
-------
A list of parameter names that should use weight decay.
"""
# By default, we should not apply weight decay for all the norm layers
decay_param_names = get_parameter_names(model,
[nn.LayerNorm, nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d,
nn.GroupNorm])
decay_param_names = [name for name in decay_param_names if "bias" not in name]
return decay_param_names
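# Illustrative sketch (added): norm parameters and all biases are excluded from the
# decay list. The model is hypothetical; parameter names follow nn.Sequential indexing.
def _example_weight_decay_param_names():
    model = nn.Sequential(nn.Linear(8, 8), nn.LayerNorm(8))
    # "0.bias", "1.weight" (norm), and "1.bias" are all filtered out.
    assert get_weight_decay_param_names(model) == ["0.weight"]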
def get_norm_layer_param_names(model: nn.Module):
"""
Get parameters associated with the normalization layers
Parameters
----------
model
A Pytorch model
Returns
-------
norm_param_names
A list of normalization parameter names
"""
all_param_names = [name for name, _ in model.named_parameters()]
all_param_names_except_norm_names = get_parameter_names(
model, [nn.LayerNorm, nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d, nn.GroupNorm])
norm_param_names = [name for name in all_param_names if name not in all_param_names_except_norm_names]
return norm_param_names
def apply_single_lr(
model: nn.Module,
lr: float,
weight_decay: float,
return_params: Optional[bool] = True,
):
"""
Set to use a single learning rate for all parameters. Layer normalization parameters and other
layers' bias parameters don't use weight decay.
Parameters
----------
model
A Pytorch model.
lr
Learning rate.
weight_decay
Weight decay.
return_params
Whether to return parameters or their names. If you want to double-check
whether the learning rate setup is as expected, you can set "return_params=False",
and print the layer names along with their learning rates through
"print("Param groups = %s" % json.dumps(optimizer_grouped_parameters, indent=2))".
Returns
-------
The grouped parameters or their names.
"""
decay_param_names = get_weight_decay_param_names(model)
optimizer_grouped_parameters = [
{
"params": [p if return_params else n for n, p in model.named_parameters() if n in decay_param_names],
"weight_decay": weight_decay,
"lr": lr,
},
{
"params": [p if return_params else n for n, p in model.named_parameters() if n not in decay_param_names],
"weight_decay": 0.0,
"lr": lr,
},
]
return optimizer_grouped_parameters
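# Illustrative usage sketch (added): wire the two parameter groups into an optimizer.
# The model and hyperparameter values are hypothetical.
def _example_apply_single_lr_usage():
    model = nn.Sequential(nn.Linear(8, 8), nn.LayerNorm(8), nn.Linear(8, 2))
    grouped = apply_single_lr(model, lr=1e-4, weight_decay=1e-3)
    # Group 0 carries the decayed weights; group 1 holds norm/bias params with decay 0.
    assert grouped[0]["weight_decay"] == 1e-3 and grouped[1]["weight_decay"] == 0.0
    return get_optimizer("adamw", grouped, lr=1e-4, weight_decay=1e-3)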
def apply_two_stages_lr(
model: nn.Module,
lr: float,
lr_mult: Union[float, int],
weight_decay: float,
return_params: Optional[bool] = True,
):
"""
Set up the newly added head layers to use a scaled learning rate (lr * lr_mult),
while the pretrained backbone uses the base learning rate (lr).
Layer normalization parameters and other layers' bias parameters don't use weight decay.
Parameters
----------
model
A Pytorch model.
lr
The learning rate.
lr_mult
The multiplier applied to the learning rate of the head layers.
weight_decay
Weight decay.
return_params
Whether to return parameters or their names. If you want to double-check
whether the learning rate setup is as expected, you can set "return_params=False",
and print the layer names along with their learning rates through
"print("Param groups = %s" % json.dumps(optimizer_grouped_parameters, indent=2))".
Returns
-------
The grouped parameters or their names.
"""
decay_param_names = get_weight_decay_param_names(model)
optimizer_grouped_parameters = [
{
"params": [
p if return_params else n
for n, p in model.named_parameters()
if n in decay_param_names
and not any(bb in n for bb in model.head_layer_names)
],
"weight_decay": weight_decay,
"lr": lr,
},
{
"params": [
p if return_params else n
for n, p in model.named_parameters()
if n not in decay_param_names
and not any(bb in n for bb in model.head_layer_names)
],
"weight_decay": 0.0,
"lr": lr,
},
{
"params": [
p if return_params else n
for n, p in model.named_parameters()
if n in decay_param_names
and any(bb in n for bb in model.head_layer_names)
],
"weight_decay": weight_decay,
"lr": lr * lr_mult,
},
{
"params": [
p if return_params else n
for n, p in model.named_parameters()
if n not in decay_param_names
and any(bb in n for bb in model.head_layer_names)
],
"weight_decay": 0.0,
"lr": lr * lr_mult,
},
]
return optimizer_grouped_parameters
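# Illustrative usage sketch (added): `head_layer_names` holds substrings identifying
# head parameters; the model, names, and lr_mult value here are all hypothetical.
def _example_apply_two_stages_lr_usage():
    model = nn.Sequential(nn.Linear(8, 8), nn.Linear(8, 2))
    model.head_layer_names = ["1."]  # parameter names starting with "1." form the head
    grouped = apply_two_stages_lr(model, lr=1e-4, lr_mult=10, weight_decay=1e-3)
    # Groups 0/1 are the backbone (lr); groups 2/3 are the head (lr * lr_mult).
    assert grouped[0]["lr"] == 1e-4
    assert grouped[2]["lr"] == 1e-4 * 10
    return grouped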
def apply_layerwise_lr_decay(
model: nn.Module,
lr: float,
lr_decay: float,
weight_decay: float,
efficient_finetune: Optional[str] = None,
):
"""
Assign monotonically decreasing learning rates for layers from the output end to the input end.
The intuition behind this is that later layers are more task-specific than the early layers.
Layer normalization parameters and other layers' bias parameters don't use weight decay.
If you want to double-check whether the learning rate setup is as expected,
you can print the layer names along with their learning rates through
"print("Param groups = %s" % json.dumps(parameter_group_names, indent=2))".
Parameters
----------
model
A Pytorch model.
lr
The learning rate.
lr_decay
The learning rate decay factor (0, 1).
weight_decay
Weight decay.
efficient_finetune
Efficient finetuning strategy. Can be "bit_fit" or "norm_fit". Only part of the parameters will be finetuned.
Returns
-------
The grouped parameters based on their layer ids and whether using weight decay.
"""
parameter_group_names = {}
parameter_group_vars = {}
decay_param_names = get_weight_decay_param_names(model)
norm_param_names = get_norm_layer_param_names(model)
for name, param in model.named_parameters():
if efficient_finetune == BIT_FIT:
            # For bit_fit, we freeze everything except the bias terms
if 'bias' not in name:
param.requires_grad = False
elif efficient_finetune == NORM_FIT:
            # For norm_fit, we finetune only the normalization layers and bias parameters
if name not in norm_param_names and 'bias' not in name:
param.requires_grad = False
if not param.requires_grad:
continue # frozen weights
if name in decay_param_names:
group_name = "decay"
this_weight_decay = weight_decay
else:
group_name = "no_decay"
this_weight_decay = 0.
layer_id = model.name_to_id[name]
group_name = "layer_%d_%s" % (layer_id, group_name)
if group_name not in parameter_group_names:
scale = lr_decay ** layer_id
parameter_group_names[group_name] = {
"weight_decay": this_weight_decay,
"params": [],
"lr": scale * lr
}
parameter_group_vars[group_name] = {
"weight_decay": this_weight_decay,
"params": [],
"lr": scale * lr
}
parameter_group_vars[group_name]["params"].append(param)
parameter_group_names[group_name]["params"].append(name)
return list(parameter_group_vars.values()) | weight_decay=weight_decay, |
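# A minimal, hedged sketch of consuming apply_layerwise_lr_decay. It requires
# `model.name_to_id`, a map from parameter name to an integer depth; larger
# ids decay harder (lr * lr_decay ** id). The toy model and the fabricated
# mapping below are assumptions for illustration only.
def _example_layerwise_lr_decay():
    import torch

    toy = torch.nn.Sequential(
        torch.nn.Linear(8, 16), torch.nn.ReLU(), torch.nn.Linear(16, 2)
    )
    # Give the output Linear ("2.*") id 0 (full lr) and the input Linear
    # ("0.*") id 1 (lr * lr_decay), matching "output end to input end".
    toy.name_to_id = {
        n: (0 if n.startswith("2.") else 1) for n, _ in toy.named_parameters()
    }
    groups = apply_layerwise_lr_decay(toy, lr=1e-4, lr_decay=0.9, weight_decay=0.01)
    return torch.optim.AdamW(groups)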
hello.rs | } | fn main() {
println!("Hello, World from Rust with WASM in the Browser!!!"); |
|
BenchmarkConfigurator.tsx | import {
IonContent,
IonHeader,
IonMenu,
IonTitle,
IonToolbar,
} from '@ionic/react';
import { BenchmarkConfiguratorProps } from 'apps/BenchmarkApp/components/BenchmarkConfigurator/BenchmarkConfiguratorProps';
import BinaryMetricsConfigurator from 'apps/BenchmarkApp/components/BenchmarkConfigurator/configurators/BinaryMetricsConfigurator';
import DecisionMatrixConfigurator from 'apps/BenchmarkApp/components/BenchmarkConfigurator/configurators/DecisionMatrixConfigurator';
import DefaultPlaceholderConfigurator from 'apps/BenchmarkApp/components/BenchmarkConfigurator/configurators/DefaultPlaceholderConfigurator';
import IntersectionConfigurator from 'apps/BenchmarkApp/components/BenchmarkConfigurator/configurators/IntersectionConfigurator';
import NMetricsConfigurator from 'apps/BenchmarkApp/components/BenchmarkConfigurator/configurators/NMetricsConfigurator';
import SimilarityDiagramConfigurator from 'apps/BenchmarkApp/components/BenchmarkConfigurator/configurators/SimilarityDiagramConfigurator';
import { KPIDiagramConfigurator } from 'apps/BenchmarkApp/components/BenchmarkConfigurator/configurators/SoftKPIDiagramConfigurator';
import React from 'react';
const BenchmarkConfigurator = ({
contentId,
}: BenchmarkConfiguratorProps): JSX.Element => (
<IonMenu contentId={contentId} type="push" className={'doNotPrint'}>
<IonHeader>
<IonToolbar color="primary">
<IonTitle>Configurator</IonTitle>
</IonToolbar>
</IonHeader>
<IonContent>
<DefaultPlaceholderConfigurator />
<BinaryMetricsConfigurator />
<NMetricsConfigurator />
<IntersectionConfigurator />
<KPIDiagramConfigurator />
<DecisionMatrixConfigurator />
<SimilarityDiagramConfigurator /> | </IonContent>
</IonMenu>
);
export default BenchmarkConfigurator; | |
karma.conf.js | // Karma configuration file, see link for more information
// https://karma-runner.github.io/1.0/config/configuration-file.html
module.exports = function (config) {
config.set({
basePath: '',
frameworks: ['jasmine', '@angular-devkit/build-angular'],
plugins: [
require('karma-jasmine'),
require('karma-chrome-launcher'),
require('karma-jasmine-html-reporter'),
require('karma-coverage-istanbul-reporter'),
require('@angular-devkit/build-angular/plugins/karma')
],
client: {
clearContext: false // leave Jasmine Spec Runner output visible in browser
},
coverageIstanbulReporter: {
dir: require('path').join(__dirname, './coverage/agenda'),
reports: ['html', 'lcovonly', 'text-summary'],
fixWebpackSourcePaths: true
},
reporters: ['progress', 'kjhtml'], | colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['Chrome'],
singleRun: false,
restartOnFileChange: true
});
}; | port: 9876, |
procedimento-especial.component.ts | import { ProcedimentoEspecialService } from './procedimento-especial.service';
import { ItemPrescricaoProcedimento } from './models/item-prescricao-procedimento';
import { PrescricaoMedicaService } from './../prescricao-medica.service';
import { FormBuilder } from '@angular/forms';
import { TIPO_PROCEDIMENTO_ESPECIAL } from './models/tipo-procedimento-especial';
import { ActivatedRoute, Router } from '@angular/router';
import { BreadcrumbService } from '@nuvem/primeng-components';
import { Component, OnInit, OnDestroy } from '@angular/core';
@Component({
selector: 'app-procedimento-especial',
templateUrl: './procedimento-especial.component.html', | export class ProcedimentoEspecialComponent implements OnInit, OnDestroy {
paciente: {};
tipoProcedimento = TIPO_PROCEDIMENTO_ESPECIAL;
itensPrescricaoProcedimento: ItemPrescricaoProcedimento[] = [];
prescricaoProcedimento = this.fb.group({
idPaciente: [null],
nome: [null],
tipo: 'PROCEDIMENTO',
dataPrescricao: [new Date()],
observacao: [null],
});
itemPrescricaoProcedimento = this.fb.group({
quantidadeOrteseProtese: [null],
tipoProcedimentoEspecial: [null],
informacoes: [null],
justificativa: [null],
duracaoSolicitada: [null],
tipoProcedimentoId: [null],
});
constructor(
private breadcrumbService: BreadcrumbService,
private route: ActivatedRoute,
private fb: FormBuilder,
private prescricaoMedicaService: PrescricaoMedicaService,
private procedimentoEspecialService: ProcedimentoEspecialService,
private router: Router,
) {}
ngOnInit() {
const codigoPaciente = this.route.snapshot.params['id'];
if (codigoPaciente) {
this.carregarPaciente(codigoPaciente);
}
this.breadcrumbService.setItems([
{ label: 'Prescrição Médica', routerLink: 'prescricao-medica' },
{ label: 'Procedimento Especial' },
]);
}
carregarPaciente(id: number) {
this.prescricaoMedicaService.buscarIdPaciente(id).subscribe((paciente) => {
this.paciente = paciente;
this.prescricaoProcedimento.patchValue({
idPaciente: paciente.id,
nome: paciente.nome,
});
});
}
incluirItem() {
if (this.itemPrescricaoProcedimento.valid) {
this.itensPrescricaoProcedimento.push(this.itemPrescricaoProcedimento.value);
this.itemPrescricaoProcedimento.reset();
}
}
prescrever() {
const prescricao = this.prescricaoProcedimento.value;
const prescricaoProcedimentoObject = Object.assign({}, prescricao, {
itemPrescricaoProcedimentoDTO: this.itensPrescricaoProcedimento,
});
this.procedimentoEspecialService
.prescreverProcedimento(prescricaoProcedimentoObject)
.subscribe(
(resposta) => {
this.router.navigate([
'/prescricao-medica/lista/',
prescricaoProcedimentoObject.idPaciente,
]);
return resposta;
},
(erro) => {
return erro;
},
);
}
ngOnDestroy() {
this.breadcrumbService.reset();
}
} | styleUrls: ['./procedimento-especial.component.css'],
}) |
timepicker.tsx | import { Component, Element, Prop, State, h } from '@stencil/core';
import moment from 'moment-mini';
import { renderHiddenField } from '../../utils/utils';
@Component({
tag: 'fw-timepicker',
shadow: true,
})
export class | {
@Element() host: HTMLElement;
/**
   * State for all the time values
*/
@State() timeValues: any[] = [];
/**
   * Format in which time values are populated in the list box. If the value is hh:mm A, the time values are in the 12-hour format; if the value is HH:mm, they are in the 24-hour format.
*/
@Prop() format: 'hh:mm A' | 'HH:mm' = 'hh:mm A';
/**
* Set true to disable the element
*/
@Prop() disabled = false;
/**
   * Whether the configured format uses the 12-hour AM/PM (meridian) notation, i.e., format === 'hh:mm A'.
*/
@State() isMeridianFormat?: boolean = this.format === 'hh:mm A';
/**
* Time output value
*/
@Prop({ mutable: true }) value?: string;
/**
* Name of the component, saved as part of form data.
*/
@Prop() name = '';
/**
* Time interval between the values displayed in the list, specified in minutes.
*/
@Prop() interval = 30;
/**
* Lower time-limit for the values displayed in the list. If this attribute’s value is in the hh:mm format, it is assumed to be hh:mm AM.
*/
@Prop() minTime?: string = this.isMeridianFormat ? '12:00 AM' : '00:00';
/**
* Upper time-limit for the values displayed in the list. If this attribute’s value is in the hh:mm format, it is assumed to be hh:mm AM.
*/
@Prop() maxTime?: string = this.isMeridianFormat ? '11:30 PM' : '23:30';
/**
   * Boolean representing whether the current max time is the default end time
*/
@State() isDefaultEndTime = ['11:30 PM', '23:30'].includes(this.maxTime);
private getTimeOptionsMeta = nonMeridianFormat => {
const preferredFormat = this.format;
const timeIntervalArgs = {
interval: this.interval,
startTime: moment(this.minTime, preferredFormat).format(nonMeridianFormat),
endTime: moment(this.maxTime, preferredFormat).format(nonMeridianFormat),
};
return timeIntervalArgs;
}
private setTimeValues = () => {
const meridianFormat = 'hh:mm A';
const nonMeridianFormat = 'HH:mm';
const { interval, startTime, endTime } =
this.getTimeOptionsMeta(nonMeridianFormat);
let currentTimeInMs = moment(startTime, nonMeridianFormat).valueOf();
const endTimeInMs = moment(endTime, nonMeridianFormat).valueOf();
while (currentTimeInMs <= endTimeInMs) {
this.timeValues.push({
meridianFormat: moment(currentTimeInMs).format(meridianFormat),
nonMeridianFormat: moment(currentTimeInMs).format(nonMeridianFormat),
});
currentTimeInMs = moment(currentTimeInMs)
.add(interval, 'minutes')
.valueOf();
}
}
private currentTimeLabel(time: any) {
return this.isMeridianFormat ? time.meridianFormat : time.nonMeridianFormat;
}
private currentTimeValue(time: any) {
return time.nonMeridianFormat;
}
private setTimeValue(e: any) {
const { value } = e.detail;
this.value = value;
}
private setEndTime() {
if (this.isDefaultEndTime) {
this.maxTime = this.isMeridianFormat ?
`11:59 PM` : `23:59`;
}
}
componentWillLoad() {
if (this.interval !== 30) {
this.setEndTime();
}
this.setTimeValues();
}
render() {
const { host, name, value } = this;
renderHiddenField(host, name, value);
return (
<fw-select disabled={ this.disabled } value={ this.value }
onFwChange = { e => this.setTimeValue(e) }>
{
this.timeValues.map(time =>
<fw-select-option value = {this.currentTimeValue(time)}>
{ this.currentTimeLabel(time) }
</fw-select-option>
)
}
</fw-select>
);
}
}
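// A hedged usage sketch, not taken from the component's docs: assuming the
// component is registered through the Stencil loader, camelCase props map to
// dash-case attributes per Stencil's convention, e.g.
//
//   <fw-timepicker format="HH:mm" interval="15" min-time="09:00" max-time="18:00"></fw-timepicker>
//
// `interval` is the minute gap between listed values; times outside
// [min-time, max-time] are not rendered in the list.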
| Timepicker |